commit 73d4832b3832c8e639736a2609352fc1a12f370a Author: j Date: Fri Jun 24 14:50:10 2016 +0200 platform for raspberry pi diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..ac7a635 --- /dev/null +++ b/.gitignore @@ -0,0 +1,8 @@ +*.swp +*.pyc +*.pyo +*.pyd +__pycache__ +pip_cache +bin +.DS_Store diff --git a/README.md b/README.md new file mode 100644 index 0000000..a434f75 --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +to update you need: + +apt install python3-pip python3.4 python3.4-dev virtualenv libffi-dev libssl-dev diff --git a/lib/python3.4/site-packages/OpenSSL/SSL.py b/lib/python3.4/site-packages/OpenSSL/SSL.py new file mode 100644 index 0000000..800ae1e --- /dev/null +++ b/lib/python3.4/site-packages/OpenSSL/SSL.py @@ -0,0 +1,1944 @@ +import socket +from sys import platform +from functools import wraps, partial +from itertools import count, chain +from weakref import WeakValueDictionary +from errno import errorcode + +from six import binary_type as _binary_type +from six import integer_types as integer_types +from six import int2byte, indexbytes + +from OpenSSL._util import ( + UNSPECIFIED as _UNSPECIFIED, + exception_from_error_queue as _exception_from_error_queue, + ffi as _ffi, + lib as _lib, + make_assert as _make_assert, + native as _native, + path_string as _path_string, + text_to_bytes_and_warn as _text_to_bytes_and_warn, +) + +from OpenSSL.crypto import ( + FILETYPE_PEM, _PassphraseHelper, PKey, X509Name, X509, X509Store) + +try: + _memoryview = memoryview +except NameError: + class _memoryview(object): + pass + +try: + _buffer = buffer +except NameError: + class _buffer(object): + pass + +OPENSSL_VERSION_NUMBER = _lib.OPENSSL_VERSION_NUMBER +SSLEAY_VERSION = _lib.SSLEAY_VERSION +SSLEAY_CFLAGS = _lib.SSLEAY_CFLAGS +SSLEAY_PLATFORM = _lib.SSLEAY_PLATFORM +SSLEAY_DIR = _lib.SSLEAY_DIR +SSLEAY_BUILT_ON = _lib.SSLEAY_BUILT_ON + +SENT_SHUTDOWN = _lib.SSL_SENT_SHUTDOWN +RECEIVED_SHUTDOWN = _lib.SSL_RECEIVED_SHUTDOWN + +SSLv2_METHOD = 1 +SSLv3_METHOD = 2 +SSLv23_METHOD = 3 +TLSv1_METHOD = 4 +TLSv1_1_METHOD = 5 +TLSv1_2_METHOD = 6 + +OP_NO_SSLv2 = _lib.SSL_OP_NO_SSLv2 +OP_NO_SSLv3 = _lib.SSL_OP_NO_SSLv3 +OP_NO_TLSv1 = _lib.SSL_OP_NO_TLSv1 + +OP_NO_TLSv1_1 = getattr(_lib, "SSL_OP_NO_TLSv1_1", 0) +OP_NO_TLSv1_2 = getattr(_lib, "SSL_OP_NO_TLSv1_2", 0) + +try: + MODE_RELEASE_BUFFERS = _lib.SSL_MODE_RELEASE_BUFFERS +except AttributeError: + pass + +OP_SINGLE_DH_USE = _lib.SSL_OP_SINGLE_DH_USE +OP_SINGLE_ECDH_USE = _lib.SSL_OP_SINGLE_ECDH_USE +OP_EPHEMERAL_RSA = _lib.SSL_OP_EPHEMERAL_RSA +OP_MICROSOFT_SESS_ID_BUG = _lib.SSL_OP_MICROSOFT_SESS_ID_BUG +OP_NETSCAPE_CHALLENGE_BUG = _lib.SSL_OP_NETSCAPE_CHALLENGE_BUG +OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG = ( + _lib.SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG +) +OP_SSLREF2_REUSE_CERT_TYPE_BUG = _lib.SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG +OP_MICROSOFT_BIG_SSLV3_BUFFER = _lib.SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER +try: + OP_MSIE_SSLV2_RSA_PADDING = _lib.SSL_OP_MSIE_SSLV2_RSA_PADDING +except AttributeError: + pass +OP_SSLEAY_080_CLIENT_DH_BUG = _lib.SSL_OP_SSLEAY_080_CLIENT_DH_BUG +OP_TLS_D5_BUG = _lib.SSL_OP_TLS_D5_BUG +OP_TLS_BLOCK_PADDING_BUG = _lib.SSL_OP_TLS_BLOCK_PADDING_BUG +OP_DONT_INSERT_EMPTY_FRAGMENTS = _lib.SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS +OP_CIPHER_SERVER_PREFERENCE = _lib.SSL_OP_CIPHER_SERVER_PREFERENCE +OP_TLS_ROLLBACK_BUG = _lib.SSL_OP_TLS_ROLLBACK_BUG +OP_PKCS1_CHECK_1 = _lib.SSL_OP_PKCS1_CHECK_1 +OP_PKCS1_CHECK_2 = _lib.SSL_OP_PKCS1_CHECK_2 +OP_NETSCAPE_CA_DN_BUG = _lib.SSL_OP_NETSCAPE_CA_DN_BUG 
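The protocol-method and `OP_*` constants above are the raw material for context configuration. A minimal sketch of how they are typically combined (using `Context.set_options`, defined later in this module):

```python
from OpenSSL import SSL

# Negotiate the best mutually supported protocol version, but refuse
# the long-broken SSLv2 and SSLv3 variants.
ctx = SSL.Context(SSL.SSLv23_METHOD)
ctx.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
```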
+OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG = ( + _lib.SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG +) +try: + OP_NO_COMPRESSION = _lib.SSL_OP_NO_COMPRESSION +except AttributeError: + pass + +OP_NO_QUERY_MTU = _lib.SSL_OP_NO_QUERY_MTU +OP_COOKIE_EXCHANGE = _lib.SSL_OP_COOKIE_EXCHANGE +try: + OP_NO_TICKET = _lib.SSL_OP_NO_TICKET +except AttributeError: + pass + +OP_ALL = _lib.SSL_OP_ALL + +VERIFY_PEER = _lib.SSL_VERIFY_PEER +VERIFY_FAIL_IF_NO_PEER_CERT = _lib.SSL_VERIFY_FAIL_IF_NO_PEER_CERT +VERIFY_CLIENT_ONCE = _lib.SSL_VERIFY_CLIENT_ONCE +VERIFY_NONE = _lib.SSL_VERIFY_NONE + +SESS_CACHE_OFF = _lib.SSL_SESS_CACHE_OFF +SESS_CACHE_CLIENT = _lib.SSL_SESS_CACHE_CLIENT +SESS_CACHE_SERVER = _lib.SSL_SESS_CACHE_SERVER +SESS_CACHE_BOTH = _lib.SSL_SESS_CACHE_BOTH +SESS_CACHE_NO_AUTO_CLEAR = _lib.SSL_SESS_CACHE_NO_AUTO_CLEAR +SESS_CACHE_NO_INTERNAL_LOOKUP = _lib.SSL_SESS_CACHE_NO_INTERNAL_LOOKUP +SESS_CACHE_NO_INTERNAL_STORE = _lib.SSL_SESS_CACHE_NO_INTERNAL_STORE +SESS_CACHE_NO_INTERNAL = _lib.SSL_SESS_CACHE_NO_INTERNAL + +SSL_ST_CONNECT = _lib.SSL_ST_CONNECT +SSL_ST_ACCEPT = _lib.SSL_ST_ACCEPT +SSL_ST_MASK = _lib.SSL_ST_MASK +SSL_ST_INIT = _lib.SSL_ST_INIT +SSL_ST_BEFORE = _lib.SSL_ST_BEFORE +SSL_ST_OK = _lib.SSL_ST_OK +SSL_ST_RENEGOTIATE = _lib.SSL_ST_RENEGOTIATE + +SSL_CB_LOOP = _lib.SSL_CB_LOOP +SSL_CB_EXIT = _lib.SSL_CB_EXIT +SSL_CB_READ = _lib.SSL_CB_READ +SSL_CB_WRITE = _lib.SSL_CB_WRITE +SSL_CB_ALERT = _lib.SSL_CB_ALERT +SSL_CB_READ_ALERT = _lib.SSL_CB_READ_ALERT +SSL_CB_WRITE_ALERT = _lib.SSL_CB_WRITE_ALERT +SSL_CB_ACCEPT_LOOP = _lib.SSL_CB_ACCEPT_LOOP +SSL_CB_ACCEPT_EXIT = _lib.SSL_CB_ACCEPT_EXIT +SSL_CB_CONNECT_LOOP = _lib.SSL_CB_CONNECT_LOOP +SSL_CB_CONNECT_EXIT = _lib.SSL_CB_CONNECT_EXIT +SSL_CB_HANDSHAKE_START = _lib.SSL_CB_HANDSHAKE_START +SSL_CB_HANDSHAKE_DONE = _lib.SSL_CB_HANDSHAKE_DONE + + +class Error(Exception): + """ + An error occurred in an `OpenSSL.SSL` API. + """ + + +_raise_current_error = partial(_exception_from_error_queue, Error) +_openssl_assert = _make_assert(Error) + + +class WantReadError(Error): + pass + + +class WantWriteError(Error): + pass + + +class WantX509LookupError(Error): + pass + + +class ZeroReturnError(Error): + pass + + +class SysCallError(Error): + pass + + +class _CallbackExceptionHelper(object): + """ + A base class for wrapper classes that allow for intelligent exception + handling in OpenSSL callbacks. + + :ivar list _problems: Any exceptions that occurred while executing in a + context where they could not be raised in the normal way. Typically + this is because OpenSSL has called into some Python code and requires a + return value. The exceptions are saved to be raised later when it is + possible to do so. + """ + + def __init__(self): + self._problems = [] + + def raise_if_problem(self): + """ + Raise an exception from the OpenSSL error queue or that was previously + captured whe running a callback. + """ + if self._problems: + try: + _raise_current_error() + except Error: + pass + raise self._problems.pop(0) + + +class _VerifyHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as a certificate verification + callback. 
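The `WantReadError`/`WantWriteError` classes defined above are not failures but signals to retry once the transport is ready. A sketch of the retry loop a caller runs on a non-blocking socket (`conn` is assumed to be a `Connection` wrapping one):

```python
import select

from OpenSSL import SSL

def recv_retrying(conn, bufsiz):
    # Loop until OpenSSL can hand back a complete TLS record.
    while True:
        try:
            return conn.recv(bufsiz)
        except SSL.WantReadError:
            select.select([conn], [], [])  # wait until readable
        except SSL.WantWriteError:
            select.select([], [conn], [])  # can happen mid-renegotiation
```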
+ """ + + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ok, store_ctx): + cert = X509.__new__(X509) + cert._x509 = _lib.X509_STORE_CTX_get_current_cert(store_ctx) + error_number = _lib.X509_STORE_CTX_get_error(store_ctx) + error_depth = _lib.X509_STORE_CTX_get_error_depth(store_ctx) + + index = _lib.SSL_get_ex_data_X509_STORE_CTX_idx() + ssl = _lib.X509_STORE_CTX_get_ex_data(store_ctx, index) + connection = Connection._reverse_mapping[ssl] + + try: + result = callback( + connection, cert, error_number, error_depth, ok + ) + except Exception as e: + self._problems.append(e) + return 0 + else: + if result: + _lib.X509_STORE_CTX_set_error(store_ctx, _lib.X509_V_OK) + return 1 + else: + return 0 + + self.callback = _ffi.callback( + "int (*)(int, X509_STORE_CTX *)", wrapper) + + +class _NpnAdvertiseHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an NPN advertisement callback. + """ + + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, out, outlen, arg): + try: + conn = Connection._reverse_mapping[ssl] + protos = callback(conn) + + # Join the protocols into a Python bytestring, length-prefixing + # each element. + protostr = b''.join( + chain.from_iterable((int2byte(len(p)), p) for p in protos) + ) + + # Save our callback arguments on the connection object. This is + # done to make sure that they don't get freed before OpenSSL + # uses them. Then, return them appropriately in the output + # parameters. + conn._npn_advertise_callback_args = [ + _ffi.new("unsigned int *", len(protostr)), + _ffi.new("unsigned char[]", protostr), + ] + outlen[0] = conn._npn_advertise_callback_args[0][0] + out[0] = conn._npn_advertise_callback_args[1] + return 0 + except Exception as e: + self._problems.append(e) + return 2 # SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback( + "int (*)(SSL *, const unsigned char **, unsigned int *, void *)", + wrapper + ) + + +class _NpnSelectHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an NPN selection callback. + """ + + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, out, outlen, in_, inlen, arg): + try: + conn = Connection._reverse_mapping[ssl] + + # The string passed to us is actually made up of multiple + # length-prefixed bytestrings. We need to split that into a + # list. + instr = _ffi.buffer(in_, inlen)[:] + protolist = [] + while instr: + l = indexbytes(instr, 0) + proto = instr[1:l + 1] + protolist.append(proto) + instr = instr[l + 1:] + + # Call the callback + outstr = callback(conn, protolist) + + # Save our callback arguments on the connection object. This is + # done to make sure that they don't get freed before OpenSSL + # uses them. Then, return them appropriately in the output + # parameters. 
+ conn._npn_select_callback_args = [ + _ffi.new("unsigned char *", len(outstr)), + _ffi.new("unsigned char[]", outstr), + ] + outlen[0] = conn._npn_select_callback_args[0][0] + out[0] = conn._npn_select_callback_args[1] + return 0 + except Exception as e: + self._problems.append(e) + return 2 # SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback( + ("int (*)(SSL *, unsigned char **, unsigned char *, " + "const unsigned char *, unsigned int, void *)"), + wrapper + ) + + +class _ALPNSelectHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an ALPN selection callback. + """ + + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, out, outlen, in_, inlen, arg): + try: + conn = Connection._reverse_mapping[ssl] + + # The string passed to us is made up of multiple + # length-prefixed bytestrings. We need to split that into a + # list. + instr = _ffi.buffer(in_, inlen)[:] + protolist = [] + while instr: + encoded_len = indexbytes(instr, 0) + proto = instr[1:encoded_len + 1] + protolist.append(proto) + instr = instr[encoded_len + 1:] + + # Call the callback + outstr = callback(conn, protolist) + + if not isinstance(outstr, _binary_type): + raise TypeError("ALPN callback must return a bytestring.") + + # Save our callback arguments on the connection object to make + # sure that they don't get freed before OpenSSL can use them. + # Then, return them in the appropriate output parameters. + conn._alpn_select_callback_args = [ + _ffi.new("unsigned char *", len(outstr)), + _ffi.new("unsigned char[]", outstr), + ] + outlen[0] = conn._alpn_select_callback_args[0][0] + out[0] = conn._alpn_select_callback_args[1] + return 0 + except Exception as e: + self._problems.append(e) + return 2 # SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback( + ("int (*)(SSL *, unsigned char **, unsigned char *, " + "const unsigned char *, unsigned int, void *)"), + wrapper + ) + + +def _asFileDescriptor(obj): + fd = None + if not isinstance(obj, integer_types): + meth = getattr(obj, "fileno", None) + if meth is not None: + obj = meth() + + if isinstance(obj, integer_types): + fd = obj + + if not isinstance(fd, integer_types): + raise TypeError("argument must be an int, or have a fileno() method.") + elif fd < 0: + raise ValueError( + "file descriptor cannot be a negative integer (%i)" % (fd,)) + + return fd + + +def SSLeay_version(type): + """ + Return a string describing the version of OpenSSL in use. + + :param type: One of the SSLEAY_ constants defined in this module. + """ + return _ffi.string(_lib.SSLeay_version(type)) + + +def _requires_npn(func): + """ + Wraps any function that requires NPN support in OpenSSL, ensuring that + NotImplementedError is raised if NPN is not present. + """ + @wraps(func) + def wrapper(*args, **kwargs): + if not _lib.Cryptography_HAS_NEXTPROTONEG: + raise NotImplementedError("NPN not available.") + + return func(*args, **kwargs) + + return wrapper + + +def _requires_alpn(func): + """ + Wraps any function that requires ALPN support in OpenSSL, ensuring that + NotImplementedError is raised if ALPN support is not present. + """ + @wraps(func) + def wrapper(*args, **kwargs): + if not _lib.Cryptography_HAS_ALPN: + raise NotImplementedError("ALPN not available.") + + return func(*args, **kwargs) + + return wrapper + + +class Session(object): + pass + + +class Context(object): + """ + :class:`OpenSSL.SSL.Context` instances define the parameters for setting + up new SSL connections. 
+ """ + _methods = { + SSLv2_METHOD: "SSLv2_method", + SSLv3_METHOD: "SSLv3_method", + SSLv23_METHOD: "SSLv23_method", + TLSv1_METHOD: "TLSv1_method", + TLSv1_1_METHOD: "TLSv1_1_method", + TLSv1_2_METHOD: "TLSv1_2_method", + } + _methods = dict( + (identifier, getattr(_lib, name)) + for (identifier, name) in _methods.items() + if getattr(_lib, name, None) is not None) + + def __init__(self, method): + """ + :param method: One of SSLv2_METHOD, SSLv3_METHOD, SSLv23_METHOD, or + TLSv1_METHOD. + """ + if not isinstance(method, integer_types): + raise TypeError("method must be an integer") + + try: + method_func = self._methods[method] + except KeyError: + raise ValueError("No such protocol") + + method_obj = method_func() + if method_obj == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + context = _lib.SSL_CTX_new(method_obj) + if context == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + context = _ffi.gc(context, _lib.SSL_CTX_free) + + self._context = context + self._passphrase_helper = None + self._passphrase_callback = None + self._passphrase_userdata = None + self._verify_helper = None + self._verify_callback = None + self._info_callback = None + self._tlsext_servername_callback = None + self._app_data = None + self._npn_advertise_helper = None + self._npn_advertise_callback = None + self._npn_select_helper = None + self._npn_select_callback = None + self._alpn_select_helper = None + self._alpn_select_callback = None + + # SSL_CTX_set_app_data(self->ctx, self); + # SSL_CTX_set_mode(self->ctx, SSL_MODE_ENABLE_PARTIAL_WRITE | + # SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER | + # SSL_MODE_AUTO_RETRY); + self.set_mode(_lib.SSL_MODE_ENABLE_PARTIAL_WRITE) + + def load_verify_locations(self, cafile, capath=None): + """ + Let SSL know where we can find trusted certificates for the certificate + chain + + :param cafile: In which file we can find the certificates (``bytes`` or + ``unicode``). + :param capath: In which directory we can find the certificates + (``bytes`` or ``unicode``). + + :return: None + """ + if cafile is None: + cafile = _ffi.NULL + else: + cafile = _path_string(cafile) + + if capath is None: + capath = _ffi.NULL + else: + capath = _path_string(capath) + + load_result = _lib.SSL_CTX_load_verify_locations( + self._context, cafile, capath + ) + if not load_result: + _raise_current_error() + + def _wrap_callback(self, callback): + @wraps(callback) + def wrapper(size, verify, userdata): + return callback(size, verify, self._passphrase_userdata) + return _PassphraseHelper( + FILETYPE_PEM, wrapper, more_args=True, truncate=True) + + def set_passwd_cb(self, callback, userdata=None): + """ + Set the passphrase callback + + :param callback: The Python callback to use + :param userdata: (optional) A Python object which will be given as + argument to the callback + :return: None + """ + if not callable(callback): + raise TypeError("callback must be callable") + + self._passphrase_helper = self._wrap_callback(callback) + self._passphrase_callback = self._passphrase_helper.callback + _lib.SSL_CTX_set_default_passwd_cb( + self._context, self._passphrase_callback) + self._passphrase_userdata = userdata + + def set_default_verify_paths(self): + """ + Use the platform-specific CA certificate locations + + :return: None + """ + set_result = _lib.SSL_CTX_set_default_verify_paths(self._context) + if not set_result: + # TODO: This is untested. 
+ _raise_current_error() + + def use_certificate_chain_file(self, certfile): + """ + Load a certificate chain from a file + + :param certfile: The name of the certificate chain file (``bytes`` or + ``unicode``). + + :return: None + """ + certfile = _path_string(certfile) + + result = _lib.SSL_CTX_use_certificate_chain_file( + self._context, certfile + ) + if not result: + _raise_current_error() + + def use_certificate_file(self, certfile, filetype=FILETYPE_PEM): + """ + Load a certificate from a file + + :param certfile: The name of the certificate file (``bytes`` or + ``unicode``). + :param filetype: (optional) The encoding of the file, default is PEM + + :return: None + """ + certfile = _path_string(certfile) + if not isinstance(filetype, integer_types): + raise TypeError("filetype must be an integer") + + use_result = _lib.SSL_CTX_use_certificate_file( + self._context, certfile, filetype + ) + if not use_result: + _raise_current_error() + + def use_certificate(self, cert): + """ + Load a certificate from a X509 object + + :param cert: The X509 object + :return: None + """ + if not isinstance(cert, X509): + raise TypeError("cert must be an X509 instance") + + use_result = _lib.SSL_CTX_use_certificate(self._context, cert._x509) + if not use_result: + _raise_current_error() + + def add_extra_chain_cert(self, certobj): + """ + Add certificate to chain + + :param certobj: The X509 certificate object to add to the chain + :return: None + """ + if not isinstance(certobj, X509): + raise TypeError("certobj must be an X509 instance") + + copy = _lib.X509_dup(certobj._x509) + add_result = _lib.SSL_CTX_add_extra_chain_cert(self._context, copy) + if not add_result: + # TODO: This is untested. + _lib.X509_free(copy) + _raise_current_error() + + def _raise_passphrase_exception(self): + if self._passphrase_helper is None: + _raise_current_error() + exception = self._passphrase_helper.raise_if_problem(Error) + if exception is not None: + raise exception + + def use_privatekey_file(self, keyfile, filetype=_UNSPECIFIED): + """ + Load a private key from a file + + :param keyfile: The name of the key file (``bytes`` or ``unicode``) + :param filetype: (optional) The encoding of the file, default is PEM + + :return: None + """ + keyfile = _path_string(keyfile) + + if filetype is _UNSPECIFIED: + filetype = FILETYPE_PEM + elif not isinstance(filetype, integer_types): + raise TypeError("filetype must be an integer") + + use_result = _lib.SSL_CTX_use_PrivateKey_file( + self._context, keyfile, filetype) + if not use_result: + self._raise_passphrase_exception() + + def use_privatekey(self, pkey): + """ + Load a private key from a PKey object + + :param pkey: The PKey object + :return: None + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + use_result = _lib.SSL_CTX_use_PrivateKey(self._context, pkey._pkey) + if not use_result: + self._raise_passphrase_exception() + + def check_privatekey(self): + """ + Check that the private key and certificate match up + + :return: None (raises an exception if something's wrong) + """ + if not _lib.SSL_CTX_check_private_key(self._context): + _raise_current_error() + + def load_client_ca(self, cafile): + """ + Load the trusted certificates that will be sent to the client. Does + not actually imply any of the certificates are trusted; that must be + configured separately. + + :param bytes cafile: The path to a certificates file in PEM format. 
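A sketch of the usual server-side loading sequence built from the methods above (file names are placeholders):

```python
from OpenSSL import SSL

ctx = SSL.Context(SSL.TLSv1_2_METHOD)
ctx.use_certificate_chain_file(b"server-chain.pem")
ctx.use_privatekey_file(b"server-key.pem")
ctx.check_privatekey()  # raises SSL.Error if key and certificate disagree
```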
+ :return: None + """ + ca_list = _lib.SSL_load_client_CA_file( + _text_to_bytes_and_warn("cafile", cafile) + ) + _openssl_assert(ca_list != _ffi.NULL) + # SSL_CTX_set_client_CA_list doesn't return anything. + _lib.SSL_CTX_set_client_CA_list(self._context, ca_list) + + def set_session_id(self, buf): + """ + Set the session id to *buf* within which a session can be reused for + this Context object. This is needed when doing session resumption, + because there is no way for a stored session to know which Context + object it is associated with. + + :param bytes buf: The session id. + + :returns: None + """ + buf = _text_to_bytes_and_warn("buf", buf) + _openssl_assert( + _lib.SSL_CTX_set_session_id_context( + self._context, + buf, + len(buf), + ) == 1 + ) + + def set_session_cache_mode(self, mode): + """ + Enable/disable session caching and specify the mode used. + + :param mode: One or more of the SESS_CACHE_* flags (combine using + bitwise or) + :returns: The previously set caching mode. + """ + if not isinstance(mode, integer_types): + raise TypeError("mode must be an integer") + + return _lib.SSL_CTX_set_session_cache_mode(self._context, mode) + + def get_session_cache_mode(self): + """ + :returns: The currently used cache mode. + """ + return _lib.SSL_CTX_get_session_cache_mode(self._context) + + def set_verify(self, mode, callback): + """ + Set the verify mode and verify callback + + :param mode: The verify mode, this is either VERIFY_NONE or + VERIFY_PEER combined with possible other flags + :param callback: The Python callback to use + :return: None + + See SSL_CTX_set_verify(3SSL) for further details. + """ + if not isinstance(mode, integer_types): + raise TypeError("mode must be an integer") + + if not callable(callback): + raise TypeError("callback must be callable") + + self._verify_helper = _VerifyHelper(callback) + self._verify_callback = self._verify_helper.callback + _lib.SSL_CTX_set_verify(self._context, mode, self._verify_callback) + + def set_verify_depth(self, depth): + """ + Set the verify depth + + :param depth: An integer specifying the verify depth + :return: None + """ + if not isinstance(depth, integer_types): + raise TypeError("depth must be an integer") + + _lib.SSL_CTX_set_verify_depth(self._context, depth) + + def get_verify_mode(self): + """ + Get the verify mode + + :return: The verify mode + """ + return _lib.SSL_CTX_get_verify_mode(self._context) + + def get_verify_depth(self): + """ + Get the verify depth + + :return: The verify depth + """ + return _lib.SSL_CTX_get_verify_depth(self._context) + + def load_tmp_dh(self, dhfile): + """ + Load parameters for Ephemeral Diffie-Hellman + + :param dhfile: The file to load EDH parameters from (``bytes`` or + ``unicode``). + + :return: None + """ + dhfile = _path_string(dhfile) + + bio = _lib.BIO_new_file(dhfile, b"r") + if bio == _ffi.NULL: + _raise_current_error() + bio = _ffi.gc(bio, _lib.BIO_free) + + dh = _lib.PEM_read_bio_DHparams(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + dh = _ffi.gc(dh, _lib.DH_free) + _lib.SSL_CTX_set_tmp_dh(self._context, dh) + + def set_tmp_ecdh(self, curve): + """ + Select a curve to use for ECDHE key exchange. + + :param curve: A curve object to use as returned by either + :py:meth:`OpenSSL.crypto.get_elliptic_curve` or + :py:meth:`OpenSSL.crypto.get_elliptic_curves`. + + :return: None + """ + _lib.SSL_CTX_set_tmp_ecdh(self._context, curve._to_EC_KEY()) + + def set_cipher_list(self, cipher_list): + """ + Set the list of ciphers to be used in this context. 
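A sketch of the verify-callback contract enforced by `_VerifyHelper`: the callback receives the connection, the certificate currently being checked, OpenSSL's error number, the chain depth, and OpenSSL's preliminary verdict, and must return a truthy value to accept the certificate (`ctx` is an existing `Context`):

```python
from OpenSSL import SSL

def verify_cb(conn, cert, errnum, depth, ok):
    if not ok:
        print("verify failed at depth %d (error %d)" % (depth, errnum))
    return ok  # keep OpenSSL's own verdict

ctx.set_verify(SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT, verify_cb)
```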
+ + See the OpenSSL manual for more information (e.g. + :manpage:`ciphers(1)`). + + :param bytes cipher_list: An OpenSSL cipher string. + :return: None + """ + cipher_list = _text_to_bytes_and_warn("cipher_list", cipher_list) + + if not isinstance(cipher_list, bytes): + raise TypeError("cipher_list must be a byte string.") + + _openssl_assert( + _lib.SSL_CTX_set_cipher_list(self._context, cipher_list) == 1 + ) + + def set_client_ca_list(self, certificate_authorities): + """ + Set the list of preferred client certificate signers for this server + context. + + This list of certificate authorities will be sent to the client when + the server requests a client certificate. + + :param certificate_authorities: a sequence of X509Names. + :return: None + """ + name_stack = _lib.sk_X509_NAME_new_null() + if name_stack == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + try: + for ca_name in certificate_authorities: + if not isinstance(ca_name, X509Name): + raise TypeError( + "client CAs must be X509Name objects, not %s " + "objects" % ( + type(ca_name).__name__, + ) + ) + copy = _lib.X509_NAME_dup(ca_name._name) + if copy == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + push_result = _lib.sk_X509_NAME_push(name_stack, copy) + if not push_result: + _lib.X509_NAME_free(copy) + _raise_current_error() + except: + _lib.sk_X509_NAME_free(name_stack) + raise + + _lib.SSL_CTX_set_client_CA_list(self._context, name_stack) + + def add_client_ca(self, certificate_authority): + """ + Add the CA certificate to the list of preferred signers for this + context. + + The list of certificate authorities will be sent to the client when the + server requests a client certificate. + + :param certificate_authority: certificate authority's X509 certificate. + :return: None + """ + if not isinstance(certificate_authority, X509): + raise TypeError("certificate_authority must be an X509 instance") + + add_result = _lib.SSL_CTX_add_client_CA( + self._context, certificate_authority._x509) + if not add_result: + # TODO: This is untested. + _raise_current_error() + + def set_timeout(self, timeout): + """ + Set session timeout + + :param timeout: The timeout in seconds + :return: The previous session timeout + """ + if not isinstance(timeout, integer_types): + raise TypeError("timeout must be an integer") + + return _lib.SSL_CTX_set_timeout(self._context, timeout) + + def get_timeout(self): + """ + Get the session timeout + + :return: The session timeout + """ + return _lib.SSL_CTX_get_timeout(self._context) + + def set_info_callback(self, callback): + """ + Set the info callback + + :param callback: The Python callback to use + :return: None + """ + @wraps(callback) + def wrapper(ssl, where, return_code): + callback(Connection._reverse_mapping[ssl], where, return_code) + self._info_callback = _ffi.callback( + "void (*)(const SSL *, int, int)", wrapper) + _lib.SSL_CTX_set_info_callback(self._context, self._info_callback) + + def get_app_data(self): + """ + Get the application data (supplied via set_app_data()) + + :return: The application data + """ + return self._app_data + + def set_app_data(self, data): + """ + Set the application data (will be returned from get_app_data()) + + :param data: Any Python object + :return: None + """ + self._app_data = data + + def get_cert_store(self): + """ + Get the certificate store for the context. + + :return: A X509Store object or None if it does not have one. 
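The info callback registered above is handed the `SSL_CB_*` flags defined at the top of this module; a sketch that merely logs handshake boundaries (`ctx` is an existing `Context`):

```python
from OpenSSL import SSL

def info_cb(conn, where, ret):
    if where & SSL.SSL_CB_HANDSHAKE_START:
        print("handshake started")
    elif where & SSL.SSL_CB_HANDSHAKE_DONE:
        print("handshake finished")

ctx.set_info_callback(info_cb)
```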
+ """ + store = _lib.SSL_CTX_get_cert_store(self._context) + if store == _ffi.NULL: + # TODO: This is untested. + return None + + pystore = X509Store.__new__(X509Store) + pystore._store = store + return pystore + + def set_options(self, options): + """ + Add options. Options set before are not cleared! + + :param options: The options to add. + :return: The new option bitmask. + """ + if not isinstance(options, integer_types): + raise TypeError("options must be an integer") + + return _lib.SSL_CTX_set_options(self._context, options) + + def set_mode(self, mode): + """ + Add modes via bitmask. Modes set before are not cleared! + + :param mode: The mode to add. + :return: The new mode bitmask. + """ + if not isinstance(mode, integer_types): + raise TypeError("mode must be an integer") + + return _lib.SSL_CTX_set_mode(self._context, mode) + + def set_tlsext_servername_callback(self, callback): + """ + Specify a callback function to be called when clients specify a server + name. + + :param callback: The callback function. It will be invoked with one + argument, the Connection instance. + """ + @wraps(callback) + def wrapper(ssl, alert, arg): + callback(Connection._reverse_mapping[ssl]) + return 0 + + self._tlsext_servername_callback = _ffi.callback( + "int (*)(const SSL *, int *, void *)", wrapper) + _lib.SSL_CTX_set_tlsext_servername_callback( + self._context, self._tlsext_servername_callback) + + @_requires_npn + def set_npn_advertise_callback(self, callback): + """ + Specify a callback function that will be called when offering `Next + Protocol Negotiation + `_ as a server. + + :param callback: The callback function. It will be invoked with one + argument, the Connection instance. It should return a list of + bytestrings representing the advertised protocols, like + ``[b'http/1.1', b'spdy/2']``. + """ + self._npn_advertise_helper = _NpnAdvertiseHelper(callback) + self._npn_advertise_callback = self._npn_advertise_helper.callback + _lib.SSL_CTX_set_next_protos_advertised_cb( + self._context, self._npn_advertise_callback, _ffi.NULL) + + @_requires_npn + def set_npn_select_callback(self, callback): + """ + Specify a callback function that will be called when a server offers + Next Protocol Negotiation options. + + :param callback: The callback function. It will be invoked with two + arguments: the Connection, and a list of offered protocols as + bytestrings, e.g. ``[b'http/1.1', b'spdy/2']``. It should return + one of those bytestrings, the chosen protocol. + """ + self._npn_select_helper = _NpnSelectHelper(callback) + self._npn_select_callback = self._npn_select_helper.callback + _lib.SSL_CTX_set_next_proto_select_cb( + self._context, self._npn_select_callback, _ffi.NULL) + + @_requires_alpn + def set_alpn_protos(self, protos): + """ + Specify the clients ALPN protocol list. + + These protocols are offered to the server during protocol negotiation. + + :param protos: A list of the protocols to be offered to the server. + This list should be a Python list of bytestrings representing the + protocols to offer, e.g. ``[b'http/1.1', b'spdy/2']``. + """ + # Take the list of protocols and join them together, prefixing them + # with their lengths. + protostr = b''.join( + chain.from_iterable((int2byte(len(p)), p) for p in protos) + ) + + # Build a C string from the list. We don't need to save this off + # because OpenSSL immediately copies the data out. 
+ input_str = _ffi.new("unsigned char[]", protostr) + input_str_len = _ffi.cast("unsigned", len(protostr)) + _lib.SSL_CTX_set_alpn_protos(self._context, input_str, input_str_len) + + @_requires_alpn + def set_alpn_select_callback(self, callback): + """ + Set the callback to handle ALPN protocol choice. + + :param callback: The callback function. It will be invoked with two + arguments: the Connection, and a list of offered protocols as + bytestrings, e.g ``[b'http/1.1', b'spdy/2']``. It should return + one of those bytestrings, the chosen protocol. + """ + self._alpn_select_helper = _ALPNSelectHelper(callback) + self._alpn_select_callback = self._alpn_select_helper.callback + _lib.SSL_CTX_set_alpn_select_cb( + self._context, self._alpn_select_callback, _ffi.NULL) + +ContextType = Context + + +class Connection(object): + """ + """ + _reverse_mapping = WeakValueDictionary() + + def __init__(self, context, socket=None): + """ + Create a new Connection object, using the given OpenSSL.SSL.Context + instance and socket. + + :param context: An SSL Context to use for this connection + :param socket: The socket to use for transport layer + """ + if not isinstance(context, Context): + raise TypeError("context must be a Context instance") + + ssl = _lib.SSL_new(context._context) + self._ssl = _ffi.gc(ssl, _lib.SSL_free) + self._context = context + self._app_data = None + + # References to strings used for Next Protocol Negotiation. OpenSSL's + # header files suggest that these might get copied at some point, but + # doesn't specify when, so we store them here to make sure they don't + # get freed before OpenSSL uses them. + self._npn_advertise_callback_args = None + self._npn_select_callback_args = None + + # References to strings used for Application Layer Protocol + # Negotiation. These strings get copied at some point but it's well + # after the callback returns, so we have to hang them somewhere to + # avoid them getting freed. + self._alpn_select_callback_args = None + + self._reverse_mapping[self._ssl] = self + + if socket is None: + self._socket = None + # Don't set up any gc for these, SSL_free will take care of them. + self._into_ssl = _lib.BIO_new(_lib.BIO_s_mem()) + self._from_ssl = _lib.BIO_new(_lib.BIO_s_mem()) + + if self._into_ssl == _ffi.NULL or self._from_ssl == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + _lib.SSL_set_bio(self._ssl, self._into_ssl, self._from_ssl) + else: + self._into_ssl = None + self._from_ssl = None + self._socket = socket + set_result = _lib.SSL_set_fd( + self._ssl, _asFileDescriptor(self._socket)) + if not set_result: + # TODO: This is untested. + _raise_current_error() + + def __getattr__(self, name): + """ + Look up attributes on the wrapped socket object if they are not found + on the Connection object. 
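Both halves of ALPN negotiation wired together with the methods above; `server_ctx` and `client_ctx` are assumed to be existing `Context` objects:

```python
# Server: pick the most preferred protocol the client also offered.
def alpn_cb(conn, protocols):
    for proto in (b'h2', b'http/1.1'):
        if proto in protocols:
            return proto
    return b'http/1.1'  # fall back rather than abort

server_ctx.set_alpn_select_callback(alpn_cb)

# Client: advertise what it can speak, in preference order.
client_ctx.set_alpn_protos([b'h2', b'http/1.1'])
```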
+ """ + if self._socket is None: + raise AttributeError("'%s' object has no attribute '%s'" % ( + self.__class__.__name__, name + )) + else: + return getattr(self._socket, name) + + def _raise_ssl_error(self, ssl, result): + if self._context._verify_helper is not None: + self._context._verify_helper.raise_if_problem() + if self._context._npn_advertise_helper is not None: + self._context._npn_advertise_helper.raise_if_problem() + if self._context._npn_select_helper is not None: + self._context._npn_select_helper.raise_if_problem() + if self._context._alpn_select_helper is not None: + self._context._alpn_select_helper.raise_if_problem() + + error = _lib.SSL_get_error(ssl, result) + if error == _lib.SSL_ERROR_WANT_READ: + raise WantReadError() + elif error == _lib.SSL_ERROR_WANT_WRITE: + raise WantWriteError() + elif error == _lib.SSL_ERROR_ZERO_RETURN: + raise ZeroReturnError() + elif error == _lib.SSL_ERROR_WANT_X509_LOOKUP: + # TODO: This is untested. + raise WantX509LookupError() + elif error == _lib.SSL_ERROR_SYSCALL: + if _lib.ERR_peek_error() == 0: + if result < 0: + if platform == "win32": + errno = _ffi.getwinerror()[0] + else: + errno = _ffi.errno + raise SysCallError(errno, errorcode.get(errno)) + else: + raise SysCallError(-1, "Unexpected EOF") + else: + # TODO: This is untested. + _raise_current_error() + elif error == _lib.SSL_ERROR_NONE: + pass + else: + _raise_current_error() + + def get_context(self): + """ + Get session context + """ + return self._context + + def set_context(self, context): + """ + Switch this connection to a new session context + + :param context: A :py:class:`Context` instance giving the new session + context to use. + """ + if not isinstance(context, Context): + raise TypeError("context must be a Context instance") + + _lib.SSL_set_SSL_CTX(self._ssl, context._context) + self._context = context + + def get_servername(self): + """ + Retrieve the servername extension value if provided in the client hello + message, or None if there wasn't one. + + :return: A byte string giving the server name or :py:data:`None`. + """ + name = _lib.SSL_get_servername( + self._ssl, _lib.TLSEXT_NAMETYPE_host_name + ) + if name == _ffi.NULL: + return None + + return _ffi.string(name) + + def set_tlsext_host_name(self, name): + """ + Set the value of the servername extension to send in the client hello. + + :param name: A byte string giving the name. + """ + if not isinstance(name, bytes): + raise TypeError("name must be a byte string") + elif b"\0" in name: + raise TypeError("name must not contain NUL byte") + + # XXX I guess this can fail sometimes? + _lib.SSL_set_tlsext_host_name(self._ssl, name) + + def pending(self): + """ + Get the number of bytes that can be safely read from the connection + + :return: The number of bytes available in the receive buffer. + """ + return _lib.SSL_pending(self._ssl) + + def send(self, buf, flags=0): + """ + Send data on the connection. NOTE: If you get one of the WantRead, + WantWrite or WantX509Lookup exceptions on this, you have to call the + method again with the SAME buffer. 
+ + :param buf: The string, buffer or memoryview to send + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored + :return: The number of bytes written + """ + # Backward compatibility + buf = _text_to_bytes_and_warn("buf", buf) + + if isinstance(buf, _memoryview): + buf = buf.tobytes() + if isinstance(buf, _buffer): + buf = str(buf) + if not isinstance(buf, bytes): + raise TypeError("data must be a memoryview, buffer or byte string") + + result = _lib.SSL_write(self._ssl, buf, len(buf)) + self._raise_ssl_error(self._ssl, result) + return result + write = send + + def sendall(self, buf, flags=0): + """ + Send "all" data on the connection. This calls send() repeatedly until + all data is sent. If an error occurs, it's impossible to tell how much + data has been sent. + + :param buf: The string, buffer or memoryview to send + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored + :return: The number of bytes written + """ + buf = _text_to_bytes_and_warn("buf", buf) + + if isinstance(buf, _memoryview): + buf = buf.tobytes() + if isinstance(buf, _buffer): + buf = str(buf) + if not isinstance(buf, bytes): + raise TypeError("buf must be a memoryview, buffer or byte string") + + left_to_send = len(buf) + total_sent = 0 + data = _ffi.new("char[]", buf) + + while left_to_send: + result = _lib.SSL_write(self._ssl, data + total_sent, left_to_send) + self._raise_ssl_error(self._ssl, result) + total_sent += result + left_to_send -= result + + def recv(self, bufsiz, flags=None): + """ + Receive data on the connection. NOTE: If you get one of the WantRead, + WantWrite or WantX509Lookup exceptions on this, you have to call the + method again with the SAME buffer. + + :param bufsiz: The maximum number of bytes to read + :param flags: (optional) The only supported flag is ``MSG_PEEK``, + all other flags are ignored. + :return: The string read from the Connection + """ + buf = _ffi.new("char[]", bufsiz) + if flags is not None and flags & socket.MSG_PEEK: + result = _lib.SSL_peek(self._ssl, buf, bufsiz) + else: + result = _lib.SSL_read(self._ssl, buf, bufsiz) + self._raise_ssl_error(self._ssl, result) + return _ffi.buffer(buf, result)[:] + read = recv + + def recv_into(self, buffer, nbytes=None, flags=None): + """ + Receive data on the connection and store the data into a buffer rather + than creating a new string. + + :param buffer: The buffer to copy into. + :param nbytes: (optional) The maximum number of bytes to read into the + buffer. If not present, defaults to the size of the buffer. If + larger than the size of the buffer, is reduced to the size of the + buffer. + :param flags: (optional) The only supported flag is ``MSG_PEEK``, + all other flags are ignored. + :return: The number of bytes read into the buffer. + """ + if nbytes is None: + nbytes = len(buffer) + else: + nbytes = min(nbytes, len(buffer)) + + # We need to create a temporary buffer. This is annoying, it would be + # better if we could pass memoryviews straight into the SSL_read call, + # but right now we can't. Revisit this if CFFI gets that ability. + buf = _ffi.new("char[]", nbytes) + if flags is not None and flags & socket.MSG_PEEK: + result = _lib.SSL_peek(self._ssl, buf, nbytes) + else: + result = _lib.SSL_read(self._ssl, buf, nbytes) + self._raise_ssl_error(self._ssl, result) + + # This strange line is all to avoid a memory copy. 
The buffer protocol + # should allow us to assign a CFFI buffer to the LHS of this line, but + # on CPython 3.3+ that segfaults. As a workaround, we can temporarily + # wrap it in a memoryview, except on Python 2.6 which doesn't have a + # memoryview type. + try: + buffer[:result] = memoryview(_ffi.buffer(buf, result)) + except NameError: + buffer[:result] = _ffi.buffer(buf, result) + + return result + + def _handle_bio_errors(self, bio, result): + if _lib.BIO_should_retry(bio): + if _lib.BIO_should_read(bio): + raise WantReadError() + elif _lib.BIO_should_write(bio): + # TODO: This is untested. + raise WantWriteError() + elif _lib.BIO_should_io_special(bio): + # TODO: This is untested. I think io_special means the socket + # BIO has a not-yet connected socket. + raise ValueError("BIO_should_io_special") + else: + # TODO: This is untested. + raise ValueError("unknown bio failure") + else: + # TODO: This is untested. + _raise_current_error() + + def bio_read(self, bufsiz): + """ + When using non-socket connections this function reads the "dirty" data + that would have traveled away on the network. + + :param bufsiz: The maximum number of bytes to read + :return: The string read. + """ + if self._from_ssl is None: + raise TypeError("Connection sock was not None") + + if not isinstance(bufsiz, integer_types): + raise TypeError("bufsiz must be an integer") + + buf = _ffi.new("char[]", bufsiz) + result = _lib.BIO_read(self._from_ssl, buf, bufsiz) + if result <= 0: + self._handle_bio_errors(self._from_ssl, result) + + return _ffi.buffer(buf, result)[:] + + def bio_write(self, buf): + """ + When using non-socket connections this function sends "dirty" data that + would have traveled in on the network. + + :param buf: The string to put into the memory BIO. + :return: The number of bytes written + """ + buf = _text_to_bytes_and_warn("buf", buf) + + if self._into_ssl is None: + raise TypeError("Connection sock was not None") + + result = _lib.BIO_write(self._into_ssl, buf, len(buf)) + if result <= 0: + self._handle_bio_errors(self._into_ssl, result) + return result + + def renegotiate(self): + """ + Renegotiate the session. + + :return: True if the renegotiation can be started, False otherwise + :rtype: bool + """ + if not self.renegotiate_pending(): + _openssl_assert(_lib.SSL_renegotiate(self._ssl) == 1) + return True + return False + + def do_handshake(self): + """ + Perform an SSL handshake (usually called after renegotiate() or one of + set_*_state()). This can raise the same exceptions as send and recv. + + :return: None. + """ + result = _lib.SSL_do_handshake(self._ssl) + self._raise_ssl_error(self._ssl, result) + + def renegotiate_pending(self): + """ + Check if there's a renegotiation in progress, it will return False once + a renegotiation is finished. + + :return: Whether there's a renegotiation in progress + :rtype: bool + """ + return _lib.SSL_renegotiate_pending(self._ssl) == 1 + + def total_renegotiations(self): + """ + Find out the total number of renegotiations. + + :return: The number of renegotiations. + :rtype: int + """ + return _lib.SSL_total_renegotiations(self._ssl) + + def connect(self, addr): + """ + Connect to remote host and set up client-side SSL + + :param addr: A remote address + :return: What the socket's connect method returns + """ + _lib.SSL_set_connect_state(self._ssl) + return self._socket.connect(addr) + + def connect_ex(self, addr): + """ + Connect to remote host and set up client-side SSL. 
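The memory-BIO methods above allow TLS without a real socket: ciphertext is exchanged by hand. A sketch of one pump step, where `transport` is a hypothetical object with `receive()`/`send()` methods and `conn` is a socket-less `Connection`:

```python
from OpenSSL import SSL

def pump(conn, transport):
    # Feed incoming ciphertext to OpenSSL.
    data = transport.receive()
    if data:
        conn.bio_write(data)
    # Drive the handshake forward as far as possible.
    try:
        conn.do_handshake()
    except SSL.WantReadError:
        pass  # needs more ciphertext from the peer
    # Ship out whatever OpenSSL has produced.
    try:
        transport.send(conn.bio_read(4096))
    except SSL.WantReadError:
        pass  # nothing buffered for sending right now
```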
Note that if the + socket's connect_ex method doesn't return 0, SSL won't be initialized. + + :param addr: A remove address + :return: What the socket's connect_ex method returns + """ + connect_ex = self._socket.connect_ex + self.set_connect_state() + return connect_ex(addr) + + def accept(self): + """ + Accept incoming connection and set up SSL on it + + :return: A (conn,addr) pair where conn is a Connection and addr is an + address + """ + client, addr = self._socket.accept() + conn = Connection(self._context, client) + conn.set_accept_state() + return (conn, addr) + + def bio_shutdown(self): + """ + When using non-socket connections this function signals end of + data on the input for this connection. + + :return: None + """ + if self._from_ssl is None: + raise TypeError("Connection sock was not None") + + _lib.BIO_set_mem_eof_return(self._into_ssl, 0) + + def shutdown(self): + """ + Send closure alert + + :return: True if the shutdown completed successfully (i.e. both sides + have sent closure alerts), false otherwise (i.e. you have to + wait for a ZeroReturnError on a recv() method call + """ + result = _lib.SSL_shutdown(self._ssl) + if result < 0: + self._raise_ssl_error(self._ssl, result) + elif result > 0: + return True + else: + return False + + def get_cipher_list(self): + """ + Retrieve the list of ciphers used by the Connection object. + + :return: A list of native cipher strings. + """ + ciphers = [] + for i in count(): + result = _lib.SSL_get_cipher_list(self._ssl, i) + if result == _ffi.NULL: + break + ciphers.append(_native(_ffi.string(result))) + return ciphers + + def get_client_ca_list(self): + """ + Get CAs whose certificates are suggested for client authentication. + + :return: If this is a server connection, a list of X509Names + representing the acceptable CAs as set by + :py:meth:`OpenSSL.SSL.Context.set_client_ca_list` or + :py:meth:`OpenSSL.SSL.Context.add_client_ca`. If this is a client + connection, the list of such X509Names sent by the server, or an + empty list if that has not yet happened. + """ + ca_names = _lib.SSL_get_client_CA_list(self._ssl) + if ca_names == _ffi.NULL: + # TODO: This is untested. + return [] + + result = [] + for i in range(_lib.sk_X509_NAME_num(ca_names)): + name = _lib.sk_X509_NAME_value(ca_names, i) + copy = _lib.X509_NAME_dup(name) + if copy == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + pyname = X509Name.__new__(X509Name) + pyname._name = _ffi.gc(copy, _lib.X509_NAME_free) + result.append(pyname) + return result + + def makefile(self): + """ + The makefile() method is not implemented, since there is no dup + semantics for SSL connections + + :raise: NotImplementedError + """ + raise NotImplementedError( + "Cannot make file object of OpenSSL.SSL.Connection") + + def get_app_data(self): + """ + Get application data + + :return: The application data + """ + return self._app_data + + def set_app_data(self, data): + """ + Set application data + + :param data - The application data + :return: None + """ + self._app_data = data + + def get_shutdown(self): + """ + Get shutdown state + + :return: The shutdown state, a bitvector of SENT_SHUTDOWN, + RECEIVED_SHUTDOWN. + """ + return _lib.SSL_get_shutdown(self._ssl) + + def set_shutdown(self, state): + """ + Set shutdown state + + :param state - bitvector of SENT_SHUTDOWN, RECEIVED_SHUTDOWN. 
+ :return: None + """ + if not isinstance(state, integer_types): + raise TypeError("state must be an integer") + + _lib.SSL_set_shutdown(self._ssl, state) + + def get_state_string(self): + """ + Retrieve a verbose string detailing the state of the Connection. + + :return: A string representing the state + :rtype: bytes + """ + return _ffi.string(_lib.SSL_state_string_long(self._ssl)) + + def server_random(self): + """ + Get a copy of the server hello nonce. + + :return: A string representing the state + """ + if self._ssl.session == _ffi.NULL: + return None + return _ffi.buffer( + self._ssl.s3.server_random, + _lib.SSL3_RANDOM_SIZE)[:] + + def client_random(self): + """ + Get a copy of the client hello nonce. + + :return: A string representing the state + """ + if self._ssl.session == _ffi.NULL: + return None + return _ffi.buffer( + self._ssl.s3.client_random, + _lib.SSL3_RANDOM_SIZE)[:] + + def master_key(self): + """ + Get a copy of the master key. + + :return: A string representing the state + """ + if self._ssl.session == _ffi.NULL: + return None + return _ffi.buffer( + self._ssl.session.master_key, + self._ssl.session.master_key_length)[:] + + def sock_shutdown(self, *args, **kwargs): + """ + See shutdown(2) + + :return: What the socket's shutdown() method returns + """ + return self._socket.shutdown(*args, **kwargs) + + def get_peer_certificate(self): + """ + Retrieve the other side's certificate (if any) + + :return: The peer's certificate + """ + cert = _lib.SSL_get_peer_certificate(self._ssl) + if cert != _ffi.NULL: + pycert = X509.__new__(X509) + pycert._x509 = _ffi.gc(cert, _lib.X509_free) + return pycert + return None + + def get_peer_cert_chain(self): + """ + Retrieve the other side's certificate (if any) + + :return: A list of X509 instances giving the peer's certificate chain, + or None if it does not have one. + """ + cert_stack = _lib.SSL_get_peer_cert_chain(self._ssl) + if cert_stack == _ffi.NULL: + return None + + result = [] + for i in range(_lib.sk_X509_num(cert_stack)): + # TODO could incref instead of dup here + cert = _lib.X509_dup(_lib.sk_X509_value(cert_stack, i)) + pycert = X509.__new__(X509) + pycert._x509 = _ffi.gc(cert, _lib.X509_free) + result.append(pycert) + return result + + def want_read(self): + """ + Checks if more data has to be read from the transport layer to complete + an operation. + + :return: True iff more data has to be read + """ + return _lib.SSL_want_read(self._ssl) + + def want_write(self): + """ + Checks if there is data to write to the transport layer to complete an + operation. + + :return: True iff there is data to write + """ + return _lib.SSL_want_write(self._ssl) + + def set_accept_state(self): + """ + Set the connection to work in server mode. The handshake will be + handled automatically by read/write. + + :return: None + """ + _lib.SSL_set_accept_state(self._ssl) + + def set_connect_state(self): + """ + Set the connection to work in client mode. The handshake will be + handled automatically by read/write. + + :return: None + """ + _lib.SSL_set_connect_state(self._ssl) + + def get_session(self): + """ + Returns the Session currently used. + + @return: An instance of :py:class:`OpenSSL.SSL.Session` or + :py:obj:`None` if no session exists. 
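Once the handshake is done, the accessors above expose the peer's identity; a small sketch (`conn` is an established `Connection`, and `X509.get_subject` comes from `OpenSSL.crypto`):

```python
cert = conn.get_peer_certificate()
if cert is not None:
    print("peer subject:", cert.get_subject())
print("chain length:", len(conn.get_peer_cert_chain() or []))
```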
+ """ + session = _lib.SSL_get1_session(self._ssl) + if session == _ffi.NULL: + return None + + pysession = Session.__new__(Session) + pysession._session = _ffi.gc(session, _lib.SSL_SESSION_free) + return pysession + + def set_session(self, session): + """ + Set the session to be used when the TLS/SSL connection is established. + + :param session: A Session instance representing the session to use. + :returns: None + """ + if not isinstance(session, Session): + raise TypeError("session must be a Session instance") + + result = _lib.SSL_set_session(self._ssl, session._session) + if not result: + _raise_current_error() + + def _get_finished_message(self, function): + """ + Helper to implement :py:meth:`get_finished` and + :py:meth:`get_peer_finished`. + + :param function: Either :py:data:`SSL_get_finished`: or + :py:data:`SSL_get_peer_finished`. + + :return: :py:data:`None` if the desired message has not yet been + received, otherwise the contents of the message. + :rtype: :py:class:`bytes` or :py:class:`NoneType` + """ + # The OpenSSL documentation says nothing about what might happen if the + # count argument given is zero. Specifically, it doesn't say whether + # the output buffer may be NULL in that case or not. Inspection of the + # implementation reveals that it calls memcpy() unconditionally. + # Section 7.1.4, paragraph 1 of the C standard suggests that + # memcpy(NULL, source, 0) is not guaranteed to produce defined (let + # alone desirable) behavior (though it probably does on just about + # every implementation...) + # + # Allocate a tiny buffer to pass in (instead of just passing NULL as + # one might expect) for the initial call so as to be safe against this + # potentially undefined behavior. + empty = _ffi.new("char[]", 0) + size = function(self._ssl, empty, 0) + if size == 0: + # No Finished message so far. + return None + + buf = _ffi.new("char[]", size) + function(self._ssl, buf, size) + return _ffi.buffer(buf, size)[:] + + def get_finished(self): + """ + Obtain the latest `handshake finished` message sent to the peer. + + :return: The contents of the message or :py:obj:`None` if the TLS + handshake has not yet completed. + :rtype: :py:class:`bytes` or :py:class:`NoneType` + """ + return self._get_finished_message(_lib.SSL_get_finished) + + def get_peer_finished(self): + """ + Obtain the latest `handshake finished` message received from the peer. + + :return: The contents of the message or :py:obj:`None` if the TLS + handshake has not yet completed. + :rtype: :py:class:`bytes` or :py:class:`NoneType` + """ + return self._get_finished_message(_lib.SSL_get_peer_finished) + + def get_cipher_name(self): + """ + Obtain the name of the currently used cipher. + + :returns: The name of the currently used cipher or :py:obj:`None` + if no connection has been established. + :rtype: :py:class:`unicode` or :py:class:`NoneType` + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + name = _ffi.string(_lib.SSL_CIPHER_get_name(cipher)) + return name.decode("utf-8") + + def get_cipher_bits(self): + """ + Obtain the number of secret bits of the currently used cipher. + + :returns: The number of secret bits of the currently used cipher + or :py:obj:`None` if no connection has been established. 
+ :rtype: :py:class:`int` or :py:class:`NoneType` + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + return _lib.SSL_CIPHER_get_bits(cipher, _ffi.NULL) + + def get_cipher_version(self): + """ + Obtain the protocol version of the currently used cipher. + + :returns: The protocol name of the currently used cipher + or :py:obj:`None` if no connection has been established. + :rtype: :py:class:`unicode` or :py:class:`NoneType` + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + version = _ffi.string(_lib.SSL_CIPHER_get_version(cipher)) + return version.decode("utf-8") + + def get_protocol_version_name(self): + """ + Obtain the protocol version of the current connection. + + :returns: The TLS version of the current connection, for example + the value for TLS 1.2 would be ``TLSv1.2``or ``Unknown`` + for connections that were not successfully established. + :rtype: :py:class:`unicode` + """ + version = _ffi.string(_lib.SSL_get_version(self._ssl)) + return version.decode("utf-8") + + def get_protocol_version(self): + """ + Obtain the protocol version of the current connection. + + :returns: The TLS version of the current connection, for example + the value for TLS 1 would be 0x769. + :rtype: :py:class:`int` + """ + version = _lib.SSL_version(self._ssl) + return version + + @_requires_npn + def get_next_proto_negotiated(self): + """ + Get the protocol that was negotiated by NPN. + """ + data = _ffi.new("unsigned char **") + data_len = _ffi.new("unsigned int *") + + _lib.SSL_get0_next_proto_negotiated(self._ssl, data, data_len) + + return _ffi.buffer(data[0], data_len[0])[:] + + @_requires_alpn + def set_alpn_protos(self, protos): + """ + Specify the client's ALPN protocol list. + + These protocols are offered to the server during protocol negotiation. + + :param protos: A list of the protocols to be offered to the server. + This list should be a Python list of bytestrings representing the + protocols to offer, e.g. ``[b'http/1.1', b'spdy/2']``. + """ + # Take the list of protocols and join them together, prefixing them + # with their lengths. + protostr = b''.join( + chain.from_iterable((int2byte(len(p)), p) for p in protos) + ) + + # Build a C string from the list. We don't need to save this off + # because OpenSSL immediately copies the data out. + input_str = _ffi.new("unsigned char[]", protostr) + input_str_len = _ffi.cast("unsigned", len(protostr)) + _lib.SSL_set_alpn_protos(self._ssl, input_str, input_str_len) + + @_requires_alpn + def get_alpn_proto_negotiated(self): + """ + Get the protocol that was negotiated by ALPN. + """ + data = _ffi.new("unsigned char **") + data_len = _ffi.new("unsigned int *") + + _lib.SSL_get0_alpn_selected(self._ssl, data, data_len) + + if not data_len: + return b'' + + return _ffi.buffer(data[0], data_len[0])[:] + + +ConnectionType = Connection + +# This is similar to the initialization calls at the end of OpenSSL/crypto.py +# but is exercised mostly by the Context initializer. +_lib.SSL_library_init() diff --git a/lib/python3.4/site-packages/OpenSSL/__init__.py b/lib/python3.4/site-packages/OpenSSL/__init__.py new file mode 100644 index 0000000..b827e3c --- /dev/null +++ b/lib/python3.4/site-packages/OpenSSL/__init__.py @@ -0,0 +1,20 @@ +# Copyright (C) AB Strakt +# See LICENSE for details. 
+ +""" +pyOpenSSL - A simple wrapper around the OpenSSL library +""" + +from OpenSSL import rand, crypto, SSL +from OpenSSL.version import ( + __author__, __copyright__, __email__, __license__, __summary__, __title__, + __uri__, __version__, +) + + +__all__ = [ + "SSL", "crypto", "rand", + + "__author__", "__copyright__", "__email__", "__license__", "__summary__", + "__title__", "__uri__", "__version__", +] diff --git a/lib/python3.4/site-packages/OpenSSL/_util.py b/lib/python3.4/site-packages/OpenSSL/_util.py new file mode 100644 index 0000000..48bcbf5 --- /dev/null +++ b/lib/python3.4/site-packages/OpenSSL/_util.py @@ -0,0 +1,141 @@ +import sys +import warnings + +from six import PY3, binary_type, text_type + +from cryptography.hazmat.bindings.openssl.binding import Binding + + +binding = Binding() +binding.init_static_locks() +ffi = binding.ffi +lib = binding.lib + + +def text(charp): + """ + Get a native string type representing of the given CFFI ``char*`` object. + + :param charp: A C-style string represented using CFFI. + + :return: :class:`str` + """ + if not charp: + return "" + return native(ffi.string(charp)) + + +def exception_from_error_queue(exception_type): + """ + Convert an OpenSSL library failure into a Python exception. + + When a call to the native OpenSSL library fails, this is usually signalled + by the return value, and an error code is stored in an error queue + associated with the current thread. The err library provides functions to + obtain these error codes and textual error messages. + """ + errors = [] + + while True: + error = lib.ERR_get_error() + if error == 0: + break + errors.append(( + text(lib.ERR_lib_error_string(error)), + text(lib.ERR_func_error_string(error)), + text(lib.ERR_reason_error_string(error)))) + + raise exception_type(errors) + + +def make_assert(error): + """ + Create an assert function that uses :func:`exception_from_error_queue` to + raise an exception wrapped by *error*. + """ + def openssl_assert(ok): + """ + If *ok* is not True, retrieve the error from OpenSSL and raise it. + """ + if ok is not True: + exception_from_error_queue(error) + + return openssl_assert + + +def native(s): + """ + Convert :py:class:`bytes` or :py:class:`unicode` to the native + :py:class:`str` type, using UTF-8 encoding if conversion is necessary. + + :raise UnicodeError: The input string is not UTF-8 decodeable. + + :raise TypeError: The input is neither :py:class:`bytes` nor + :py:class:`unicode`. + """ + if not isinstance(s, (binary_type, text_type)): + raise TypeError("%r is neither bytes nor unicode" % s) + if PY3: + if isinstance(s, binary_type): + return s.decode("utf-8") + else: + if isinstance(s, text_type): + return s.encode("utf-8") + return s + + +def path_string(s): + """ + Convert a Python string to a :py:class:`bytes` string identifying the same + path and which can be passed into an OpenSSL API accepting a filename. + + :param s: An instance of :py:class:`bytes` or :py:class:`unicode`. + + :return: An instance of :py:class:`bytes`. + """ + if isinstance(s, binary_type): + return s + elif isinstance(s, text_type): + return s.encode(sys.getfilesystemencoding()) + else: + raise TypeError("Path must be represented as bytes or unicode string") + + +if PY3: + def byte_string(s): + return s.encode("charmap") +else: + def byte_string(s): + return s + + +# A marker object to observe whether some optional arguments are passed any +# value or not. 
+UNSPECIFIED = object()
+
+_TEXT_WARNING = (
+    text_type.__name__ + " for {0} is no longer accepted, use bytes"
+)
+
+
+def text_to_bytes_and_warn(label, obj):
+    """
+    If ``obj`` is text, emit a warning that it should be bytes instead and try
+    to convert it to bytes automatically.
+
+    :param str label: The name of the parameter from which ``obj`` was taken
+        (so a developer can easily find the source of the problem and correct
+        it).
+
+    :return: If ``obj`` is the text string type, a ``bytes`` object giving the
+        UTF-8 encoding of that text is returned. Otherwise, ``obj`` itself is
+        returned.
+    """
+    if isinstance(obj, text_type):
+        warnings.warn(
+            _TEXT_WARNING.format(label),
+            category=DeprecationWarning,
+            stacklevel=3
+        )
+        return obj.encode('utf-8')
+    return obj
diff --git a/lib/python3.4/site-packages/OpenSSL/crypto.py b/lib/python3.4/site-packages/OpenSSL/crypto.py
new file mode 100644
index 0000000..2cb3cbd
--- /dev/null
+++ b/lib/python3.4/site-packages/OpenSSL/crypto.py
@@ -0,0 +1,2807 @@
+import datetime
+
+from base64 import b16encode
+from functools import partial
+from operator import __eq__, __ne__, __lt__, __le__, __gt__, __ge__
+from warnings import warn as _warn
+
+from six import (
+    integer_types as _integer_types,
+    text_type as _text_type,
+    PY3 as _PY3)
+
+from OpenSSL._util import (
+    ffi as _ffi,
+    lib as _lib,
+    exception_from_error_queue as _exception_from_error_queue,
+    byte_string as _byte_string,
+    native as _native,
+    UNSPECIFIED as _UNSPECIFIED,
+    text_to_bytes_and_warn as _text_to_bytes_and_warn,
+)
+
+FILETYPE_PEM = _lib.SSL_FILETYPE_PEM
+FILETYPE_ASN1 = _lib.SSL_FILETYPE_ASN1
+
+# TODO This was an API mistake. OpenSSL has no such constant.
+FILETYPE_TEXT = 2 ** 16 - 1
+
+TYPE_RSA = _lib.EVP_PKEY_RSA
+TYPE_DSA = _lib.EVP_PKEY_DSA
+
+
+class Error(Exception):
+    """
+    An error occurred in an `OpenSSL.crypto` API.
+    """
+
+
+_raise_current_error = partial(_exception_from_error_queue, Error)
+
+
+def _untested_error(where):
+    """
+    An OpenSSL API failed somehow. Additionally, the failure which was
+    encountered isn't one that's exercised by the test suite, so future
+    behavior of pyOpenSSL is now somewhat less predictable.
+    """
+    raise RuntimeError("Unknown %s failure" % (where,))
+
+
+def _new_mem_buf(buffer=None):
+    """
+    Allocate a new OpenSSL memory BIO.
+
+    Arrange for the garbage collector to clean it up automatically.
+
+    :param buffer: None or some bytes to use to put into the BIO so that they
+        can be read out.
+    """
+    if buffer is None:
+        bio = _lib.BIO_new(_lib.BIO_s_mem())
+        free = _lib.BIO_free
+    else:
+        data = _ffi.new("char[]", buffer)
+        bio = _lib.BIO_new_mem_buf(data, len(buffer))
+
+        # Keep the memory alive as long as the bio is alive!
+        def free(bio, ref=data):
+            return _lib.BIO_free(bio)
+
+    if bio == _ffi.NULL:
+        # TODO: This is untested.
+        _raise_current_error()
+
+    bio = _ffi.gc(bio, free)
+    return bio
+
+
+def _bio_to_string(bio):
+    """
+    Copy the contents of an OpenSSL BIO object into a Python byte string.
+    """
+    result_buffer = _ffi.new('char**')
+    buffer_length = _lib.BIO_get_mem_data(bio, result_buffer)
+    return _ffi.buffer(result_buffer[0], buffer_length)[:]
+
+
+def _set_asn1_time(boundary, when):
+    """
+    Set the time value of an ASN1 time object.
+
+    @param boundary: An ASN1_GENERALIZEDTIME pointer (or an object safely
+        castable to that type) which will have its value set.
+    @param when: A string representation of the desired time value.
+
+    @raise TypeError: If C{when} is not a L{bytes} string.
+    @raise ValueError: If C{when} does not represent a time in the required
+        format.
+    @raise RuntimeError: If the time value cannot be set for some other
+        (unspecified) reason.
+    """
+    if not isinstance(when, bytes):
+        raise TypeError("when must be a byte string")
+
+    set_result = _lib.ASN1_GENERALIZEDTIME_set_string(
+        _ffi.cast('ASN1_GENERALIZEDTIME*', boundary), when)
+    if set_result == 0:
+        dummy = _ffi.gc(_lib.ASN1_STRING_new(), _lib.ASN1_STRING_free)
+        _lib.ASN1_STRING_set(dummy, when, len(when))
+        check_result = _lib.ASN1_GENERALIZEDTIME_check(
+            _ffi.cast('ASN1_GENERALIZEDTIME*', dummy))
+        if not check_result:
+            raise ValueError("Invalid string")
+        else:
+            _untested_error("ASN1_GENERALIZEDTIME_set_string")
+
+
+def _get_asn1_time(timestamp):
+    """
+    Retrieve the time value of an ASN1 time object.
+
+    @param timestamp: An ASN1_GENERALIZEDTIME* (or an object safely castable to
+        that type) from which the time value will be retrieved.
+
+    @return: The time value from C{timestamp} as a L{bytes} string in a certain
+        format. Or C{None} if the object contains no time value.
+    """
+    string_timestamp = _ffi.cast('ASN1_STRING*', timestamp)
+    if _lib.ASN1_STRING_length(string_timestamp) == 0:
+        return None
+    elif (
+        _lib.ASN1_STRING_type(string_timestamp) == _lib.V_ASN1_GENERALIZEDTIME
+    ):
+        return _ffi.string(_lib.ASN1_STRING_data(string_timestamp))
+    else:
+        generalized_timestamp = _ffi.new("ASN1_GENERALIZEDTIME**")
+        _lib.ASN1_TIME_to_generalizedtime(timestamp, generalized_timestamp)
+        if generalized_timestamp[0] == _ffi.NULL:
+            # This may happen:
+            #   - if timestamp was not an ASN1_TIME
+            #   - if allocating memory for the ASN1_GENERALIZEDTIME failed
+            #   - if a copy of the time data from timestamp cannot be made for
+            #     the newly allocated ASN1_GENERALIZEDTIME
+            #
+            # These are difficult to test. cffi enforces the ASN1_TIME type.
+            # Memory allocation failures are a pain to trigger
+            # deterministically.
+            _untested_error("ASN1_TIME_to_generalizedtime")
+        else:
+            string_timestamp = _ffi.cast(
+                "ASN1_STRING*", generalized_timestamp[0])
+            string_data = _lib.ASN1_STRING_data(string_timestamp)
+            string_result = _ffi.string(string_data)
+            _lib.ASN1_GENERALIZEDTIME_free(generalized_timestamp[0])
+            return string_result
+
+
+class PKey(object):
+    """
+    A class representing a DSA or RSA public key or key pair.
+    """
+    _only_public = False
+    _initialized = True
+
+    def __init__(self):
+        pkey = _lib.EVP_PKEY_new()
+        self._pkey = _ffi.gc(pkey, _lib.EVP_PKEY_free)
+        self._initialized = False
+
+    def generate_key(self, type, bits):
+        """
+        Generate a key pair of the given type, with the given number of bits.
+
+        This generates a key "into" this object.
+
+        :param type: The key type.
+        :type type: :py:data:`TYPE_RSA` or :py:data:`TYPE_DSA`
+        :param bits: The number of bits.
+        :type bits: :py:data:`int` ``>= 0``
+        :raises TypeError: If :py:data:`type` or :py:data:`bits` isn't
+            of the appropriate type.
+        :raises ValueError: If the number of bits isn't an integer of
+            the appropriate size.
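+
+        Example (editor's illustrative sketch, not from the original
+        docs)::
+
+            key = PKey()
+            key.generate_key(TYPE_RSA, 2048)   # a 2048-bit RSA key pair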
+        :return: :py:const:`None`
+        """
+        if not isinstance(type, int):
+            raise TypeError("type must be an integer")
+
+        if not isinstance(bits, int):
+            raise TypeError("bits must be an integer")
+
+        # TODO Check error return
+        exponent = _lib.BN_new()
+        exponent = _ffi.gc(exponent, _lib.BN_free)
+        _lib.BN_set_word(exponent, _lib.RSA_F4)
+
+        if type == TYPE_RSA:
+            if bits <= 0:
+                raise ValueError("Invalid number of bits")
+
+            rsa = _lib.RSA_new()
+
+            result = _lib.RSA_generate_key_ex(rsa, bits, exponent, _ffi.NULL)
+            if result == 0:
+                # TODO: The test for this case is commented out. Different
+                # builds of OpenSSL appear to have different failure modes that
+                # make it hard to test. Visual inspection of the OpenSSL
+                # source reveals that a return value of 0 signals an error.
+                # Manual testing on a particular build of OpenSSL suggests that
+                # this is probably the appropriate way to handle those errors.
+                _raise_current_error()
+
+            result = _lib.EVP_PKEY_assign_RSA(self._pkey, rsa)
+            if not result:
+                # TODO: It appears as though this can fail if an engine is in
+                # use which does not support RSA.
+                _raise_current_error()
+
+        elif type == TYPE_DSA:
+            dsa = _lib.DSA_new()
+            if dsa == _ffi.NULL:
+                # TODO: This is untested.
+                _raise_current_error()
+
+            dsa = _ffi.gc(dsa, _lib.DSA_free)
+            res = _lib.DSA_generate_parameters_ex(
+                dsa, bits, _ffi.NULL, 0, _ffi.NULL, _ffi.NULL, _ffi.NULL
+            )
+            if res != 1:
+                # TODO: This is untested.
+                _raise_current_error()
+            if not _lib.DSA_generate_key(dsa):
+                # TODO: This is untested.
+                _raise_current_error()
+            if not _lib.EVP_PKEY_set1_DSA(self._pkey, dsa):
+                # TODO: This is untested.
+                _raise_current_error()
+        else:
+            raise Error("No such key type")
+
+        self._initialized = True
+
+    def check(self):
+        """
+        Check the consistency of an RSA private key.
+
+        This is the Python equivalent of OpenSSL's ``RSA_check_key``.
+
+        :return: True if key is consistent.
+        :raise Error: if the key is inconsistent.
+        :raise TypeError: if the key is of a type which cannot be checked.
+            Only RSA keys can currently be checked.
+        """
+        if self._only_public:
+            raise TypeError("public key only")
+
+        if _lib.EVP_PKEY_type(self.type()) != _lib.EVP_PKEY_RSA:
+            raise TypeError("key type unsupported")
+
+        rsa = _lib.EVP_PKEY_get1_RSA(self._pkey)
+        rsa = _ffi.gc(rsa, _lib.RSA_free)
+        result = _lib.RSA_check_key(rsa)
+        if result:
+            return True
+        _raise_current_error()
+
+    def type(self):
+        """
+        Returns the type of the key
+
+        :return: The type of the key.
+        """
+        return _lib.Cryptography_EVP_PKEY_id(self._pkey)
+
+    def bits(self):
+        """
+        Returns the number of bits of the key
+
+        :return: The number of bits of the key.
+        """
+        return _lib.EVP_PKEY_bits(self._pkey)
+
+
+PKeyType = PKey
+
+
+class _EllipticCurve(object):
+    """
+    A representation of a supported elliptic curve.
+
+    @cvar _curves: :py:obj:`None` until an attempt is made to load the curves.
+        Thereafter, a :py:type:`set` containing :py:type:`_EllipticCurve`
+        instances each of which represents one curve supported by the system.
+    @type _curves: :py:type:`NoneType` or :py:type:`set`
+    """
+    _curves = None
+
+    if _PY3:
+        # This is only necessary on Python 3. Moreover, it is broken on
+        # Python 2.
+        def __ne__(self, other):
+            """
+            Implement cooperation with the right-hand side argument of ``!=``.
+
+            Python 3 seems to have dropped this cooperation in this very narrow
+            circumstance.
+ """ + if isinstance(other, _EllipticCurve): + return super(_EllipticCurve, self).__ne__(other) + return NotImplemented + + @classmethod + def _load_elliptic_curves(cls, lib): + """ + Get the curves supported by OpenSSL. + + :param lib: The OpenSSL library binding object. + + :return: A :py:type:`set` of ``cls`` instances giving the names of the + elliptic curves the underlying library supports. + """ + if lib.Cryptography_HAS_EC: + num_curves = lib.EC_get_builtin_curves(_ffi.NULL, 0) + builtin_curves = _ffi.new('EC_builtin_curve[]', num_curves) + # The return value on this call should be num_curves again. We + # could check it to make sure but if it *isn't* then.. what could + # we do? Abort the whole process, I suppose...? -exarkun + lib.EC_get_builtin_curves(builtin_curves, num_curves) + return set( + cls.from_nid(lib, c.nid) + for c in builtin_curves) + return set() + + @classmethod + def _get_elliptic_curves(cls, lib): + """ + Get, cache, and return the curves supported by OpenSSL. + + :param lib: The OpenSSL library binding object. + + :return: A :py:type:`set` of ``cls`` instances giving the names of the + elliptic curves the underlying library supports. + """ + if cls._curves is None: + cls._curves = cls._load_elliptic_curves(lib) + return cls._curves + + @classmethod + def from_nid(cls, lib, nid): + """ + Instantiate a new :py:class:`_EllipticCurve` associated with the given + OpenSSL NID. + + :param lib: The OpenSSL library binding object. + + :param nid: The OpenSSL NID the resulting curve object will represent. + This must be a curve NID (and not, for example, a hash NID) or + subsequent operations will fail in unpredictable ways. + :type nid: :py:class:`int` + + :return: The curve object. + """ + return cls(lib, nid, _ffi.string(lib.OBJ_nid2sn(nid)).decode("ascii")) + + def __init__(self, lib, nid, name): + """ + :param _lib: The :py:mod:`cryptography` binding instance used to + interface with OpenSSL. + + :param _nid: The OpenSSL NID identifying the curve this object + represents. + :type _nid: :py:class:`int` + + :param name: The OpenSSL short name identifying the curve this object + represents. + :type name: :py:class:`unicode` + """ + self._lib = lib + self._nid = nid + self.name = name + + def __repr__(self): + return "" % (self.name,) + + def _to_EC_KEY(self): + """ + Create a new OpenSSL EC_KEY structure initialized to use this curve. + + The structure is automatically garbage collected when the Python object + is garbage collected. + """ + key = self._lib.EC_KEY_new_by_curve_name(self._nid) + return _ffi.gc(key, _lib.EC_KEY_free) + + +def get_elliptic_curves(): + """ + Return a set of objects representing the elliptic curves supported in the + OpenSSL build in use. + + The curve objects have a :py:class:`unicode` ``name`` attribute by which + they identify themselves. + + The curve objects are useful as values for the argument accepted by + :py:meth:`Context.set_tmp_ecdh` to specify which elliptical curve should be + used for ECDHE key exchange. + """ + return _EllipticCurve._get_elliptic_curves(_lib) + + +def get_elliptic_curve(name): + """ + Return a single curve object selected by name. + + See :py:func:`get_elliptic_curves` for information about curve objects. + + :param name: The OpenSSL short name identifying the curve object to + retrieve. + :type name: :py:class:`unicode` + + If the named curve is not supported then :py:class:`ValueError` is raised. 
+ """ + for curve in get_elliptic_curves(): + if curve.name == name: + return curve + raise ValueError("unknown curve name", name) + + +class X509Name(object): + """ + An X.509 Distinguished Name. + + :ivar countryName: The country of the entity. + :ivar C: Alias for :py:attr:`countryName`. + + :ivar stateOrProvinceName: The state or province of the entity. + :ivar ST: Alias for :py:attr:`stateOrProvinceName`. + + :ivar localityName: The locality of the entity. + :ivar L: Alias for :py:attr:`localityName`. + + :ivar organizationName: The organization name of the entity. + :ivar O: Alias for :py:attr:`organizationName`. + + :ivar organizationalUnitName: The organizational unit of the entity. + :ivar OU: Alias for :py:attr:`organizationalUnitName` + + :ivar commonName: The common name of the entity. + :ivar CN: Alias for :py:attr:`commonName`. + + :ivar emailAddress: The e-mail address of the entity. + """ + + def __init__(self, name): + """ + Create a new X509Name, copying the given X509Name instance. + + :param name: The name to copy. + :type name: :py:class:`X509Name` + """ + name = _lib.X509_NAME_dup(name._name) + self._name = _ffi.gc(name, _lib.X509_NAME_free) + + def __setattr__(self, name, value): + if name.startswith('_'): + return super(X509Name, self).__setattr__(name, value) + + # Note: we really do not want str subclasses here, so we do not use + # isinstance. + if type(name) is not str: + raise TypeError("attribute name must be string, not '%.200s'" % ( + type(value).__name__,)) + + nid = _lib.OBJ_txt2nid(_byte_string(name)) + if nid == _lib.NID_undef: + try: + _raise_current_error() + except Error: + pass + raise AttributeError("No such attribute") + + # If there's an old entry for this NID, remove it + for i in range(_lib.X509_NAME_entry_count(self._name)): + ent = _lib.X509_NAME_get_entry(self._name, i) + ent_obj = _lib.X509_NAME_ENTRY_get_object(ent) + ent_nid = _lib.OBJ_obj2nid(ent_obj) + if nid == ent_nid: + ent = _lib.X509_NAME_delete_entry(self._name, i) + _lib.X509_NAME_ENTRY_free(ent) + break + + if isinstance(value, _text_type): + value = value.encode('utf-8') + + add_result = _lib.X509_NAME_add_entry_by_NID( + self._name, nid, _lib.MBSTRING_UTF8, value, -1, -1, 0) + if not add_result: + _raise_current_error() + + def __getattr__(self, name): + """ + Find attribute. An X509Name object has the following attributes: + countryName (alias C), stateOrProvince (alias ST), locality (alias L), + organization (alias O), organizationalUnit (alias OU), commonName + (alias CN) and more... + """ + nid = _lib.OBJ_txt2nid(_byte_string(name)) + if nid == _lib.NID_undef: + # This is a bit weird. OBJ_txt2nid indicated failure, but it seems + # a lower level function, a2d_ASN1_OBJECT, also feels the need to + # push something onto the error queue. If we don't clean that up + # now, someone else will bump into it later and be quite confused. + # See lp#314814. + try: + _raise_current_error() + except Error: + pass + return super(X509Name, self).__getattr__(name) + + entry_index = _lib.X509_NAME_get_index_by_NID(self._name, nid, -1) + if entry_index == -1: + return None + + entry = _lib.X509_NAME_get_entry(self._name, entry_index) + data = _lib.X509_NAME_ENTRY_get_data(entry) + + result_buffer = _ffi.new("unsigned char**") + data_length = _lib.ASN1_STRING_to_UTF8(result_buffer, data) + if data_length < 0: + # TODO: This is untested. 
+            _raise_current_error()
+
+        try:
+            result = _ffi.buffer(
+                result_buffer[0], data_length
+            )[:].decode('utf-8')
+        finally:
+            # XXX untested
+            _lib.OPENSSL_free(result_buffer[0])
+        return result
+
+    def _cmp(op):
+        def f(self, other):
+            if not isinstance(other, X509Name):
+                return NotImplemented
+            result = _lib.X509_NAME_cmp(self._name, other._name)
+            return op(result, 0)
+        return f
+
+    __eq__ = _cmp(__eq__)
+    __ne__ = _cmp(__ne__)
+
+    __lt__ = _cmp(__lt__)
+    __le__ = _cmp(__le__)
+
+    __gt__ = _cmp(__gt__)
+    __ge__ = _cmp(__ge__)
+
+    def __repr__(self):
+        """
+        String representation of an X509Name
+        """
+        result_buffer = _ffi.new("char[]", 512)
+        format_result = _lib.X509_NAME_oneline(
+            self._name, result_buffer, len(result_buffer))
+
+        if format_result == _ffi.NULL:
+            # TODO: This is untested.
+            _raise_current_error()
+
+        return "<X509Name object '%s'>" % (
+            _native(_ffi.string(result_buffer)),)
+
+    def hash(self):
+        """
+        Return an integer representation of the first four bytes of the
+        MD5 digest of the DER representation of the name.
+
+        This is the Python equivalent of OpenSSL's ``X509_NAME_hash``.
+
+        :return: The (integer) hash of this name.
+        :rtype: :py:class:`int`
+        """
+        return _lib.X509_NAME_hash(self._name)
+
+    def der(self):
+        """
+        Return the DER encoding of this name.
+
+        :return: The DER encoded form of this name.
+        :rtype: :py:class:`bytes`
+        """
+        result_buffer = _ffi.new('unsigned char**')
+        encode_result = _lib.i2d_X509_NAME(self._name, result_buffer)
+        if encode_result < 0:
+            # TODO: This is untested.
+            _raise_current_error()
+
+        string_result = _ffi.buffer(result_buffer[0], encode_result)[:]
+        _lib.OPENSSL_free(result_buffer[0])
+        return string_result
+
+    def get_components(self):
+        """
+        Returns the components of this name, as a sequence of 2-tuples.
+
+        :return: The components of this name.
+        :rtype: :py:class:`list` of ``name, value`` tuples.
+        """
+        result = []
+        for i in range(_lib.X509_NAME_entry_count(self._name)):
+            ent = _lib.X509_NAME_get_entry(self._name, i)
+
+            fname = _lib.X509_NAME_ENTRY_get_object(ent)
+            fval = _lib.X509_NAME_ENTRY_get_data(ent)
+
+            nid = _lib.OBJ_obj2nid(fname)
+            name = _lib.OBJ_nid2sn(nid)
+
+            result.append((
+                _ffi.string(name),
+                _ffi.string(
+                    _lib.ASN1_STRING_data(fval),
+                    _lib.ASN1_STRING_length(fval))))
+
+        return result
+
+
+X509NameType = X509Name
+
+
+class X509Extension(object):
+    """
+    An X.509 v3 certificate extension.
+    """
+
+    def __init__(self, type_name, critical, value, subject=None, issuer=None):
+        """
+        Initializes an X509 extension.
+
+        :param type_name: The name of the type of extension_ to create.
+        :type type_name: :py:data:`bytes`
+
+        :param bool critical: A flag indicating whether this is a critical
+            extension.
+
+        :param value: The value of the extension.
+        :type value: :py:data:`bytes`
+
+        :param subject: Optional X509 certificate to use as subject.
+        :type subject: :py:class:`X509`
+
+        :param issuer: Optional X509 certificate to use as issuer.
+        :type issuer: :py:class:`X509`
+
+        .. _extension: https://openssl.org/docs/manmaster/apps/
+            x509v3_config.html#STANDARD-EXTENSIONS
+        """
+        ctx = _ffi.new("X509V3_CTX*")
+
+        # A context is necessary for any extension which uses the r2i
+        # conversion method. That is, X509V3_EXT_nconf may segfault if passed
+        # a NULL ctx. Start off by initializing most of the fields to NULL.
+        _lib.X509V3_set_ctx(ctx, _ffi.NULL, _ffi.NULL, _ffi.NULL, _ffi.NULL, 0)
+
+        # We have no configuration database - but perhaps we should (some
+        # extensions may require it).
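+        # (Editor's note, illustrative only and not part of the original
+        # source: typical constructions look like
+        #     X509Extension(b"basicConstraints", True, b"CA:FALSE")
+        #     X509Extension(b"subjectAltName", False, b"DNS:example.com")
+        # neither of which needs the subject/issuer context prepared below.)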
+ _lib.X509V3_set_ctx_nodb(ctx) + + # Initialize the subject and issuer, if appropriate. ctx is a local, + # and as far as I can tell none of the X509V3_* APIs invoked here steal + # any references, so no need to mess with reference counts or + # duplicates. + if issuer is not None: + if not isinstance(issuer, X509): + raise TypeError("issuer must be an X509 instance") + ctx.issuer_cert = issuer._x509 + if subject is not None: + if not isinstance(subject, X509): + raise TypeError("subject must be an X509 instance") + ctx.subject_cert = subject._x509 + + if critical: + # There are other OpenSSL APIs which would let us pass in critical + # separately, but they're harder to use, and since value is already + # a pile of crappy junk smuggling a ton of utterly important + # structured data, what's the point of trying to avoid nasty stuff + # with strings? (However, X509V3_EXT_i2d in particular seems like + # it would be a better API to invoke. I do not know where to get + # the ext_struc it desires for its last parameter, though.) + value = b"critical," + value + + extension = _lib.X509V3_EXT_nconf(_ffi.NULL, ctx, type_name, value) + if extension == _ffi.NULL: + _raise_current_error() + self._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free) + + @property + def _nid(self): + return _lib.OBJ_obj2nid( + _lib.X509_EXTENSION_get_object(self._extension) + ) + + _prefixes = { + _lib.GEN_EMAIL: "email", + _lib.GEN_DNS: "DNS", + _lib.GEN_URI: "URI", + } + + def _subjectAltNameString(self): + method = _lib.X509V3_EXT_get(self._extension) + if method == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + ext_data = _lib.X509_EXTENSION_get_data(self._extension) + payload = ext_data.data + length = ext_data.length + + payloadptr = _ffi.new("unsigned char**") + payloadptr[0] = payload + + if method.it != _ffi.NULL: + ptr = _lib.ASN1_ITEM_ptr(method.it) + data = _lib.ASN1_item_d2i(_ffi.NULL, payloadptr, length, ptr) + names = _ffi.cast("GENERAL_NAMES*", data) + else: + names = _ffi.cast( + "GENERAL_NAMES*", + method.d2i(_ffi.NULL, payloadptr, length)) + + names = _ffi.gc(names, _lib.GENERAL_NAMES_free) + parts = [] + for i in range(_lib.sk_GENERAL_NAME_num(names)): + name = _lib.sk_GENERAL_NAME_value(names, i) + try: + label = self._prefixes[name.type] + except KeyError: + bio = _new_mem_buf() + _lib.GENERAL_NAME_print(bio, name) + parts.append(_native(_bio_to_string(bio))) + else: + value = _native( + _ffi.buffer(name.d.ia5.data, name.d.ia5.length)[:]) + parts.append(label + ":" + value) + return ", ".join(parts) + + def __str__(self): + """ + :return: a nice text representation of the extension + """ + if _lib.NID_subject_alt_name == self._nid: + return self._subjectAltNameString() + + bio = _new_mem_buf() + print_result = _lib.X509V3_EXT_print(bio, self._extension, 0, 0) + if not print_result: + # TODO: This is untested. + _raise_current_error() + + return _native(_bio_to_string(bio)) + + def get_critical(self): + """ + Returns the critical field of this X.509 extension. + + :return: The critical field. + """ + return _lib.X509_EXTENSION_get_critical(self._extension) + + def get_short_name(self): + """ + Returns the short type name of this X.509 extension. + + The result is a byte string such as :py:const:`b"basicConstraints"`. + + :return: The short type name. + :rtype: :py:data:`bytes` + + .. 
versionadded:: 0.12
+        """
+        obj = _lib.X509_EXTENSION_get_object(self._extension)
+        nid = _lib.OBJ_obj2nid(obj)
+        return _ffi.string(_lib.OBJ_nid2sn(nid))
+
+    def get_data(self):
+        """
+        Returns the data of the X509 extension, encoded as ASN.1.
+
+        :return: The ASN.1 encoded data of this X509 extension.
+        :rtype: :py:data:`bytes`
+
+        .. versionadded:: 0.12
+        """
+        octet_result = _lib.X509_EXTENSION_get_data(self._extension)
+        string_result = _ffi.cast('ASN1_STRING*', octet_result)
+        char_result = _lib.ASN1_STRING_data(string_result)
+        result_length = _lib.ASN1_STRING_length(string_result)
+        return _ffi.buffer(char_result, result_length)[:]
+
+
+X509ExtensionType = X509Extension
+
+
+class X509Req(object):
+    """
+    An X.509 certificate signing request.
+    """
+
+    def __init__(self):
+        req = _lib.X509_REQ_new()
+        self._req = _ffi.gc(req, _lib.X509_REQ_free)
+
+    def set_pubkey(self, pkey):
+        """
+        Set the public key of the certificate signing request.
+
+        :param pkey: The public key to use.
+        :type pkey: :py:class:`PKey`
+
+        :return: :py:const:`None`
+        """
+        set_result = _lib.X509_REQ_set_pubkey(self._req, pkey._pkey)
+        if not set_result:
+            # TODO: This is untested.
+            _raise_current_error()
+
+    def get_pubkey(self):
+        """
+        Get the public key of the certificate signing request.
+
+        :return: The public key.
+        :rtype: :py:class:`PKey`
+        """
+        pkey = PKey.__new__(PKey)
+        pkey._pkey = _lib.X509_REQ_get_pubkey(self._req)
+        if pkey._pkey == _ffi.NULL:
+            # TODO: This is untested.
+            _raise_current_error()
+        pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free)
+        pkey._only_public = True
+        return pkey
+
+    def set_version(self, version):
+        """
+        Set the version subfield (RFC 2459, section 4.1.2.1) of the certificate
+        request.
+
+        :param int version: The version number.
+        :return: :py:const:`None`
+        """
+        set_result = _lib.X509_REQ_set_version(self._req, version)
+        if not set_result:
+            _raise_current_error()
+
+    def get_version(self):
+        """
+        Get the version subfield (RFC 2459, section 4.1.2.1) of the certificate
+        request.
+
+        :return: The value of the version subfield.
+        :rtype: :py:class:`int`
+        """
+        return _lib.X509_REQ_get_version(self._req)
+
+    def get_subject(self):
+        """
+        Return the subject of this certificate signing request.
+
+        This creates a new :class:`X509Name` that wraps the underlying subject
+        name field on the certificate signing request. Modifying it will modify
+        the underlying signing request, and will have the effect of modifying
+        any other :class:`X509Name` that refers to this subject.
+
+        :return: The subject of this certificate signing request.
+        :rtype: :class:`X509Name`
+        """
+        name = X509Name.__new__(X509Name)
+        name._name = _lib.X509_REQ_get_subject_name(self._req)
+        if name._name == _ffi.NULL:
+            # TODO: This is untested.
+            _raise_current_error()
+
+        # The name is owned by the X509Req structure. As long as the X509Name
+        # Python object is alive, keep the X509Req Python object alive.
+        name._owner = self
+
+        return name
+
+    def add_extensions(self, extensions):
+        """
+        Add extensions to the certificate signing request.
+
+        :param extensions: The X.509 extensions to add.
+        :type extensions: iterable of :py:class:`X509Extension`
+        :return: :py:const:`None`
+        """
+        stack = _lib.sk_X509_EXTENSION_new_null()
+        if stack == _ffi.NULL:
+            # TODO: This is untested.
+            _raise_current_error()
+
+        stack = _ffi.gc(stack, _lib.sk_X509_EXTENSION_free)
+
+        for ext in extensions:
+            if not isinstance(ext, X509Extension):
+                raise ValueError("One of the elements is not an X509Extension")
+
+            # TODO push can fail (here and elsewhere)
+            _lib.sk_X509_EXTENSION_push(stack, ext._extension)
+
+        add_result = _lib.X509_REQ_add_extensions(self._req, stack)
+        if not add_result:
+            # TODO: This is untested.
+            _raise_current_error()
+
+    def get_extensions(self):
+        """
+        Get X.509 extensions in the certificate signing request.
+
+        :return: The X.509 extensions in this request.
+        :rtype: :py:class:`list` of :py:class:`X509Extension` objects.
+
+        .. versionadded:: 0.15
+        """
+        exts = []
+        native_exts_obj = _lib.X509_REQ_get_extensions(self._req)
+        for i in range(_lib.sk_X509_EXTENSION_num(native_exts_obj)):
+            ext = X509Extension.__new__(X509Extension)
+            ext._extension = _lib.sk_X509_EXTENSION_value(native_exts_obj, i)
+            exts.append(ext)
+        return exts
+
+    def sign(self, pkey, digest):
+        """
+        Sign the certificate signing request with this key and digest type.
+
+        :param pkey: The key pair to sign with.
+        :type pkey: :py:class:`PKey`
+        :param digest: The name of the message digest to use for the signature,
+            e.g. :py:data:`b"sha1"`.
+        :type digest: :py:class:`bytes`
+        :return: :py:const:`None`
+        """
+        if pkey._only_public:
+            raise ValueError("Key has only public part")
+
+        if not pkey._initialized:
+            raise ValueError("Key is uninitialized")
+
+        digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest))
+        if digest_obj == _ffi.NULL:
+            raise ValueError("No such digest method")
+
+        sign_result = _lib.X509_REQ_sign(self._req, pkey._pkey, digest_obj)
+        if not sign_result:
+            # TODO: This is untested.
+            _raise_current_error()
+
+    def verify(self, pkey):
+        """
+        Verifies the signature on this certificate signing request.
+
+        :param pkey: A public key.
+        :type pkey: :py:class:`PKey`
+        :return: :py:data:`True` if the signature is correct.
+        :rtype: :py:class:`bool`
+        :raises Error: If the signature is invalid or there is a
+            problem verifying the signature.
+        """
+        if not isinstance(pkey, PKey):
+            raise TypeError("pkey must be a PKey instance")
+
+        result = _lib.X509_REQ_verify(self._req, pkey._pkey)
+        if result <= 0:
+            _raise_current_error()
+
+        return result
+
+
+X509ReqType = X509Req
+
+
+class X509(object):
+    """
+    An X.509 certificate.
+    """
+
+    def __init__(self):
+        # TODO Allocation failure? And why not __new__ instead of __init__?
+        x509 = _lib.X509_new()
+        self._x509 = _ffi.gc(x509, _lib.X509_free)
+
+    def set_version(self, version):
+        """
+        Set the version number of the certificate.
+
+        :param version: The version number of the certificate.
+        :type version: :py:class:`int`
+
+        :return: :py:const:`None`
+        """
+        if not isinstance(version, int):
+            raise TypeError("version must be an integer")
+
+        _lib.X509_set_version(self._x509, version)
+
+    def get_version(self):
+        """
+        Return the version number of the certificate.
+
+        :return: The version number of the certificate.
+        :rtype: :py:class:`int`
+        """
+        return _lib.X509_get_version(self._x509)
+
+    def get_pubkey(self):
+        """
+        Get the public key of the certificate.
+
+        :return: The public key.
+        :rtype: :py:class:`PKey`
+        """
+        pkey = PKey.__new__(PKey)
+        pkey._pkey = _lib.X509_get_pubkey(self._x509)
+        if pkey._pkey == _ffi.NULL:
+            _raise_current_error()
+        pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free)
+        pkey._only_public = True
+        return pkey
+
+    def set_pubkey(self, pkey):
+        """
+        Set the public key of the certificate.
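+
+        Example (editor's illustrative sketch of typical certificate
+        assembly, not from the original docs; ``key`` is assumed to be an
+        initialized :py:class:`PKey`)::
+
+            cert = X509()
+            cert.set_pubkey(key)
+            cert.gmtime_adj_notBefore(0)
+            cert.gmtime_adj_notAfter(365 * 24 * 60 * 60)  # one year
+            cert.sign(key, b"sha256")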
+
+        :param pkey: The public key.
+        :type pkey: :py:class:`PKey`
+
+        :return: :py:data:`None`
+        """
+        if not isinstance(pkey, PKey):
+            raise TypeError("pkey must be a PKey instance")
+
+        set_result = _lib.X509_set_pubkey(self._x509, pkey._pkey)
+        if not set_result:
+            _raise_current_error()
+
+    def sign(self, pkey, digest):
+        """
+        Sign the certificate with this key and digest type.
+
+        :param pkey: The key to sign with.
+        :type pkey: :py:class:`PKey`
+
+        :param digest: The name of the message digest to use.
+        :type digest: :py:class:`bytes`
+
+        :return: :py:data:`None`
+        """
+        if not isinstance(pkey, PKey):
+            raise TypeError("pkey must be a PKey instance")
+
+        if pkey._only_public:
+            raise ValueError("Key only has public part")
+
+        if not pkey._initialized:
+            raise ValueError("Key is uninitialized")
+
+        evp_md = _lib.EVP_get_digestbyname(_byte_string(digest))
+        if evp_md == _ffi.NULL:
+            raise ValueError("No such digest method")
+
+        sign_result = _lib.X509_sign(self._x509, pkey._pkey, evp_md)
+        if not sign_result:
+            _raise_current_error()
+
+    def get_signature_algorithm(self):
+        """
+        Return the signature algorithm used in the certificate.
+
+        :return: The name of the algorithm.
+        :rtype: :py:class:`bytes`
+
+        :raises ValueError: If the signature algorithm is undefined.
+
+        .. versionadded:: 0.13
+        """
+        alg = self._x509.cert_info.signature.algorithm
+        nid = _lib.OBJ_obj2nid(alg)
+        if nid == _lib.NID_undef:
+            raise ValueError("Undefined signature algorithm")
+        return _ffi.string(_lib.OBJ_nid2ln(nid))
+
+    def digest(self, digest_name):
+        """
+        Return the digest of the X509 object.
+
+        :param digest_name: The name of the digest algorithm to use.
+        :type digest_name: :py:class:`bytes`
+
+        :return: The digest of the object, formatted as
+            :py:const:`b":"`-delimited hex pairs.
+        :rtype: :py:class:`bytes`
+        """
+        digest = _lib.EVP_get_digestbyname(_byte_string(digest_name))
+        if digest == _ffi.NULL:
+            raise ValueError("No such digest method")
+
+        result_buffer = _ffi.new("char[]", _lib.EVP_MAX_MD_SIZE)
+        result_length = _ffi.new("unsigned int[]", 1)
+        result_length[0] = len(result_buffer)
+
+        digest_result = _lib.X509_digest(
+            self._x509, digest, result_buffer, result_length)
+
+        if not digest_result:
+            # TODO: This is untested.
+            _raise_current_error()
+
+        return b":".join([
+            b16encode(ch).upper() for ch
+            in _ffi.buffer(result_buffer, result_length[0])])
+
+    def subject_name_hash(self):
+        """
+        Return the hash of the X509 subject.
+
+        :return: The hash of the subject.
+        :rtype: :py:class:`bytes`
+        """
+        return _lib.X509_subject_name_hash(self._x509)
+
+    def set_serial_number(self, serial):
+        """
+        Set the serial number of the certificate.
+
+        :param serial: The new serial number.
+        :type serial: :py:class:`int`
+
+        :return: :py:data:`None`
+        """
+        if not isinstance(serial, _integer_types):
+            raise TypeError("serial must be an integer")
+
+        hex_serial = hex(serial)[2:]
+        if not isinstance(hex_serial, bytes):
+            hex_serial = hex_serial.encode('ascii')
+
+        bignum_serial = _ffi.new("BIGNUM**")
+
+        # BN_hex2bn stores the result in &bignum. Unless it doesn't feel like
+        # it. If bignum is still NULL after this call, then the return value
+        # is actually the result. I hope. -exarkun
+        small_serial = _lib.BN_hex2bn(bignum_serial, hex_serial)
+
+        if bignum_serial[0] == _ffi.NULL:
+            set_result = _lib.ASN1_INTEGER_set(
+                _lib.X509_get_serialNumber(self._x509), small_serial)
+            if not set_result:
+                # TODO Not tested
+                _raise_current_error()
+        else:
+            asn1_serial = _lib.BN_to_ASN1_INTEGER(bignum_serial[0], _ffi.NULL)
+            _lib.BN_free(bignum_serial[0])
+            if asn1_serial == _ffi.NULL:
+                # TODO Not tested
+                _raise_current_error()
+            asn1_serial = _ffi.gc(asn1_serial, _lib.ASN1_INTEGER_free)
+            set_result = _lib.X509_set_serialNumber(self._x509, asn1_serial)
+            if not set_result:
+                # TODO Not tested
+                _raise_current_error()
+
+    def get_serial_number(self):
+        """
+        Return the serial number of this certificate.
+
+        :return: The serial number.
+        :rtype: :py:class:`int`
+        """
+        asn1_serial = _lib.X509_get_serialNumber(self._x509)
+        bignum_serial = _lib.ASN1_INTEGER_to_BN(asn1_serial, _ffi.NULL)
+        try:
+            hex_serial = _lib.BN_bn2hex(bignum_serial)
+            try:
+                hexstring_serial = _ffi.string(hex_serial)
+                serial = int(hexstring_serial, 16)
+                return serial
+            finally:
+                _lib.OPENSSL_free(hex_serial)
+        finally:
+            _lib.BN_free(bignum_serial)
+
+    def gmtime_adj_notAfter(self, amount):
+        """
+        Adjust the timestamp at which the certificate stops being valid.
+
+        :param amount: The number of seconds by which to adjust the timestamp.
+        :type amount: :py:class:`int`
+
+        :return: :py:const:`None`
+        """
+        if not isinstance(amount, int):
+            raise TypeError("amount must be an integer")
+
+        notAfter = _lib.X509_get_notAfter(self._x509)
+        _lib.X509_gmtime_adj(notAfter, amount)
+
+    def gmtime_adj_notBefore(self, amount):
+        """
+        Adjust the timestamp at which the certificate starts being valid.
+
+        :param amount: The number of seconds by which to adjust the timestamp.
+        :return: :py:const:`None`
+        """
+        if not isinstance(amount, int):
+            raise TypeError("amount must be an integer")
+
+        notBefore = _lib.X509_get_notBefore(self._x509)
+        _lib.X509_gmtime_adj(notBefore, amount)
+
+    def has_expired(self):
+        """
+        Check whether the certificate has expired.
+
+        :return: :py:const:`True` if the certificate has expired,
+            :py:const:`False` otherwise.
+        :rtype: :py:class:`bool`
+        """
+        time_string = _native(self.get_notAfter())
+        not_after = datetime.datetime.strptime(time_string, "%Y%m%d%H%M%SZ")
+
+        return not_after < datetime.datetime.utcnow()
+
+    def _get_boundary_time(self, which):
+        return _get_asn1_time(which(self._x509))
+
+    def get_notBefore(self):
+        """
+        Get the timestamp at which the certificate starts being valid.
+
+        The timestamp is formatted as an ASN.1 GENERALIZEDTIME::
+
+            YYYYMMDDhhmmssZ
+            YYYYMMDDhhmmss+hhmm
+            YYYYMMDDhhmmss-hhmm
+
+        :return: A timestamp string, or :py:const:`None` if there is none.
+        :rtype: :py:class:`bytes` or :py:const:`None`
+        """
+        return self._get_boundary_time(_lib.X509_get_notBefore)
+
+    def _set_boundary_time(self, which, when):
+        return _set_asn1_time(which(self._x509), when)
+
+    def set_notBefore(self, when):
+        """
+        Set the timestamp at which the certificate starts being valid.
+
+        The timestamp is formatted as an ASN.1 GENERALIZEDTIME::
+
+            YYYYMMDDhhmmssZ
+            YYYYMMDDhhmmss+hhmm
+            YYYYMMDDhhmmss-hhmm
+
+        :param when: A timestamp string.
+        :type when: :py:class:`bytes`
+
+        :return: :py:const:`None`
+        """
+        return self._set_boundary_time(_lib.X509_get_notBefore, when)
+
+    def get_notAfter(self):
+        """
+        Get the timestamp at which the certificate stops being valid.
+ + The timestamp is formatted as an ASN.1 GENERALIZEDTIME:: + + YYYYMMDDhhmmssZ + YYYYMMDDhhmmss+hhmm + YYYYMMDDhhmmss-hhmm + + :return: A timestamp string, or :py:const:`None` if there is none. + :rtype: :py:class:`bytes` or :py:const:`None` + """ + return self._get_boundary_time(_lib.X509_get_notAfter) + + def set_notAfter(self, when): + """ + Set the timestamp at which the certificate stops being valid. + + The timestamp is formatted as an ASN.1 GENERALIZEDTIME:: + + YYYYMMDDhhmmssZ + YYYYMMDDhhmmss+hhmm + YYYYMMDDhhmmss-hhmm + + :param when: A timestamp string. + :type when: :py:class:`bytes` + + :return: :py:const:`None` + """ + return self._set_boundary_time(_lib.X509_get_notAfter, when) + + def _get_name(self, which): + name = X509Name.__new__(X509Name) + name._name = which(self._x509) + if name._name == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + # The name is owned by the X509 structure. As long as the X509Name + # Python object is alive, keep the X509 Python object alive. + name._owner = self + + return name + + def _set_name(self, which, name): + if not isinstance(name, X509Name): + raise TypeError("name must be an X509Name") + set_result = which(self._x509, name._name) + if not set_result: + # TODO: This is untested. + _raise_current_error() + + def get_issuer(self): + """ + Return the issuer of this certificate. + + This creates a new :class:`X509Name` that wraps the underlying issuer + name field on the certificate. Modifying it will modify the underlying + certificate, and will have the effect of modifying any other + :class:`X509Name` that refers to this issuer. + + :return: The issuer of this certificate. + :rtype: :class:`X509Name` + """ + return self._get_name(_lib.X509_get_issuer_name) + + def set_issuer(self, issuer): + """ + Set the issuer of this certificate. + + :param issuer: The issuer. + :type issuer: :py:class:`X509Name` + + :return: :py:const:`None` + """ + return self._set_name(_lib.X509_set_issuer_name, issuer) + + def get_subject(self): + """ + Return the subject of this certificate. + + This creates a new :class:`X509Name` that wraps the underlying subject + name field on the certificate. Modifying it will modify the underlying + certificate, and will have the effect of modifying any other + :class:`X509Name` that refers to this subject. + + :return: The subject of this certificate. + :rtype: :class:`X509Name` + """ + return self._get_name(_lib.X509_get_subject_name) + + def set_subject(self, subject): + """ + Set the subject of this certificate. + + :param subject: The subject. + :type subject: :py:class:`X509Name` + + :return: :py:const:`None` + """ + return self._set_name(_lib.X509_set_subject_name, subject) + + def get_extension_count(self): + """ + Get the number of extensions on this certificate. + + :return: The number of extensions. + :rtype: :py:class:`int` + + .. versionadded:: 0.12 + """ + return _lib.X509_get_ext_count(self._x509) + + def add_extensions(self, extensions): + """ + Add extensions to the certificate. + + :param extensions: The extensions to add. + :type extensions: An iterable of :py:class:`X509Extension` objects. + :return: :py:const:`None` + """ + for ext in extensions: + if not isinstance(ext, X509Extension): + raise ValueError("One of the elements is not an X509Extension") + + add_result = _lib.X509_add_ext(self._x509, ext._extension, -1) + if not add_result: + _raise_current_error() + + def get_extension(self, index): + """ + Get a specific extension of the certificate by index. 
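+
+        Example (editor's illustrative sketch; ``cert`` is assumed to be
+        an :py:class:`X509` instance, and the ordering rule is described
+        below)::
+
+            for i in range(cert.get_extension_count()):
+                print(cert.get_extension(i).get_short_name())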
+
+        Extensions on a certificate are kept in order. The index
+        parameter selects which extension will be returned.
+
+        :param int index: The index of the extension to retrieve.
+        :return: The extension at the specified index.
+        :rtype: :py:class:`X509Extension`
+        :raises IndexError: If the extension index was out of bounds.
+
+        .. versionadded:: 0.12
+        """
+        ext = X509Extension.__new__(X509Extension)
+        ext._extension = _lib.X509_get_ext(self._x509, index)
+        if ext._extension == _ffi.NULL:
+            raise IndexError("extension index out of bounds")
+
+        extension = _lib.X509_EXTENSION_dup(ext._extension)
+        ext._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free)
+        return ext
+
+
+X509Type = X509
+
+
+class X509Store(object):
+    """
+    An X509 certificate store.
+    """
+
+    def __init__(self):
+        store = _lib.X509_STORE_new()
+        self._store = _ffi.gc(store, _lib.X509_STORE_free)
+
+    def add_cert(self, cert):
+        """
+        Adds the certificate :py:data:`cert` to this store.
+
+        This is the Python equivalent of OpenSSL's ``X509_STORE_add_cert``.
+
+        :param X509 cert: The certificate to add to this store.
+        :raises TypeError: If the certificate is not an :py:class:`X509`.
+        :raises Error: If OpenSSL was unhappy with your certificate.
+        :return: :py:data:`None` if the certificate was added successfully.
+        """
+        if not isinstance(cert, X509):
+            raise TypeError()
+
+        result = _lib.X509_STORE_add_cert(self._store, cert._x509)
+        if not result:
+            _raise_current_error()
+
+
+X509StoreType = X509Store
+
+
+class X509StoreContextError(Exception):
+    """
+    An exception raised when an error occurred while verifying a certificate
+    using `OpenSSL.X509StoreContext.verify_certificate`.
+
+    :ivar certificate: The certificate which caused the verification failure.
+    :type certificate: :class:`X509`
+    """
+
+    def __init__(self, message, certificate):
+        super(X509StoreContextError, self).__init__(message)
+        self.certificate = certificate
+
+
+class X509StoreContext(object):
+    """
+    An X.509 store context.
+
+    An :py:class:`X509StoreContext` is used to define some of the criteria for
+    certificate verification. The information encapsulated in this object
+    includes, but is not limited to, a set of trusted certificates,
+    verification parameters, and revoked certificates.
+
+    .. note::
+
+      Currently, one can only set the trusted certificates on an
+      :py:class:`X509StoreContext`. Future versions of pyOpenSSL will expose
+      verification parameters and certificate revocation lists.
+
+    :ivar _store_ctx: The underlying X509_STORE_CTX structure used by this
+        instance. It is dynamically allocated and automatically garbage
+        collected.
+
+    :ivar _store: See the ``store`` ``__init__`` parameter.
+
+    :ivar _cert: See the ``certificate`` ``__init__`` parameter.
+
+    :param X509Store store: The certificates which will be trusted for the
+        purposes of any verifications.
+
+    :param X509 certificate: The certificate to be verified.
+    """
+
+    def __init__(self, store, certificate):
+        store_ctx = _lib.X509_STORE_CTX_new()
+        self._store_ctx = _ffi.gc(store_ctx, _lib.X509_STORE_CTX_free)
+        self._store = store
+        self._cert = certificate
+        # Make the store context available for use after instantiating this
+        # class by initializing it now. Per testing, subsequent calls to
+        # :py:meth:`_init` have no adverse effect.
+        self._init()
+
+    def _init(self):
+        """
+        Set up the store context for a subsequent verification operation.
+ """ + ret = _lib.X509_STORE_CTX_init( + self._store_ctx, self._store._store, self._cert._x509, _ffi.NULL + ) + if ret <= 0: + _raise_current_error() + + def _cleanup(self): + """ + Internally cleans up the store context. + + The store context can then be reused with a new call to + :py:meth:`_init`. + """ + _lib.X509_STORE_CTX_cleanup(self._store_ctx) + + def _exception_from_context(self): + """ + Convert an OpenSSL native context error failure into a Python + exception. + + When a call to native OpenSSL X509_verify_cert fails, additional + information about the failure can be obtained from the store context. + """ + errors = [ + _lib.X509_STORE_CTX_get_error(self._store_ctx), + _lib.X509_STORE_CTX_get_error_depth(self._store_ctx), + _native(_ffi.string(_lib.X509_verify_cert_error_string( + _lib.X509_STORE_CTX_get_error(self._store_ctx)))), + ] + # A context error should always be associated with a certificate, so we + # expect this call to never return :class:`None`. + _x509 = _lib.X509_STORE_CTX_get_current_cert(self._store_ctx) + _cert = _lib.X509_dup(_x509) + pycert = X509.__new__(X509) + pycert._x509 = _ffi.gc(_cert, _lib.X509_free) + return X509StoreContextError(errors, pycert) + + def set_store(self, store): + """ + Set the context's trust store. + + .. versionadded:: 0.15 + + :param X509Store store: The certificates which will be trusted for the + purposes of any *future* verifications. + """ + self._store = store + + def verify_certificate(self): + """ + Verify a certificate in a context. + + .. versionadded:: 0.15 + + :param store_ctx: The :py:class:`X509StoreContext` to verify. + + :raises X509StoreContextError: If an error occurred when validating a + certificate in the context. Sets ``certificate`` attribute to + indicate which certificate caused the error. + """ + # Always re-initialize the store context in case + # :py:meth:`verify_certificate` is called multiple times. 
+        self._init()
+        ret = _lib.X509_verify_cert(self._store_ctx)
+        self._cleanup()
+        if ret <= 0:
+            raise self._exception_from_context()
+
+
+def load_certificate(type, buffer):
+    """
+    Load a certificate from a buffer
+
+    :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1)
+
+    :param buffer: The buffer the certificate is stored in
+    :type buffer: :py:class:`bytes`
+
+    :return: The X509 object
+    """
+    if isinstance(buffer, _text_type):
+        buffer = buffer.encode("ascii")
+
+    bio = _new_mem_buf(buffer)
+
+    if type == FILETYPE_PEM:
+        x509 = _lib.PEM_read_bio_X509(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL)
+    elif type == FILETYPE_ASN1:
+        x509 = _lib.d2i_X509_bio(bio, _ffi.NULL)
+    else:
+        raise ValueError(
+            "type argument must be FILETYPE_PEM or FILETYPE_ASN1")
+
+    if x509 == _ffi.NULL:
+        _raise_current_error()
+
+    cert = X509.__new__(X509)
+    cert._x509 = _ffi.gc(x509, _lib.X509_free)
+    return cert
+
+
+def dump_certificate(type, cert):
+    """
+    Dump a certificate to a buffer
+
+    :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1, or
+        FILETYPE_TEXT)
+    :param cert: The certificate to dump
+    :return: The buffer with the dumped certificate in it
+    """
+    bio = _new_mem_buf()
+
+    if type == FILETYPE_PEM:
+        result_code = _lib.PEM_write_bio_X509(bio, cert._x509)
+    elif type == FILETYPE_ASN1:
+        result_code = _lib.i2d_X509_bio(bio, cert._x509)
+    elif type == FILETYPE_TEXT:
+        result_code = _lib.X509_print_ex(bio, cert._x509, 0, 0)
+    else:
+        raise ValueError(
+            "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or "
+            "FILETYPE_TEXT")
+
+    assert result_code == 1
+    return _bio_to_string(bio)
+
+
+def dump_publickey(type, pkey):
+    """
+    Dump a public key to a buffer.
+
+    :param type: The file type (one of :data:`FILETYPE_PEM` or
+        :data:`FILETYPE_ASN1`).
+    :param PKey pkey: The public key to dump
+    :return: The buffer with the dumped key in it.
+    :rtype: bytes
+    """
+    bio = _new_mem_buf()
+    if type == FILETYPE_PEM:
+        write_bio = _lib.PEM_write_bio_PUBKEY
+    elif type == FILETYPE_ASN1:
+        write_bio = _lib.i2d_PUBKEY_bio
+    else:
+        raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1")
+
+    result_code = write_bio(bio, pkey._pkey)
+    if result_code != 1:  # pragma: no cover
+        _raise_current_error()
+
+    return _bio_to_string(bio)
+
+
+def dump_privatekey(type, pkey, cipher=None, passphrase=None):
+    """
+    Dump a private key to a buffer
+
+    :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1, or
+        FILETYPE_TEXT)
+    :param pkey: The PKey to dump
+    :param cipher: (optional) if encrypted PEM format, the cipher to
+        use
+    :param passphrase: (optional) if encrypted PEM format, this can be either
+        the passphrase to use, or a callback for providing the
+        passphrase.
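+
+    Example (editor's illustrative sketch, not from the original docs;
+    ``key`` is assumed to be an initialized :py:class:`PKey`)::
+
+        pem = dump_privatekey(FILETYPE_PEM, key)
+        encrypted = dump_privatekey(
+            FILETYPE_PEM, key, cipher="aes-256-cbc", passphrase=b"secret")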
+    :return: The buffer with the dumped key in it
+    :rtype: :py:data:`bytes`
+    """
+    bio = _new_mem_buf()
+
+    if cipher is not None:
+        if passphrase is None:
+            raise TypeError(
+                "if a value is given for cipher "
+                "one must also be given for passphrase")
+        cipher_obj = _lib.EVP_get_cipherbyname(_byte_string(cipher))
+        if cipher_obj == _ffi.NULL:
+            raise ValueError("Invalid cipher name")
+    else:
+        cipher_obj = _ffi.NULL
+
+    helper = _PassphraseHelper(type, passphrase)
+    if type == FILETYPE_PEM:
+        result_code = _lib.PEM_write_bio_PrivateKey(
+            bio, pkey._pkey, cipher_obj, _ffi.NULL, 0,
+            helper.callback, helper.callback_args)
+        helper.raise_if_problem()
+    elif type == FILETYPE_ASN1:
+        result_code = _lib.i2d_PrivateKey_bio(bio, pkey._pkey)
+    elif type == FILETYPE_TEXT:
+        rsa = _lib.EVP_PKEY_get1_RSA(pkey._pkey)
+        result_code = _lib.RSA_print(bio, rsa, 0)
+        # TODO RSA_free(rsa)?
+    else:
+        raise ValueError(
+            "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or "
+            "FILETYPE_TEXT")
+
+    if result_code == 0:
+        _raise_current_error()
+
+    return _bio_to_string(bio)
+
+
+class Revoked(object):
+    """
+    A certificate revocation.
+    """
+    # http://www.openssl.org/docs/apps/x509v3_config.html#CRL_distribution_points_
+    # which differs from crl_reasons of crypto/x509v3/v3_enum.c that matches
+    # OCSP_crl_reason_str. We use the latter, just like the command line
+    # program.
+    _crl_reasons = [
+        b"unspecified",
+        b"keyCompromise",
+        b"CACompromise",
+        b"affiliationChanged",
+        b"superseded",
+        b"cessationOfOperation",
+        b"certificateHold",
+        # b"removeFromCRL",
+    ]
+
+    def __init__(self):
+        revoked = _lib.X509_REVOKED_new()
+        self._revoked = _ffi.gc(revoked, _lib.X509_REVOKED_free)
+
+    def set_serial(self, hex_str):
+        """
+        Set the serial number.
+
+        The serial number is formatted as a hexadecimal number encoded in
+        ASCII.
+
+        :param hex_str: The new serial number.
+        :type hex_str: :py:class:`bytes`
+
+        :return: :py:const:`None`
+        """
+        bignum_serial = _ffi.gc(_lib.BN_new(), _lib.BN_free)
+        bignum_ptr = _ffi.new("BIGNUM**")
+        bignum_ptr[0] = bignum_serial
+        bn_result = _lib.BN_hex2bn(bignum_ptr, hex_str)
+        if not bn_result:
+            raise ValueError("bad hex string")
+
+        asn1_serial = _ffi.gc(
+            _lib.BN_to_ASN1_INTEGER(bignum_serial, _ffi.NULL),
+            _lib.ASN1_INTEGER_free)
+        _lib.X509_REVOKED_set_serialNumber(self._revoked, asn1_serial)
+
+    def get_serial(self):
+        """
+        Get the serial number.
+
+        The serial number is formatted as a hexadecimal number encoded in
+        ASCII.
+
+        :return: The serial number.
+        :rtype: :py:class:`bytes`
+        """
+        bio = _new_mem_buf()
+
+        result = _lib.i2a_ASN1_INTEGER(bio, self._revoked.serialNumber)
+        if result < 0:
+            # TODO: This is untested.
+            _raise_current_error()
+
+        return _bio_to_string(bio)
+
+    def _delete_reason(self):
+        stack = self._revoked.extensions
+        for i in range(_lib.sk_X509_EXTENSION_num(stack)):
+            ext = _lib.sk_X509_EXTENSION_value(stack, i)
+            obj = _lib.X509_EXTENSION_get_object(ext)
+            if _lib.OBJ_obj2nid(obj) == _lib.NID_crl_reason:
+                _lib.X509_EXTENSION_free(ext)
+                _lib.sk_X509_EXTENSION_delete(stack, i)
+                break
+
+    def set_reason(self, reason):
+        """
+        Set the reason of this revocation.
+
+        If :py:data:`reason` is :py:const:`None`, delete the reason instead.
+
+        :param reason: The reason string.
+        :type reason: :py:class:`bytes` or :py:class:`NoneType`
+
+        :return: :py:const:`None`
+
+        .. seealso::
+
+            :py:meth:`all_reasons`, which gives you a list of all supported
+            reasons which you might pass to this method.
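+
+        Example (editor's illustrative sketch)::
+
+            revoked = Revoked()
+            revoked.set_reason(b"keyCompromise")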
+ """ + if reason is None: + self._delete_reason() + elif not isinstance(reason, bytes): + raise TypeError("reason must be None or a byte string") + else: + reason = reason.lower().replace(b' ', b'') + reason_code = [r.lower() for r in self._crl_reasons].index(reason) + + new_reason_ext = _lib.ASN1_ENUMERATED_new() + if new_reason_ext == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + new_reason_ext = _ffi.gc(new_reason_ext, _lib.ASN1_ENUMERATED_free) + + set_result = _lib.ASN1_ENUMERATED_set(new_reason_ext, reason_code) + if set_result == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + self._delete_reason() + add_result = _lib.X509_REVOKED_add1_ext_i2d( + self._revoked, _lib.NID_crl_reason, new_reason_ext, 0, 0) + + if not add_result: + # TODO: This is untested. + _raise_current_error() + + def get_reason(self): + """ + Set the reason of this revocation. + + :return: The reason, or :py:const:`None` if there is none. + :rtype: :py:class:`bytes` or :py:class:`NoneType` + + .. seealso:: + + :py:meth:`all_reasons`, which gives you a list of all supported + reasons this method might return. + """ + extensions = self._revoked.extensions + for i in range(_lib.sk_X509_EXTENSION_num(extensions)): + ext = _lib.sk_X509_EXTENSION_value(extensions, i) + obj = _lib.X509_EXTENSION_get_object(ext) + if _lib.OBJ_obj2nid(obj) == _lib.NID_crl_reason: + bio = _new_mem_buf() + + print_result = _lib.X509V3_EXT_print(bio, ext, 0, 0) + if not print_result: + print_result = _lib.M_ASN1_OCTET_STRING_print( + bio, _lib.X509_EXTENSION_get_data(ext) + ) + if print_result == 0: + # TODO: This is untested. + _raise_current_error() + + return _bio_to_string(bio) + + def all_reasons(self): + """ + Return a list of all the supported reason strings. + + This list is a copy; modifying it does not change the supported reason + strings. + + :return: A list of reason strings. + :rtype: :py:class:`list` of :py:class:`bytes` + """ + return self._crl_reasons[:] + + def set_rev_date(self, when): + """ + Set the revocation timestamp. + + :param when: The timestamp of the revocation, as ASN.1 GENERALIZEDTIME. + :type when: :py:class:`bytes` + :return: :py:const:`None` + """ + return _set_asn1_time(self._revoked.revocationDate, when) + + def get_rev_date(self): + """ + Get the revocation timestamp. + + :return: The timestamp of the revocation, as ASN.1 GENERALIZEDTIME. + :rtype: :py:class:`bytes` + """ + return _get_asn1_time(self._revoked.revocationDate) + + +class CRL(object): + """ + A certificate revocation list. + """ + + def __init__(self): + """ + Create a new empty certificate revocation list. + """ + crl = _lib.X509_CRL_new() + self._crl = _ffi.gc(crl, _lib.X509_CRL_free) + + def get_revoked(self): + """ + Return the revocations in this certificate revocation list. + + These revocations will be provided by value, not by reference. + That means it's okay to mutate them: it won't affect this CRL. + + :return: The revocations in this CRL. 
+        :rtype: :py:class:`tuple` of :py:class:`Revoked`
+        """
+        results = []
+        revoked_stack = self._crl.crl.revoked
+        for i in range(_lib.sk_X509_REVOKED_num(revoked_stack)):
+            revoked = _lib.sk_X509_REVOKED_value(revoked_stack, i)
+            revoked_copy = _lib.Cryptography_X509_REVOKED_dup(revoked)
+            pyrev = Revoked.__new__(Revoked)
+            pyrev._revoked = _ffi.gc(revoked_copy, _lib.X509_REVOKED_free)
+            results.append(pyrev)
+        if results:
+            return tuple(results)
+
+    def add_revoked(self, revoked):
+        """
+        Add a revocation (by value, not by reference) to the CRL structure.
+
+        This revocation will be added by value, not by reference. That
+        means it's okay to mutate it after adding: it won't affect
+        this CRL.
+
+        :param revoked: The new revocation.
+        :type revoked: :class:`Revoked`
+
+        :return: :py:const:`None`
+        """
+        copy = _lib.Cryptography_X509_REVOKED_dup(revoked._revoked)
+        if copy == _ffi.NULL:
+            # TODO: This is untested.
+            _raise_current_error()
+
+        add_result = _lib.X509_CRL_add0_revoked(self._crl, copy)
+        if add_result == 0:
+            # TODO: This is untested.
+            _raise_current_error()
+
+    def export(self, cert, key, type=FILETYPE_PEM, days=100,
+               digest=_UNSPECIFIED):
+        """
+        Export a CRL as a string.
+
+        :param cert: The certificate used to sign the CRL.
+        :type cert: :py:class:`X509`
+
+        :param key: The key used to sign the CRL.
+        :type key: :py:class:`PKey`
+
+        :param type: The export format, either :py:data:`FILETYPE_PEM`,
+            :py:data:`FILETYPE_ASN1`, or :py:data:`FILETYPE_TEXT`.
+
+        :param int days: The number of days until the next update of this CRL.
+
+        :param bytes digest: The name of the message digest to use (eg
+            ``b"sha1"``).
+
+        :return: :py:data:`bytes`
+        """
+        if not isinstance(cert, X509):
+            raise TypeError("cert must be an X509 instance")
+        if not isinstance(key, PKey):
+            raise TypeError("key must be a PKey instance")
+        if not isinstance(type, int):
+            raise TypeError("type must be an integer")
+
+        if digest is _UNSPECIFIED:
+            _warn(
+                "The default message digest (md5) is deprecated. "
+                "Pass the name of a message digest explicitly.",
+                category=DeprecationWarning,
+                stacklevel=2,
+            )
+            digest = b"md5"
+
+        digest_obj = _lib.EVP_get_digestbyname(digest)
+        if digest_obj == _ffi.NULL:
+            raise ValueError("No such digest method")
+
+        bio = _lib.BIO_new(_lib.BIO_s_mem())
+        if bio == _ffi.NULL:
+            # TODO: This is untested.
+            _raise_current_error()
+
+        # A scratch time object to give different values to different CRL
+        # fields
+        sometime = _lib.ASN1_TIME_new()
+        if sometime == _ffi.NULL:
+            # TODO: This is untested.
+            _raise_current_error()
+
+        _lib.X509_gmtime_adj(sometime, 0)
+        _lib.X509_CRL_set_lastUpdate(self._crl, sometime)
+
+        _lib.X509_gmtime_adj(sometime, days * 24 * 60 * 60)
+        _lib.X509_CRL_set_nextUpdate(self._crl, sometime)
+
+        _lib.X509_CRL_set_issuer_name(
+            self._crl, _lib.X509_get_subject_name(cert._x509)
+        )
+
+        sign_result = _lib.X509_CRL_sign(self._crl, key._pkey, digest_obj)
+        if not sign_result:
+            _raise_current_error()
+
+        return dump_crl(type, self)
+
+
+CRLType = CRL
+
+
+class PKCS7(object):
+    """
+    A PKCS #7 structure.
+    """
+
+    def type_is_signed(self):
+        """
+        Check if this PKCS7 object is of the type NID_pkcs7_signed.
+
+        :return: True if the PKCS7 is of type signed
+        """
+        return bool(_lib.PKCS7_type_is_signed(self._pkcs7))
+
+    def type_is_enveloped(self):
+        """
+        Check if this PKCS7 object is of the type NID_pkcs7_enveloped.
+
+        :returns: True if the PKCS7 is of type enveloped
+        """
+        return bool(_lib.PKCS7_type_is_enveloped(self._pkcs7))
+
+    def type_is_signedAndEnveloped(self):
+        """
+        Check if this PKCS7 object is of the type NID_pkcs7_signedAndEnveloped.
+
+        :returns: True if the PKCS7 is of type signedAndEnveloped
+        """
+        return bool(_lib.PKCS7_type_is_signedAndEnveloped(self._pkcs7))
+
+    def type_is_data(self):
+        """
+        Check if this PKCS7 object is of the type NID_pkcs7_data.
+
+        :return: True if the PKCS7 is of type data
+        """
+        return bool(_lib.PKCS7_type_is_data(self._pkcs7))
+
+    def get_type_name(self):
+        """
+        Return the type name of the PKCS7 structure.
+
+        :return: A string with the type name
+        """
+        nid = _lib.OBJ_obj2nid(self._pkcs7.type)
+        string_type = _lib.OBJ_nid2sn(nid)
+        return _ffi.string(string_type)
+
+PKCS7Type = PKCS7
+
+
+class PKCS12(object):
+    """
+    A PKCS #12 archive.
+    """
+
+    def __init__(self):
+        self._pkey = None
+        self._cert = None
+        self._cacerts = None
+        self._friendlyname = None
+
+    def get_certificate(self):
+        """
+        Get the certificate in the PKCS #12 structure.
+
+        :return: The certificate, or :py:const:`None` if there is none.
+        :rtype: :py:class:`X509` or :py:const:`None`
+        """
+        return self._cert
+
+    def set_certificate(self, cert):
+        """
+        Set the certificate in the PKCS #12 structure.
+
+        :param cert: The new certificate, or :py:const:`None` to unset it.
+        :type cert: :py:class:`X509` or :py:const:`None`
+
+        :return: :py:const:`None`
+        """
+        # None is accepted here so the certificate can be unset, as the
+        # docstring promises.
+        if cert is not None and not isinstance(cert, X509):
+            raise TypeError("cert must be an X509 instance or None")
+        self._cert = cert
+
+    def get_privatekey(self):
+        """
+        Get the private key in the PKCS #12 structure.
+
+        :return: The private key, or :py:const:`None` if there is none.
+        :rtype: :py:class:`PKey`
+        """
+        return self._pkey
+
+    def set_privatekey(self, pkey):
+        """
+        Set the private key portion of the PKCS #12 structure.
+
+        :param pkey: The new private key, or :py:const:`None` to unset it.
+        :type pkey: :py:class:`PKey` or :py:const:`None`
+
+        :return: :py:const:`None`
+        """
+        if pkey is not None and not isinstance(pkey, PKey):
+            raise TypeError("pkey must be a PKey instance or None")
+        self._pkey = pkey
+
+    def get_ca_certificates(self):
+        """
+        Get the CA certificates in the PKCS #12 structure.
+
+        :return: A tuple with the CA certificates in the chain, or
+            :py:const:`None` if there are none.
+        :rtype: :py:class:`tuple` of :py:class:`X509` or :py:const:`None`
+        """
+        if self._cacerts is not None:
+            return tuple(self._cacerts)
+
+    def set_ca_certificates(self, cacerts):
+        """
+        Replace or set the CA certificates within the PKCS12 object.
+
+        :param cacerts: The new CA certificates, or :py:const:`None` to unset
+            them.
+        :type cacerts: An iterable of :py:class:`X509` or :py:const:`None`
+
+        :return: :py:const:`None`
+        """
+        if cacerts is None:
+            self._cacerts = None
+        else:
+            cacerts = list(cacerts)
+            for cert in cacerts:
+                if not isinstance(cert, X509):
+                    raise TypeError(
+                        "iterable must only contain X509 instances"
+                    )
+            self._cacerts = cacerts
+
+    def set_friendlyname(self, name):
+        """
+        Set the friendly name in the PKCS #12 structure.
+
+        :param name: The new friendly name, or :py:const:`None` to unset.
+        :type name: :py:class:`bytes` or :py:const:`None`
+
+        :return: :py:const:`None`
+        """
+        if name is None:
+            self._friendlyname = None
+        elif not isinstance(name, bytes):
+            raise TypeError(
+                "name must be a byte string or None (not %r)" % (name,)
+            )
+        self._friendlyname = name
+
+    def get_friendlyname(self):
+        """
+        Get the friendly name in the PKCS #12 structure.
+
+        :returns: The friendly name, or :py:const:`None` if there is none.
+        :rtype: :py:class:`bytes` or :py:const:`None`
+        """
+        return self._friendlyname
+
+    def export(self, passphrase=None, iter=2048, maciter=1):
+        """
+        Dump a PKCS12 object as a string.
+
+        For more information, see the :c:func:`PKCS12_create` man page.
+
+        :param passphrase: The passphrase used to encrypt the structure. Unlike
+            some other passphrase arguments, this *must* be a byte string, not
+            a callback.
+        :type passphrase: :py:data:`bytes`
+
+        :param iter: Number of times to repeat the encryption step.
+        :type iter: :py:data:`int`
+
+        :param maciter: Number of times to repeat the MAC step.
+        :type maciter: :py:data:`int`
+
+        :return: The string representation of the PKCS #12 structure.
+        :rtype: :py:class:`bytes`
+        """
+        passphrase = _text_to_bytes_and_warn("passphrase", passphrase)
+
+        if self._cacerts is None:
+            cacerts = _ffi.NULL
+        else:
+            cacerts = _lib.sk_X509_new_null()
+            cacerts = _ffi.gc(cacerts, _lib.sk_X509_free)
+            for cert in self._cacerts:
+                _lib.sk_X509_push(cacerts, cert._x509)
+
+        if passphrase is None:
+            passphrase = _ffi.NULL
+
+        friendlyname = self._friendlyname
+        if friendlyname is None:
+            friendlyname = _ffi.NULL
+
+        if self._pkey is None:
+            pkey = _ffi.NULL
+        else:
+            pkey = self._pkey._pkey
+
+        if self._cert is None:
+            cert = _ffi.NULL
+        else:
+            cert = self._cert._x509
+
+        pkcs12 = _lib.PKCS12_create(
+            passphrase, friendlyname, pkey, cert, cacerts,
+            _lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC,
+            _lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC,
+            iter, maciter, 0)
+        if pkcs12 == _ffi.NULL:
+            _raise_current_error()
+        pkcs12 = _ffi.gc(pkcs12, _lib.PKCS12_free)
+
+        bio = _new_mem_buf()
+        _lib.i2d_PKCS12_bio(bio, pkcs12)
+        return _bio_to_string(bio)
+
+
+PKCS12Type = PKCS12
+
+
+class NetscapeSPKI(object):
+    """
+    A Netscape SPKI object.
+    """
+
+    def __init__(self):
+        spki = _lib.NETSCAPE_SPKI_new()
+        self._spki = _ffi.gc(spki, _lib.NETSCAPE_SPKI_free)
+
+    def sign(self, pkey, digest):
+        """
+        Sign the certificate request with this key and digest type.
+
+        :param pkey: The private key to sign with.
+        :type pkey: :py:class:`PKey`
+
+        :param digest: The message digest to use.
+        :type digest: :py:class:`bytes`
+
+        :return: :py:const:`None`
+        """
+        if pkey._only_public:
+            raise ValueError("Key has only public part")
+
+        if not pkey._initialized:
+            raise ValueError("Key is uninitialized")
+
+        digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest))
+        if digest_obj == _ffi.NULL:
+            raise ValueError("No such digest method")
+
+        sign_result = _lib.NETSCAPE_SPKI_sign(
+            self._spki, pkey._pkey, digest_obj
+        )
+        if not sign_result:
+            # TODO: This is untested.
+            _raise_current_error()
+
+    def verify(self, key):
+        """
+        Verify a signature on a certificate request.
+
+        :param key: The public key that signature is supposedly from.
+        :type key: :py:class:`PKey`
+
+        :return: :py:const:`True` if the signature is correct.
+        :rtype: :py:class:`bool`
+
+        :raises Error: If the signature is invalid, or there was a problem
+            verifying the signature.
+        """
+        answer = _lib.NETSCAPE_SPKI_verify(self._spki, key._pkey)
+        if answer <= 0:
+            _raise_current_error()
+        return True
+
+    def b64_encode(self):
+        """
+        Generate a base64 encoded representation of this SPKI object.
+
+        :return: The base64 encoded string.
+        :rtype: :py:class:`bytes`
+        """
+        encoded = _lib.NETSCAPE_SPKI_b64_encode(self._spki)
+        result = _ffi.string(encoded)
+        _lib.OPENSSL_free(encoded)
+        return result
+
+    def get_pubkey(self):
+        """
+        Get the public key of this SPKI object.
+
+        :return: The public key.
+        :rtype: :py:class:`PKey`
+        """
+        pkey = PKey.__new__(PKey)
+        pkey._pkey = _lib.NETSCAPE_SPKI_get_pubkey(self._spki)
+        if pkey._pkey == _ffi.NULL:
+            # TODO: This is untested.
+            _raise_current_error()
+        pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free)
+        pkey._only_public = True
+        return pkey
+
+    def set_pubkey(self, pkey):
+        """
+        Set the public key of this SPKI object.
+
+        :param pkey: The public key.
+        :return: :py:const:`None`
+        """
+        set_result = _lib.NETSCAPE_SPKI_set_pubkey(self._spki, pkey._pkey)
+        if not set_result:
+            # TODO: This is untested.
+            _raise_current_error()
+
+
+NetscapeSPKIType = NetscapeSPKI
+
+
+class _PassphraseHelper(object):
+    def __init__(self, type, passphrase, more_args=False, truncate=False):
+        if type != FILETYPE_PEM and passphrase is not None:
+            raise ValueError(
+                "only FILETYPE_PEM key format supports encryption"
+            )
+        self._passphrase = passphrase
+        self._more_args = more_args
+        self._truncate = truncate
+        self._problems = []
+
+    @property
+    def callback(self):
+        if self._passphrase is None:
+            return _ffi.NULL
+        elif isinstance(self._passphrase, bytes):
+            return _ffi.NULL
+        elif callable(self._passphrase):
+            return _ffi.callback("pem_password_cb", self._read_passphrase)
+        else:
+            raise TypeError(
+                "Last argument must be a byte string or a callable"
+            )
+
+    @property
+    def callback_args(self):
+        if self._passphrase is None:
+            return _ffi.NULL
+        elif isinstance(self._passphrase, bytes):
+            return self._passphrase
+        elif callable(self._passphrase):
+            return _ffi.NULL
+        else:
+            raise TypeError(
+                "Last argument must be a byte string or a callable"
+            )
+
+    def raise_if_problem(self, exceptionType=Error):
+        try:
+            _exception_from_error_queue(exceptionType)
+        except exceptionType as e:
+            from_queue = e
+            if self._problems:
+                raise self._problems[0]
+            return from_queue
+
+    def _read_passphrase(self, buf, size, rwflag, userdata):
+        try:
+            if self._more_args:
+                result = self._passphrase(size, rwflag, userdata)
+            else:
+                result = self._passphrase(rwflag)
+            if not isinstance(result, bytes):
+                raise ValueError("Bytes expected")
+            if len(result) > size:
+                if self._truncate:
+                    result = result[:size]
+                else:
+                    raise ValueError(
+                        "passphrase returned by callback is too long"
+                    )
+            for i in range(len(result)):
+                buf[i] = result[i:i + 1]
+            return len(result)
+        except Exception as e:
+            self._problems.append(e)
+            return 0
+
+
+def load_publickey(type, buffer):
+    """
+    Load a public key from a buffer.
+
+    :param type: The file type (one of :data:`FILETYPE_PEM`,
+        :data:`FILETYPE_ASN1`).
+    :param buffer: The buffer the key is stored in.
+    :type buffer: A Python string object, either unicode or bytestring.
+    :return: The PKey object.
+    :rtype: :class:`PKey`
+    """
+    if isinstance(buffer, _text_type):
+        buffer = buffer.encode("ascii")
+
+    bio = _new_mem_buf(buffer)
+
+    if type == FILETYPE_PEM:
+        evp_pkey = _lib.PEM_read_bio_PUBKEY(
+            bio, _ffi.NULL, _ffi.NULL, _ffi.NULL)
+    elif type == FILETYPE_ASN1:
+        evp_pkey = _lib.d2i_PUBKEY_bio(bio, _ffi.NULL)
+    else:
+        raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1")
+
+    if evp_pkey == _ffi.NULL:
+        _raise_current_error()
+
+    pkey = PKey.__new__(PKey)
+    pkey._pkey = _ffi.gc(evp_pkey, _lib.EVP_PKEY_free)
+    return pkey
+
+
+def load_privatekey(type, buffer, passphrase=None):
+    """
+    Load a private key from a buffer
+
+    :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1)
+    :param buffer: The buffer the key is stored in
+    :param passphrase: (optional) if encrypted PEM format, this can be
+                       either the passphrase to use, or a callback for
+                       providing the passphrase.
+
+    :return: The PKey object
+    """
+    if isinstance(buffer, _text_type):
+        buffer = buffer.encode("ascii")
+
+    bio = _new_mem_buf(buffer)
+
+    helper = _PassphraseHelper(type, passphrase)
+    if type == FILETYPE_PEM:
+        evp_pkey = _lib.PEM_read_bio_PrivateKey(
+            bio, _ffi.NULL, helper.callback, helper.callback_args)
+        helper.raise_if_problem()
+    elif type == FILETYPE_ASN1:
+        evp_pkey = _lib.d2i_PrivateKey_bio(bio, _ffi.NULL)
+    else:
+        raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1")
+
+    if evp_pkey == _ffi.NULL:
+        _raise_current_error()
+
+    pkey = PKey.__new__(PKey)
+    pkey._pkey = _ffi.gc(evp_pkey, _lib.EVP_PKEY_free)
+    return pkey
+
+
+def dump_certificate_request(type, req):
+    """
+    Dump a certificate request to a buffer
+
+    :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1)
+    :param req: The certificate request to dump
+    :return: The buffer with the dumped certificate request in
+    """
+    bio = _new_mem_buf()
+
+    if type == FILETYPE_PEM:
+        result_code = _lib.PEM_write_bio_X509_REQ(bio, req._req)
+    elif type == FILETYPE_ASN1:
+        result_code = _lib.i2d_X509_REQ_bio(bio, req._req)
+    elif type == FILETYPE_TEXT:
+        result_code = _lib.X509_REQ_print_ex(bio, req._req, 0, 0)
+    else:
+        raise ValueError(
+            "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or "
+            "FILETYPE_TEXT"
+        )
+
+    if result_code == 0:
+        # TODO: This is untested.
+        _raise_current_error()
+
+    return _bio_to_string(bio)
+
+
+def load_certificate_request(type, buffer):
+    """
+    Load a certificate request from a buffer
+
+    :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1)
+    :param buffer: The buffer the certificate request is stored in
+    :return: The X509Req object
+    """
+    if isinstance(buffer, _text_type):
+        buffer = buffer.encode("ascii")
+
+    bio = _new_mem_buf(buffer)
+
+    if type == FILETYPE_PEM:
+        req = _lib.PEM_read_bio_X509_REQ(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL)
+    elif type == FILETYPE_ASN1:
+        req = _lib.d2i_X509_REQ_bio(bio, _ffi.NULL)
+    else:
+        raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1")
+
+    if req == _ffi.NULL:
+        # TODO: This is untested.
+        _raise_current_error()
+
+    x509req = X509Req.__new__(X509Req)
+    x509req._req = _ffi.gc(req, _lib.X509_REQ_free)
+    return x509req
+
+
+def sign(pkey, data, digest):
+    """
+    Sign a data string using the given key and message digest.
+
+    :param pkey: PKey to sign with
+    :param data: data to be signed
+    :param digest: message digest to use
+    :return: signature
+    """
+    data = _text_to_bytes_and_warn("data", data)
+
+    digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest))
+    if digest_obj == _ffi.NULL:
+        raise ValueError("No such digest method")
+
+    md_ctx = _ffi.new("EVP_MD_CTX*")
+    md_ctx = _ffi.gc(md_ctx, _lib.EVP_MD_CTX_cleanup)
+
+    _lib.EVP_SignInit(md_ctx, digest_obj)
+    _lib.EVP_SignUpdate(md_ctx, data, len(data))
+
+    pkey_length = (PKey.bits(pkey) + 7) // 8
+    signature_buffer = _ffi.new("unsigned char[]", pkey_length)
+    signature_length = _ffi.new("unsigned int*")
+    final_result = _lib.EVP_SignFinal(
+        md_ctx, signature_buffer, signature_length, pkey._pkey)
+
+    if final_result != 1:
+        # TODO: This is untested.
+        _raise_current_error()
+
+    return _ffi.buffer(signature_buffer, signature_length[0])[:]
+
+
+def verify(cert, signature, data, digest):
+    """
+    Verify a signature.
+
+    :param cert: signing certificate (X509 object)
+    :param signature: signature returned by sign function
+    :param data: data to be verified
+    :param digest: message digest to use
+    :return: :py:const:`None` if the signature is correct; raises an
+        exception otherwise
+    """
+    data = _text_to_bytes_and_warn("data", data)
+
+    digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest))
+    if digest_obj == _ffi.NULL:
+        raise ValueError("No such digest method")
+
+    pkey = _lib.X509_get_pubkey(cert._x509)
+    if pkey == _ffi.NULL:
+        # TODO: This is untested.
+        _raise_current_error()
+    pkey = _ffi.gc(pkey, _lib.EVP_PKEY_free)
+
+    md_ctx = _ffi.new("EVP_MD_CTX*")
+    md_ctx = _ffi.gc(md_ctx, _lib.EVP_MD_CTX_cleanup)
+
+    _lib.EVP_VerifyInit(md_ctx, digest_obj)
+    _lib.EVP_VerifyUpdate(md_ctx, data, len(data))
+    verify_result = _lib.EVP_VerifyFinal(
+        md_ctx, signature, len(signature), pkey
+    )
+
+    if verify_result != 1:
+        _raise_current_error()
+
+
+def dump_crl(type, crl):
+    """
+    Dump a certificate revocation list to a buffer.
+
+    :param type: The file type (one of ``FILETYPE_PEM``, ``FILETYPE_ASN1``, or
+        ``FILETYPE_TEXT``).
+    :param CRL crl: The CRL to dump.
+
+    :return: The buffer with the CRL.
+    :rtype: :data:`bytes`
+    """
+    bio = _new_mem_buf()
+
+    if type == FILETYPE_PEM:
+        ret = _lib.PEM_write_bio_X509_CRL(bio, crl._crl)
+    elif type == FILETYPE_ASN1:
+        ret = _lib.i2d_X509_CRL_bio(bio, crl._crl)
+    elif type == FILETYPE_TEXT:
+        ret = _lib.X509_CRL_print(bio, crl._crl)
+    else:
+        raise ValueError(
+            "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or "
+            "FILETYPE_TEXT")
+
+    assert ret == 1
+    return _bio_to_string(bio)
+
+
+def load_crl(type, buffer):
+    """
+    Load a certificate revocation list from a buffer
+
+    :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1)
+    :param buffer: The buffer the CRL is stored in
+
+    :return: The CRL object
+    """
+    if isinstance(buffer, _text_type):
+        buffer = buffer.encode("ascii")
+
+    bio = _new_mem_buf(buffer)
+
+    if type == FILETYPE_PEM:
+        crl = _lib.PEM_read_bio_X509_CRL(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL)
+    elif type == FILETYPE_ASN1:
+        crl = _lib.d2i_X509_CRL_bio(bio, _ffi.NULL)
+    else:
+        raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1")
+
+    if crl == _ffi.NULL:
+        _raise_current_error()
+
+    result = CRL.__new__(CRL)
+    # Free the underlying structure when the Python object goes away, as
+    # CRL.__init__ does.
+    result._crl = _ffi.gc(crl, _lib.X509_CRL_free)
+    return result
+
+
+def load_pkcs7_data(type, buffer):
+    """
+    Load pkcs7 data from a buffer
+
+    :param type: The file type (one of FILETYPE_PEM or FILETYPE_ASN1)
+    :param buffer: The buffer with the pkcs7 data.
+    :return: The PKCS7 object
+    """
+    if isinstance(buffer, _text_type):
+        buffer = buffer.encode("ascii")
+
+    bio = _new_mem_buf(buffer)
+
+    if type == FILETYPE_PEM:
+        pkcs7 = _lib.PEM_read_bio_PKCS7(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL)
+    elif type == FILETYPE_ASN1:
+        pkcs7 = _lib.d2i_PKCS7_bio(bio, _ffi.NULL)
+    else:
+        raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1")
+
+    if pkcs7 == _ffi.NULL:
+        _raise_current_error()
+
+    pypkcs7 = PKCS7.__new__(PKCS7)
+    pypkcs7._pkcs7 = _ffi.gc(pkcs7, _lib.PKCS7_free)
+    return pypkcs7
+
+
+def load_pkcs12(buffer, passphrase=None):
+    """
+    Load a PKCS12 object from a buffer
+
+    :param buffer: The buffer the certificate is stored in
+    :param passphrase: (Optional) The password to decrypt the PKCS12 lump
+    :returns: The PKCS12 object
+    """
+    passphrase = _text_to_bytes_and_warn("passphrase", passphrase)
+
+    if isinstance(buffer, _text_type):
+        buffer = buffer.encode("ascii")
+
+    bio = _new_mem_buf(buffer)
+
+    # Use null passphrase if passphrase is None or empty string. With PKCS#12
+    # password based encryption no password and a zero length password are two
+    # different things, but OpenSSL implementation will try both to figure out
+    # which one works.
+    if not passphrase:
+        passphrase = _ffi.NULL
+
+    p12 = _lib.d2i_PKCS12_bio(bio, _ffi.NULL)
+    if p12 == _ffi.NULL:
+        _raise_current_error()
+    p12 = _ffi.gc(p12, _lib.PKCS12_free)
+
+    pkey = _ffi.new("EVP_PKEY**")
+    cert = _ffi.new("X509**")
+    cacerts = _ffi.new("Cryptography_STACK_OF_X509**")
+
+    parse_result = _lib.PKCS12_parse(p12, passphrase, pkey, cert, cacerts)
+    if not parse_result:
+        _raise_current_error()
+
+    cacerts = _ffi.gc(cacerts[0], _lib.sk_X509_free)
+
+    # openssl 1.0.0 sometimes leaves an X509_check_private_key error in the
+    # queue for no particular reason. This error isn't interesting to anyone
+    # outside this function. It's not even interesting to us. Get rid of it.
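+    # (Draining the queue here, and swallowing the resulting Error just
+    # below, keeps that stale entry from surfacing later out of an
+    # unrelated call.)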
+    try:
+        _raise_current_error()
+    except Error:
+        pass
+
+    if pkey[0] == _ffi.NULL:
+        pykey = None
+    else:
+        pykey = PKey.__new__(PKey)
+        pykey._pkey = _ffi.gc(pkey[0], _lib.EVP_PKEY_free)
+
+    if cert[0] == _ffi.NULL:
+        pycert = None
+        friendlyname = None
+    else:
+        pycert = X509.__new__(X509)
+        pycert._x509 = _ffi.gc(cert[0], _lib.X509_free)
+
+        friendlyname_length = _ffi.new("int*")
+        friendlyname_buffer = _lib.X509_alias_get0(
+            cert[0], friendlyname_length
+        )
+        # X509_alias_get0 returns NULL when no alias is set, so check for
+        # NULL before wrapping the result in a buffer.
+        if friendlyname_buffer == _ffi.NULL:
+            friendlyname = None
+        else:
+            friendlyname = _ffi.buffer(
+                friendlyname_buffer, friendlyname_length[0]
+            )[:]
+
+    pycacerts = []
+    for i in range(_lib.sk_X509_num(cacerts)):
+        pycacert = X509.__new__(X509)
+        pycacert._x509 = _lib.sk_X509_value(cacerts, i)
+        pycacerts.append(pycacert)
+    if not pycacerts:
+        pycacerts = None
+
+    pkcs12 = PKCS12.__new__(PKCS12)
+    pkcs12._pkey = pykey
+    pkcs12._cert = pycert
+    pkcs12._cacerts = pycacerts
+    pkcs12._friendlyname = friendlyname
+    return pkcs12
+
+
+# There are no direct unit tests for this initialization. It is tested
+# indirectly since it is necessary for functions like dump_privatekey when
+# using encryption.
+#
+# Thus OpenSSL.test.test_crypto.FunctionTests.test_dump_privatekey_passphrase
+# and some other similar tests may fail without this (though they may not if
+# the Python runtime has already done some initialization of the underlying
+# OpenSSL library (and is linked against the same one that cryptography is
+# using)).
+_lib.OpenSSL_add_all_algorithms()
+
+# This is similar but exercised mainly by exception_from_error_queue. It calls
+# both ERR_load_crypto_strings() and ERR_load_SSL_strings().
+_lib.SSL_load_error_strings()
+
+
+# Set the default string mask to match OpenSSL upstream (since 2005) and
+# RFC5280 recommendations.
+_lib.ASN1_STRING_set_default_mask_asc(b'utf8only')
diff --git a/lib/python3.4/site-packages/OpenSSL/rand.py b/lib/python3.4/site-packages/OpenSSL/rand.py
new file mode 100644
index 0000000..087b243
--- /dev/null
+++ b/lib/python3.4/site-packages/OpenSSL/rand.py
@@ -0,0 +1,211 @@
+"""
+PRNG management routines, thin wrappers.
+"""
+
+import os
+import warnings
+
+from functools import partial
+
+from six import integer_types as _integer_types
+
+from OpenSSL._util import (
+    ffi as _ffi,
+    lib as _lib,
+    exception_from_error_queue as _exception_from_error_queue,
+    path_string as _path_string)
+
+
+class Error(Exception):
+    """
+    An error occurred in an :mod:`OpenSSL.rand` API.
+
+    If the current RAND method supports any errors, this is raised when needed.
+    The default method does not raise this when the entropy pool is depleted.
+
+    Whenever this exception is raised directly, it has a list of error messages
+    from the OpenSSL error queue, where each item is a tuple *(lib, function,
+    reason)*. Here *lib*, *function* and *reason* are all strings, describing
+    where and what the problem is.
+
+    See :manpage:`err(3)` for more information.
+    """
+
+_raise_current_error = partial(_exception_from_error_queue, Error)
+
+_unspecified = object()
+
+_builtin_bytes = bytes
+
+
+def bytes(num_bytes):
+    """
+    Get some random bytes from the PRNG as a byte string.
+
+    This is a wrapper for the C function ``RAND_bytes``.
+
+    :param num_bytes: The number of bytes to fetch.
+
+    :return: A byte string of random bytes.
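+
+    A minimal usage sketch (illustrative; not part of the original
+    documentation)::
+
+        from OpenSSL import rand
+
+        token = rand.bytes(16)  # sixteen random bytes from the PRNG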
+ """ + if not isinstance(num_bytes, _integer_types): + raise TypeError("num_bytes must be an integer") + + if num_bytes < 0: + raise ValueError("num_bytes must not be negative") + + result_buffer = _ffi.new("char[]", num_bytes) + result_code = _lib.RAND_bytes(result_buffer, num_bytes) + if result_code == -1: + # TODO: No tests for this code path. Triggering a RAND_bytes failure + # might involve supplying a custom ENGINE? That's hard. + _raise_current_error() + + return _ffi.buffer(result_buffer)[:] + + +def add(buffer, entropy): + """ + Mix bytes from *string* into the PRNG state. + + The *entropy* argument is (the lower bound of) an estimate of how much + randomness is contained in *string*, measured in bytes. + + For more information, see e.g. :rfc:`1750`. + + :param buffer: Buffer with random data. + :param entropy: The entropy (in bytes) measurement of the buffer. + + :return: :obj:`None` + """ + if not isinstance(buffer, _builtin_bytes): + raise TypeError("buffer must be a byte string") + + if not isinstance(entropy, int): + raise TypeError("entropy must be an integer") + + # TODO Nothing tests this call actually being made, or made properly. + _lib.RAND_add(buffer, len(buffer), entropy) + + +def seed(buffer): + """ + Equivalent to calling :func:`add` with *entropy* as the length of *buffer*. + + :param buffer: Buffer with random data + + :return: :obj:`None` + """ + if not isinstance(buffer, _builtin_bytes): + raise TypeError("buffer must be a byte string") + + # TODO Nothing tests this call actually being made, or made properly. + _lib.RAND_seed(buffer, len(buffer)) + + +def status(): + """ + Check whether the PRNG has been seeded with enough data. + + :return: :obj:`True` if the PRNG is seeded enough, :obj:`False` otherwise. + """ + return _lib.RAND_status() + + +def egd(path, bytes=_unspecified): + """ + Query the system random source and seed the PRNG. + + Does *not* actually query the EGD. + + .. deprecated:: 16.0.0 + EGD was only necessary for some commercial UNIX systems that all + reached their ends of life more than a decade ago. See + `pyca/cryptography#1636 + `_. + + :param path: Ignored. + :param bytes: (optional) The number of bytes to read, default is 255. + + :returns: ``len(bytes)`` or 255 if not specified. + """ + warnings.warn("OpenSSL.rand.egd() is deprecated as of 16.0.0.", + DeprecationWarning) + + if not isinstance(path, _builtin_bytes): + raise TypeError("path must be a byte string") + + if bytes is _unspecified: + bytes = 255 + elif not isinstance(bytes, int): + raise TypeError("bytes must be an integer") + + seed(os.urandom(bytes)) + return bytes + + +def cleanup(): + """ + Erase the memory used by the PRNG. + + This is a wrapper for the C function ``RAND_cleanup``. + + :return: :obj:`None` + """ + # TODO Nothing tests this call actually being made, or made properly. + _lib.RAND_cleanup() + + +def load_file(filename, maxbytes=_unspecified): + """ + Read *maxbytes* of data from *filename* and seed the PRNG with it. + + Read the whole file if *maxbytes* is not specified or negative. + + :param filename: The file to read data from (``bytes`` or ``unicode``). + :param maxbytes: (optional) The number of bytes to read. Default is to + read the entire file. 
+
+    :return: The number of bytes read.
+    """
+    filename = _path_string(filename)
+
+    if maxbytes is _unspecified:
+        maxbytes = -1
+    elif not isinstance(maxbytes, int):
+        raise TypeError("maxbytes must be an integer")
+
+    return _lib.RAND_load_file(filename, maxbytes)
+
+
+def write_file(filename):
+    """
+    Write a number of random bytes (currently 1024) to the file *filename*.
+    This file can then be used with :func:`load_file` to seed the PRNG again.
+
+    :param filename: The file to write data to (``bytes`` or ``unicode``).
+
+    :return: The number of bytes written.
+    """
+    filename = _path_string(filename)
+    return _lib.RAND_write_file(filename)
+
+
+# TODO There are no tests for screen at all
+def screen():
+    """
+    Add the current contents of the screen to the PRNG state.
+
+    Availability: Windows.
+
+    :return: None
+    """
+    _lib.RAND_screen()
+
+if getattr(_lib, 'RAND_screen', None) is None:
+    del screen
+
+
+# TODO There are no tests for the RAND strings being loaded, whatever that
+# means.
+_lib.ERR_load_RAND_strings()
diff --git a/lib/python3.4/site-packages/OpenSSL/tsafe.py b/lib/python3.4/site-packages/OpenSSL/tsafe.py
new file mode 100644
index 0000000..1cc0d2b
--- /dev/null
+++ b/lib/python3.4/site-packages/OpenSSL/tsafe.py
@@ -0,0 +1,24 @@
+from threading import RLock as _RLock
+
+from OpenSSL import SSL as _ssl
+
+
+class Connection:
+    def __init__(self, *args):
+        self._ssl_conn = _ssl.Connection(*args)
+        self._lock = _RLock()
+
+    # Generate a lock-wrapped proxy for each Connection/socket method so a
+    # single instance can be shared between threads.
+    for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read',
+              'renegotiate', 'bind', 'listen', 'connect', 'accept',
+              'setblocking', 'fileno', 'shutdown', 'close', 'get_cipher_list',
+              'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
+              'makefile', 'get_app_data', 'set_app_data', 'state_string',
+              'sock_shutdown', 'get_peer_certificate', 'get_peer_cert_chain',
+              'want_read', 'want_write', 'set_connect_state',
+              'set_accept_state', 'connect_ex', 'sendall'):
+        exec("""def %s(self, *args):
+    self._lock.acquire()
+    try:
+        return self._ssl_conn.%s(*args)
+    finally:
+        self._lock.release()\n""" % (f, f))
diff --git a/lib/python3.4/site-packages/OpenSSL/version.py b/lib/python3.4/site-packages/OpenSSL/version.py
new file mode 100644
index 0000000..715051d
--- /dev/null
+++ b/lib/python3.4/site-packages/OpenSSL/version.py
@@ -0,0 +1,22 @@
+# Copyright (C) AB Strakt
+# Copyright (C) Jean-Paul Calderone
+# See LICENSE for details.
+ +""" +pyOpenSSL - A simple wrapper around the OpenSSL library +""" + +__all__ = [ + "__author__", "__copyright__", "__email__", "__license__", "__summary__", + "__title__", "__uri__", "__version__", +] + +__version__ = "16.0.0" + +__title__ = "pyOpenSSL" +__uri__ = "https://pyopenssl.readthedocs.org/" +__summary__ = "Python wrapper module around the OpenSSL library" +__author__ = "The pyOpenSSL developers" +__email__ = "cryptography-dev@python.org" +__license__ = "Apache License, Version 2.0" +__copyright__ = "Copyright 2001-2016 {0}".format(__author__) diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/PKG-INFO b/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/PKG-INFO new file mode 100644 index 0000000..c95fb48 --- /dev/null +++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/PKG-INFO @@ -0,0 +1,155 @@ +Metadata-Version: 1.1 +Name: SQLAlchemy +Version: 1.0.12 +Summary: Database Abstraction Library +Home-page: http://www.sqlalchemy.org +Author: Mike Bayer +Author-email: mike_mp@zzzcomputing.com +License: MIT License +Description: SQLAlchemy + ========== + + The Python SQL Toolkit and Object Relational Mapper + + Introduction + ------------- + + SQLAlchemy is the Python SQL toolkit and Object Relational Mapper + that gives application developers the full power and + flexibility of SQL. SQLAlchemy provides a full suite + of well known enterprise-level persistence patterns, + designed for efficient and high-performing database + access, adapted into a simple and Pythonic domain + language. + + Major SQLAlchemy features include: + + * An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. + * A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. + Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. + * A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. + * A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. + * All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. + * Database introspection and generation. Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. 
+ + SQLAlchemy's philosophy: + + * SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. + SQLAlchemy aims to accommodate both of these + principles. + * An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. + * The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. + * With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation is used, what + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer- + initiated decision. + * Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. + * Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. + * Never render a literal value in a SQL statement. + Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. + + Documentation + ------------- + + Latest documentation is at: + + http://www.sqlalchemy.org/docs/ + + Installation / Requirements + --------------------------- + + Full documentation for installation is at + `Installation `_. + + Getting Help / Development / Bug reporting + ------------------------------------------ + + Please refer to the `SQLAlchemy Community Guide `_. + + License + ------- + + SQLAlchemy is distributed under the `MIT license + `_. 
+ + +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: Jython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Classifier: Operating System :: OS Independent diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/SOURCES.txt b/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/SOURCES.txt new file mode 100644 index 0000000..f69f69f --- /dev/null +++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/SOURCES.txt @@ -0,0 +1,786 @@ +AUTHORS +CHANGES +LICENSE +MANIFEST.in +README.dialects.rst +README.rst +README.unittests.rst +setup.cfg +setup.py +sqla_nose.py +tox.ini +doc/contents.html +doc/copyright.html +doc/genindex.html +doc/glossary.html +doc/index.html +doc/intro.html +doc/search.html +doc/searchindex.js +doc/_images/sqla_arch_small.png +doc/_images/sqla_engine_arch.png +doc/_modules/index.html +doc/_modules/examples/adjacency_list/adjacency_list.html +doc/_modules/examples/association/basic_association.html +doc/_modules/examples/association/dict_of_sets_with_default.html +doc/_modules/examples/association/proxied_association.html +doc/_modules/examples/custom_attributes/custom_management.html +doc/_modules/examples/custom_attributes/listen_for_events.html +doc/_modules/examples/dogpile_caching/advanced.html +doc/_modules/examples/dogpile_caching/caching_query.html +doc/_modules/examples/dogpile_caching/environment.html +doc/_modules/examples/dogpile_caching/fixture_data.html +doc/_modules/examples/dogpile_caching/helloworld.html +doc/_modules/examples/dogpile_caching/local_session_caching.html +doc/_modules/examples/dogpile_caching/model.html +doc/_modules/examples/dogpile_caching/relationship_caching.html +doc/_modules/examples/dynamic_dict/dynamic_dict.html +doc/_modules/examples/elementtree/adjacency_list.html +doc/_modules/examples/elementtree/optimized_al.html +doc/_modules/examples/elementtree/pickle.html +doc/_modules/examples/generic_associations/discriminator_on_association.html +doc/_modules/examples/generic_associations/generic_fk.html +doc/_modules/examples/generic_associations/table_per_association.html +doc/_modules/examples/generic_associations/table_per_related.html +doc/_modules/examples/graphs/directed_graph.html +doc/_modules/examples/inheritance/concrete.html +doc/_modules/examples/inheritance/joined.html +doc/_modules/examples/inheritance/single.html +doc/_modules/examples/join_conditions/cast.html +doc/_modules/examples/join_conditions/threeway.html +doc/_modules/examples/large_collection/large_collection.html +doc/_modules/examples/materialized_paths/materialized_paths.html +doc/_modules/examples/nested_sets/nested_sets.html +doc/_modules/examples/performance/__main__.html +doc/_modules/examples/performance/bulk_inserts.html +doc/_modules/examples/performance/bulk_updates.html +doc/_modules/examples/performance/large_resultsets.html +doc/_modules/examples/performance/short_selects.html +doc/_modules/examples/performance/single_inserts.html +doc/_modules/examples/postgis/postgis.html +doc/_modules/examples/sharding/attribute_shard.html +doc/_modules/examples/versioned_history/history_meta.html 
+doc/_modules/examples/versioned_history/test_versioning.html +doc/_modules/examples/versioned_rows/versioned_map.html +doc/_modules/examples/versioned_rows/versioned_rows.html +doc/_modules/examples/vertical/dictlike-polymorphic.html +doc/_modules/examples/vertical/dictlike.html +doc/_static/basic.css +doc/_static/changelog.css +doc/_static/comment-bright.png +doc/_static/comment-close.png +doc/_static/comment.png +doc/_static/detectmobile.js +doc/_static/docs.css +doc/_static/doctools.js +doc/_static/down-pressed.png +doc/_static/down.png +doc/_static/file.png +doc/_static/init.js +doc/_static/jquery-1.11.1.js +doc/_static/jquery.js +doc/_static/minus.png +doc/_static/plus.png +doc/_static/pygments.css +doc/_static/searchtools.js +doc/_static/sphinx_paramlinks.css +doc/_static/underscore-1.3.1.js +doc/_static/underscore.js +doc/_static/up-pressed.png +doc/_static/up.png +doc/_static/websupport.js +doc/build/Makefile +doc/build/conf.py +doc/build/contents.rst +doc/build/copyright.rst +doc/build/corrections.py +doc/build/glossary.rst +doc/build/index.rst +doc/build/intro.rst +doc/build/requirements.txt +doc/build/sqla_arch_small.png +doc/build/changelog/changelog_01.rst +doc/build/changelog/changelog_02.rst +doc/build/changelog/changelog_03.rst +doc/build/changelog/changelog_04.rst +doc/build/changelog/changelog_05.rst +doc/build/changelog/changelog_06.rst +doc/build/changelog/changelog_07.rst +doc/build/changelog/changelog_08.rst +doc/build/changelog/changelog_09.rst +doc/build/changelog/changelog_10.rst +doc/build/changelog/index.rst +doc/build/changelog/migration_04.rst +doc/build/changelog/migration_05.rst +doc/build/changelog/migration_06.rst +doc/build/changelog/migration_07.rst +doc/build/changelog/migration_08.rst +doc/build/changelog/migration_09.rst +doc/build/changelog/migration_10.rst +doc/build/core/api_basics.rst +doc/build/core/compiler.rst +doc/build/core/connections.rst +doc/build/core/constraints.rst +doc/build/core/custom_types.rst +doc/build/core/ddl.rst +doc/build/core/defaults.rst +doc/build/core/dml.rst +doc/build/core/engines.rst +doc/build/core/engines_connections.rst +doc/build/core/event.rst +doc/build/core/events.rst +doc/build/core/exceptions.rst +doc/build/core/expression_api.rst +doc/build/core/functions.rst +doc/build/core/index.rst +doc/build/core/inspection.rst +doc/build/core/interfaces.rst +doc/build/core/internals.rst +doc/build/core/metadata.rst +doc/build/core/pooling.rst +doc/build/core/reflection.rst +doc/build/core/schema.rst +doc/build/core/selectable.rst +doc/build/core/serializer.rst +doc/build/core/sqla_engine_arch.png +doc/build/core/sqlelement.rst +doc/build/core/tutorial.rst +doc/build/core/type_api.rst +doc/build/core/type_basics.rst +doc/build/core/types.rst +doc/build/dialects/firebird.rst +doc/build/dialects/index.rst +doc/build/dialects/mssql.rst +doc/build/dialects/mysql.rst +doc/build/dialects/oracle.rst +doc/build/dialects/postgresql.rst +doc/build/dialects/sqlite.rst +doc/build/dialects/sybase.rst +doc/build/faq/connections.rst +doc/build/faq/index.rst +doc/build/faq/metadata_schema.rst +doc/build/faq/ormconfiguration.rst +doc/build/faq/performance.rst +doc/build/faq/sessions.rst +doc/build/faq/sqlexpressions.rst +doc/build/orm/backref.rst +doc/build/orm/basic_relationships.rst +doc/build/orm/cascades.rst +doc/build/orm/classical.rst +doc/build/orm/collections.rst +doc/build/orm/composites.rst +doc/build/orm/constructors.rst +doc/build/orm/contextual.rst +doc/build/orm/deprecated.rst +doc/build/orm/events.rst 
+doc/build/orm/examples.rst +doc/build/orm/exceptions.rst +doc/build/orm/extending.rst +doc/build/orm/index.rst +doc/build/orm/inheritance.rst +doc/build/orm/internals.rst +doc/build/orm/join_conditions.rst +doc/build/orm/loading.rst +doc/build/orm/loading_columns.rst +doc/build/orm/loading_objects.rst +doc/build/orm/loading_relationships.rst +doc/build/orm/mapped_attributes.rst +doc/build/orm/mapped_sql_expr.rst +doc/build/orm/mapper_config.rst +doc/build/orm/mapping_api.rst +doc/build/orm/mapping_columns.rst +doc/build/orm/mapping_styles.rst +doc/build/orm/nonstandard_mappings.rst +doc/build/orm/persistence_techniques.rst +doc/build/orm/query.rst +doc/build/orm/relationship_api.rst +doc/build/orm/relationship_persistence.rst +doc/build/orm/relationships.rst +doc/build/orm/scalar_mapping.rst +doc/build/orm/self_referential.rst +doc/build/orm/session.rst +doc/build/orm/session_api.rst +doc/build/orm/session_basics.rst +doc/build/orm/session_events.rst +doc/build/orm/session_state_management.rst +doc/build/orm/session_transaction.rst +doc/build/orm/tutorial.rst +doc/build/orm/versioning.rst +doc/build/orm/extensions/associationproxy.rst +doc/build/orm/extensions/automap.rst +doc/build/orm/extensions/baked.rst +doc/build/orm/extensions/horizontal_shard.rst +doc/build/orm/extensions/hybrid.rst +doc/build/orm/extensions/index.rst +doc/build/orm/extensions/instrumentation.rst +doc/build/orm/extensions/mutable.rst +doc/build/orm/extensions/orderinglist.rst +doc/build/orm/extensions/declarative/api.rst +doc/build/orm/extensions/declarative/basic_use.rst +doc/build/orm/extensions/declarative/index.rst +doc/build/orm/extensions/declarative/inheritance.rst +doc/build/orm/extensions/declarative/mixins.rst +doc/build/orm/extensions/declarative/relationships.rst +doc/build/orm/extensions/declarative/table_config.rst +doc/build/texinputs/Makefile +doc/build/texinputs/sphinx.sty +doc/changelog/changelog_01.html +doc/changelog/changelog_02.html +doc/changelog/changelog_03.html +doc/changelog/changelog_04.html +doc/changelog/changelog_05.html +doc/changelog/changelog_06.html +doc/changelog/changelog_07.html +doc/changelog/changelog_08.html +doc/changelog/changelog_09.html +doc/changelog/changelog_10.html +doc/changelog/index.html +doc/changelog/migration_04.html +doc/changelog/migration_05.html +doc/changelog/migration_06.html +doc/changelog/migration_07.html +doc/changelog/migration_08.html +doc/changelog/migration_09.html +doc/changelog/migration_10.html +doc/core/api_basics.html +doc/core/compiler.html +doc/core/connections.html +doc/core/constraints.html +doc/core/custom_types.html +doc/core/ddl.html +doc/core/defaults.html +doc/core/dml.html +doc/core/engines.html +doc/core/engines_connections.html +doc/core/event.html +doc/core/events.html +doc/core/exceptions.html +doc/core/expression_api.html +doc/core/functions.html +doc/core/index.html +doc/core/inspection.html +doc/core/interfaces.html +doc/core/internals.html +doc/core/metadata.html +doc/core/pooling.html +doc/core/reflection.html +doc/core/schema.html +doc/core/selectable.html +doc/core/serializer.html +doc/core/sqlelement.html +doc/core/tutorial.html +doc/core/type_api.html +doc/core/type_basics.html +doc/core/types.html +doc/dialects/firebird.html +doc/dialects/index.html +doc/dialects/mssql.html +doc/dialects/mysql.html +doc/dialects/oracle.html +doc/dialects/postgresql.html +doc/dialects/sqlite.html +doc/dialects/sybase.html +doc/faq/connections.html +doc/faq/index.html +doc/faq/metadata_schema.html +doc/faq/ormconfiguration.html 
+doc/faq/performance.html +doc/faq/sessions.html +doc/faq/sqlexpressions.html +doc/orm/backref.html +doc/orm/basic_relationships.html +doc/orm/cascades.html +doc/orm/classical.html +doc/orm/collections.html +doc/orm/composites.html +doc/orm/constructors.html +doc/orm/contextual.html +doc/orm/deprecated.html +doc/orm/events.html +doc/orm/examples.html +doc/orm/exceptions.html +doc/orm/extending.html +doc/orm/index.html +doc/orm/inheritance.html +doc/orm/internals.html +doc/orm/join_conditions.html +doc/orm/loading.html +doc/orm/loading_columns.html +doc/orm/loading_objects.html +doc/orm/loading_relationships.html +doc/orm/mapped_attributes.html +doc/orm/mapped_sql_expr.html +doc/orm/mapper_config.html +doc/orm/mapping_api.html +doc/orm/mapping_columns.html +doc/orm/mapping_styles.html +doc/orm/nonstandard_mappings.html +doc/orm/persistence_techniques.html +doc/orm/query.html +doc/orm/relationship_api.html +doc/orm/relationship_persistence.html +doc/orm/relationships.html +doc/orm/scalar_mapping.html +doc/orm/self_referential.html +doc/orm/session.html +doc/orm/session_api.html +doc/orm/session_basics.html +doc/orm/session_events.html +doc/orm/session_state_management.html +doc/orm/session_transaction.html +doc/orm/tutorial.html +doc/orm/versioning.html +doc/orm/extensions/associationproxy.html +doc/orm/extensions/automap.html +doc/orm/extensions/baked.html +doc/orm/extensions/horizontal_shard.html +doc/orm/extensions/hybrid.html +doc/orm/extensions/index.html +doc/orm/extensions/instrumentation.html +doc/orm/extensions/mutable.html +doc/orm/extensions/orderinglist.html +doc/orm/extensions/declarative/api.html +doc/orm/extensions/declarative/basic_use.html +doc/orm/extensions/declarative/index.html +doc/orm/extensions/declarative/inheritance.html +doc/orm/extensions/declarative/mixins.html +doc/orm/extensions/declarative/relationships.html +doc/orm/extensions/declarative/table_config.html +examples/__init__.py +examples/adjacency_list/__init__.py +examples/adjacency_list/adjacency_list.py +examples/association/__init__.py +examples/association/basic_association.py +examples/association/dict_of_sets_with_default.py +examples/association/proxied_association.py +examples/custom_attributes/__init__.py +examples/custom_attributes/custom_management.py +examples/custom_attributes/listen_for_events.py +examples/dogpile_caching/__init__.py +examples/dogpile_caching/advanced.py +examples/dogpile_caching/caching_query.py +examples/dogpile_caching/environment.py +examples/dogpile_caching/fixture_data.py +examples/dogpile_caching/helloworld.py +examples/dogpile_caching/local_session_caching.py +examples/dogpile_caching/model.py +examples/dogpile_caching/relationship_caching.py +examples/dynamic_dict/__init__.py +examples/dynamic_dict/dynamic_dict.py +examples/elementtree/__init__.py +examples/elementtree/adjacency_list.py +examples/elementtree/optimized_al.py +examples/elementtree/pickle.py +examples/elementtree/test.xml +examples/elementtree/test2.xml +examples/elementtree/test3.xml +examples/generic_associations/__init__.py +examples/generic_associations/discriminator_on_association.py +examples/generic_associations/generic_fk.py +examples/generic_associations/table_per_association.py +examples/generic_associations/table_per_related.py +examples/graphs/__init__.py +examples/graphs/directed_graph.py +examples/inheritance/__init__.py +examples/inheritance/concrete.py +examples/inheritance/joined.py +examples/inheritance/single.py +examples/join_conditions/__init__.py +examples/join_conditions/cast.py 
+examples/join_conditions/threeway.py +examples/large_collection/__init__.py +examples/large_collection/large_collection.py +examples/materialized_paths/__init__.py +examples/materialized_paths/materialized_paths.py +examples/nested_sets/__init__.py +examples/nested_sets/nested_sets.py +examples/performance/__init__.py +examples/performance/__main__.py +examples/performance/bulk_inserts.py +examples/performance/bulk_updates.py +examples/performance/large_resultsets.py +examples/performance/short_selects.py +examples/performance/single_inserts.py +examples/postgis/__init__.py +examples/postgis/postgis.py +examples/sharding/__init__.py +examples/sharding/attribute_shard.py +examples/versioned_history/__init__.py +examples/versioned_history/history_meta.py +examples/versioned_history/test_versioning.py +examples/versioned_rows/__init__.py +examples/versioned_rows/versioned_map.py +examples/versioned_rows/versioned_rows.py +examples/vertical/__init__.py +examples/vertical/dictlike-polymorphic.py +examples/vertical/dictlike.py +lib/SQLAlchemy.egg-info/PKG-INFO +lib/SQLAlchemy.egg-info/SOURCES.txt +lib/SQLAlchemy.egg-info/dependency_links.txt +lib/SQLAlchemy.egg-info/top_level.txt +lib/sqlalchemy/__init__.py +lib/sqlalchemy/events.py +lib/sqlalchemy/exc.py +lib/sqlalchemy/inspection.py +lib/sqlalchemy/interfaces.py +lib/sqlalchemy/log.py +lib/sqlalchemy/pool.py +lib/sqlalchemy/processors.py +lib/sqlalchemy/schema.py +lib/sqlalchemy/types.py +lib/sqlalchemy/cextension/processors.c +lib/sqlalchemy/cextension/resultproxy.c +lib/sqlalchemy/cextension/utils.c +lib/sqlalchemy/connectors/__init__.py +lib/sqlalchemy/connectors/mxodbc.py +lib/sqlalchemy/connectors/pyodbc.py +lib/sqlalchemy/connectors/zxJDBC.py +lib/sqlalchemy/databases/__init__.py +lib/sqlalchemy/dialects/__init__.py +lib/sqlalchemy/dialects/postgres.py +lib/sqlalchemy/dialects/type_migration_guidelines.txt +lib/sqlalchemy/dialects/firebird/__init__.py +lib/sqlalchemy/dialects/firebird/base.py +lib/sqlalchemy/dialects/firebird/fdb.py +lib/sqlalchemy/dialects/firebird/kinterbasdb.py +lib/sqlalchemy/dialects/mssql/__init__.py +lib/sqlalchemy/dialects/mssql/adodbapi.py +lib/sqlalchemy/dialects/mssql/base.py +lib/sqlalchemy/dialects/mssql/information_schema.py +lib/sqlalchemy/dialects/mssql/mxodbc.py +lib/sqlalchemy/dialects/mssql/pymssql.py +lib/sqlalchemy/dialects/mssql/pyodbc.py +lib/sqlalchemy/dialects/mssql/zxjdbc.py +lib/sqlalchemy/dialects/mysql/__init__.py +lib/sqlalchemy/dialects/mysql/base.py +lib/sqlalchemy/dialects/mysql/cymysql.py +lib/sqlalchemy/dialects/mysql/gaerdbms.py +lib/sqlalchemy/dialects/mysql/mysqlconnector.py +lib/sqlalchemy/dialects/mysql/mysqldb.py +lib/sqlalchemy/dialects/mysql/oursql.py +lib/sqlalchemy/dialects/mysql/pymysql.py +lib/sqlalchemy/dialects/mysql/pyodbc.py +lib/sqlalchemy/dialects/mysql/zxjdbc.py +lib/sqlalchemy/dialects/oracle/__init__.py +lib/sqlalchemy/dialects/oracle/base.py +lib/sqlalchemy/dialects/oracle/cx_oracle.py +lib/sqlalchemy/dialects/oracle/zxjdbc.py +lib/sqlalchemy/dialects/postgresql/__init__.py +lib/sqlalchemy/dialects/postgresql/base.py +lib/sqlalchemy/dialects/postgresql/constraints.py +lib/sqlalchemy/dialects/postgresql/hstore.py +lib/sqlalchemy/dialects/postgresql/json.py +lib/sqlalchemy/dialects/postgresql/pg8000.py +lib/sqlalchemy/dialects/postgresql/psycopg2.py +lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py +lib/sqlalchemy/dialects/postgresql/pypostgresql.py +lib/sqlalchemy/dialects/postgresql/ranges.py +lib/sqlalchemy/dialects/postgresql/zxjdbc.py 
+lib/sqlalchemy/dialects/sqlite/__init__.py +lib/sqlalchemy/dialects/sqlite/base.py +lib/sqlalchemy/dialects/sqlite/pysqlcipher.py +lib/sqlalchemy/dialects/sqlite/pysqlite.py +lib/sqlalchemy/dialects/sybase/__init__.py +lib/sqlalchemy/dialects/sybase/base.py +lib/sqlalchemy/dialects/sybase/mxodbc.py +lib/sqlalchemy/dialects/sybase/pyodbc.py +lib/sqlalchemy/dialects/sybase/pysybase.py +lib/sqlalchemy/engine/__init__.py +lib/sqlalchemy/engine/base.py +lib/sqlalchemy/engine/default.py +lib/sqlalchemy/engine/interfaces.py +lib/sqlalchemy/engine/reflection.py +lib/sqlalchemy/engine/result.py +lib/sqlalchemy/engine/strategies.py +lib/sqlalchemy/engine/threadlocal.py +lib/sqlalchemy/engine/url.py +lib/sqlalchemy/engine/util.py +lib/sqlalchemy/event/__init__.py +lib/sqlalchemy/event/api.py +lib/sqlalchemy/event/attr.py +lib/sqlalchemy/event/base.py +lib/sqlalchemy/event/legacy.py +lib/sqlalchemy/event/registry.py +lib/sqlalchemy/ext/__init__.py +lib/sqlalchemy/ext/associationproxy.py +lib/sqlalchemy/ext/automap.py +lib/sqlalchemy/ext/baked.py +lib/sqlalchemy/ext/compiler.py +lib/sqlalchemy/ext/horizontal_shard.py +lib/sqlalchemy/ext/hybrid.py +lib/sqlalchemy/ext/instrumentation.py +lib/sqlalchemy/ext/mutable.py +lib/sqlalchemy/ext/orderinglist.py +lib/sqlalchemy/ext/serializer.py +lib/sqlalchemy/ext/declarative/__init__.py +lib/sqlalchemy/ext/declarative/api.py +lib/sqlalchemy/ext/declarative/base.py +lib/sqlalchemy/ext/declarative/clsregistry.py +lib/sqlalchemy/orm/__init__.py +lib/sqlalchemy/orm/attributes.py +lib/sqlalchemy/orm/base.py +lib/sqlalchemy/orm/collections.py +lib/sqlalchemy/orm/dependency.py +lib/sqlalchemy/orm/deprecated_interfaces.py +lib/sqlalchemy/orm/descriptor_props.py +lib/sqlalchemy/orm/dynamic.py +lib/sqlalchemy/orm/evaluator.py +lib/sqlalchemy/orm/events.py +lib/sqlalchemy/orm/exc.py +lib/sqlalchemy/orm/identity.py +lib/sqlalchemy/orm/instrumentation.py +lib/sqlalchemy/orm/interfaces.py +lib/sqlalchemy/orm/loading.py +lib/sqlalchemy/orm/mapper.py +lib/sqlalchemy/orm/path_registry.py +lib/sqlalchemy/orm/persistence.py +lib/sqlalchemy/orm/properties.py +lib/sqlalchemy/orm/query.py +lib/sqlalchemy/orm/relationships.py +lib/sqlalchemy/orm/scoping.py +lib/sqlalchemy/orm/session.py +lib/sqlalchemy/orm/state.py +lib/sqlalchemy/orm/strategies.py +lib/sqlalchemy/orm/strategy_options.py +lib/sqlalchemy/orm/sync.py +lib/sqlalchemy/orm/unitofwork.py +lib/sqlalchemy/orm/util.py +lib/sqlalchemy/sql/__init__.py +lib/sqlalchemy/sql/annotation.py +lib/sqlalchemy/sql/base.py +lib/sqlalchemy/sql/compiler.py +lib/sqlalchemy/sql/crud.py +lib/sqlalchemy/sql/ddl.py +lib/sqlalchemy/sql/default_comparator.py +lib/sqlalchemy/sql/dml.py +lib/sqlalchemy/sql/elements.py +lib/sqlalchemy/sql/expression.py +lib/sqlalchemy/sql/functions.py +lib/sqlalchemy/sql/naming.py +lib/sqlalchemy/sql/operators.py +lib/sqlalchemy/sql/schema.py +lib/sqlalchemy/sql/selectable.py +lib/sqlalchemy/sql/sqltypes.py +lib/sqlalchemy/sql/type_api.py +lib/sqlalchemy/sql/util.py +lib/sqlalchemy/sql/visitors.py +lib/sqlalchemy/testing/__init__.py +lib/sqlalchemy/testing/assertions.py +lib/sqlalchemy/testing/assertsql.py +lib/sqlalchemy/testing/config.py +lib/sqlalchemy/testing/distutils_run.py +lib/sqlalchemy/testing/engines.py +lib/sqlalchemy/testing/entities.py +lib/sqlalchemy/testing/exclusions.py +lib/sqlalchemy/testing/fixtures.py +lib/sqlalchemy/testing/mock.py +lib/sqlalchemy/testing/pickleable.py +lib/sqlalchemy/testing/profiling.py +lib/sqlalchemy/testing/provision.py +lib/sqlalchemy/testing/replay_fixture.py 
+lib/sqlalchemy/testing/requirements.py +lib/sqlalchemy/testing/runner.py +lib/sqlalchemy/testing/schema.py +lib/sqlalchemy/testing/util.py +lib/sqlalchemy/testing/warnings.py +lib/sqlalchemy/testing/plugin/__init__.py +lib/sqlalchemy/testing/plugin/bootstrap.py +lib/sqlalchemy/testing/plugin/noseplugin.py +lib/sqlalchemy/testing/plugin/plugin_base.py +lib/sqlalchemy/testing/plugin/pytestplugin.py +lib/sqlalchemy/testing/suite/__init__.py +lib/sqlalchemy/testing/suite/test_ddl.py +lib/sqlalchemy/testing/suite/test_dialect.py +lib/sqlalchemy/testing/suite/test_insert.py +lib/sqlalchemy/testing/suite/test_reflection.py +lib/sqlalchemy/testing/suite/test_results.py +lib/sqlalchemy/testing/suite/test_select.py +lib/sqlalchemy/testing/suite/test_sequence.py +lib/sqlalchemy/testing/suite/test_types.py +lib/sqlalchemy/testing/suite/test_update_delete.py +lib/sqlalchemy/util/__init__.py +lib/sqlalchemy/util/_collections.py +lib/sqlalchemy/util/compat.py +lib/sqlalchemy/util/deprecations.py +lib/sqlalchemy/util/langhelpers.py +lib/sqlalchemy/util/queue.py +lib/sqlalchemy/util/topological.py +test/__init__.py +test/binary_data_one.dat +test/binary_data_two.dat +test/conftest.py +test/requirements.py +test/aaa_profiling/__init__.py +test/aaa_profiling/test_compiler.py +test/aaa_profiling/test_memusage.py +test/aaa_profiling/test_orm.py +test/aaa_profiling/test_pool.py +test/aaa_profiling/test_resultset.py +test/aaa_profiling/test_zoomark.py +test/aaa_profiling/test_zoomark_orm.py +test/base/__init__.py +test/base/test_dependency.py +test/base/test_events.py +test/base/test_except.py +test/base/test_inspect.py +test/base/test_tutorials.py +test/base/test_utils.py +test/dialect/__init__.py +test/dialect/test_firebird.py +test/dialect/test_mxodbc.py +test/dialect/test_oracle.py +test/dialect/test_pyodbc.py +test/dialect/test_sqlite.py +test/dialect/test_suite.py +test/dialect/test_sybase.py +test/dialect/mssql/__init__.py +test/dialect/mssql/test_compiler.py +test/dialect/mssql/test_engine.py +test/dialect/mssql/test_query.py +test/dialect/mssql/test_reflection.py +test/dialect/mssql/test_types.py +test/dialect/mysql/__init__.py +test/dialect/mysql/test_compiler.py +test/dialect/mysql/test_dialect.py +test/dialect/mysql/test_query.py +test/dialect/mysql/test_reflection.py +test/dialect/mysql/test_types.py +test/dialect/postgresql/__init__.py +test/dialect/postgresql/test_compiler.py +test/dialect/postgresql/test_dialect.py +test/dialect/postgresql/test_query.py +test/dialect/postgresql/test_reflection.py +test/dialect/postgresql/test_types.py +test/engine/__init__.py +test/engine/test_bind.py +test/engine/test_ddlevents.py +test/engine/test_execute.py +test/engine/test_logging.py +test/engine/test_parseconnect.py +test/engine/test_pool.py +test/engine/test_processors.py +test/engine/test_reconnect.py +test/engine/test_reflection.py +test/engine/test_transaction.py +test/ext/__init__.py +test/ext/test_associationproxy.py +test/ext/test_automap.py +test/ext/test_baked.py +test/ext/test_compiler.py +test/ext/test_extendedattr.py +test/ext/test_horizontal_shard.py +test/ext/test_hybrid.py +test/ext/test_mutable.py +test/ext/test_orderinglist.py +test/ext/test_serializer.py +test/ext/declarative/__init__.py +test/ext/declarative/test_basic.py +test/ext/declarative/test_clsregistry.py +test/ext/declarative/test_inheritance.py +test/ext/declarative/test_mixin.py +test/ext/declarative/test_reflection.py +test/orm/__init__.py +test/orm/_fixtures.py +test/orm/test_association.py +test/orm/test_assorted_eager.py 
+test/orm/test_attributes.py +test/orm/test_backref_mutations.py +test/orm/test_bind.py +test/orm/test_bulk.py +test/orm/test_bundle.py +test/orm/test_cascade.py +test/orm/test_collection.py +test/orm/test_compile.py +test/orm/test_composites.py +test/orm/test_cycles.py +test/orm/test_default_strategies.py +test/orm/test_defaults.py +test/orm/test_deferred.py +test/orm/test_deprecations.py +test/orm/test_descriptor.py +test/orm/test_dynamic.py +test/orm/test_eager_relations.py +test/orm/test_evaluator.py +test/orm/test_events.py +test/orm/test_expire.py +test/orm/test_froms.py +test/orm/test_generative.py +test/orm/test_hasparent.py +test/orm/test_immediate_load.py +test/orm/test_inspect.py +test/orm/test_instrumentation.py +test/orm/test_joins.py +test/orm/test_lazy_relations.py +test/orm/test_load_on_fks.py +test/orm/test_loading.py +test/orm/test_lockmode.py +test/orm/test_manytomany.py +test/orm/test_mapper.py +test/orm/test_merge.py +test/orm/test_naturalpks.py +test/orm/test_of_type.py +test/orm/test_onetoone.py +test/orm/test_options.py +test/orm/test_pickled.py +test/orm/test_query.py +test/orm/test_rel_fn.py +test/orm/test_relationships.py +test/orm/test_scoping.py +test/orm/test_selectable.py +test/orm/test_session.py +test/orm/test_subquery_relations.py +test/orm/test_sync.py +test/orm/test_transaction.py +test/orm/test_unitofwork.py +test/orm/test_unitofworkv2.py +test/orm/test_update_delete.py +test/orm/test_utils.py +test/orm/test_validators.py +test/orm/test_versioning.py +test/orm/inheritance/__init__.py +test/orm/inheritance/_poly_fixtures.py +test/orm/inheritance/test_abc_inheritance.py +test/orm/inheritance/test_abc_polymorphic.py +test/orm/inheritance/test_assorted_poly.py +test/orm/inheritance/test_basic.py +test/orm/inheritance/test_concrete.py +test/orm/inheritance/test_magazine.py +test/orm/inheritance/test_manytomany.py +test/orm/inheritance/test_poly_linked_list.py +test/orm/inheritance/test_poly_persistence.py +test/orm/inheritance/test_polymorphic_rel.py +test/orm/inheritance/test_productspec.py +test/orm/inheritance/test_relationship.py +test/orm/inheritance/test_selects.py +test/orm/inheritance/test_single.py +test/orm/inheritance/test_with_poly.py +test/perf/invalidate_stresstest.py +test/perf/orm2010.py +test/sql/__init__.py +test/sql/test_case_statement.py +test/sql/test_compiler.py +test/sql/test_constraints.py +test/sql/test_cte.py +test/sql/test_ddlemit.py +test/sql/test_defaults.py +test/sql/test_delete.py +test/sql/test_functions.py +test/sql/test_generative.py +test/sql/test_insert.py +test/sql/test_insert_exec.py +test/sql/test_inspect.py +test/sql/test_join_rewriting.py +test/sql/test_labels.py +test/sql/test_metadata.py +test/sql/test_operators.py +test/sql/test_query.py +test/sql/test_quote.py +test/sql/test_resultset.py +test/sql/test_returning.py +test/sql/test_rowcount.py +test/sql/test_selectable.py +test/sql/test_text.py +test/sql/test_type_expressions.py +test/sql/test_types.py +test/sql/test_unicode.py +test/sql/test_update.py \ No newline at end of file diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/dependency_links.txt b/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/installed-files.txt 
b/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/installed-files.txt new file mode 100644 index 0000000..24f9234 --- /dev/null +++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/installed-files.txt @@ -0,0 +1,374 @@ +../sqlalchemy/inspection.py +../sqlalchemy/events.py +../sqlalchemy/interfaces.py +../sqlalchemy/schema.py +../sqlalchemy/processors.py +../sqlalchemy/__init__.py +../sqlalchemy/types.py +../sqlalchemy/exc.py +../sqlalchemy/pool.py +../sqlalchemy/log.py +../sqlalchemy/sql/compiler.py +../sqlalchemy/sql/sqltypes.py +../sqlalchemy/sql/type_api.py +../sqlalchemy/sql/default_comparator.py +../sqlalchemy/sql/crud.py +../sqlalchemy/sql/schema.py +../sqlalchemy/sql/expression.py +../sqlalchemy/sql/__init__.py +../sqlalchemy/sql/dml.py +../sqlalchemy/sql/operators.py +../sqlalchemy/sql/base.py +../sqlalchemy/sql/selectable.py +../sqlalchemy/sql/util.py +../sqlalchemy/sql/visitors.py +../sqlalchemy/sql/elements.py +../sqlalchemy/sql/functions.py +../sqlalchemy/sql/naming.py +../sqlalchemy/sql/ddl.py +../sqlalchemy/sql/annotation.py +../sqlalchemy/engine/threadlocal.py +../sqlalchemy/engine/interfaces.py +../sqlalchemy/engine/reflection.py +../sqlalchemy/engine/__init__.py +../sqlalchemy/engine/url.py +../sqlalchemy/engine/strategies.py +../sqlalchemy/engine/base.py +../sqlalchemy/engine/util.py +../sqlalchemy/engine/result.py +../sqlalchemy/engine/default.py +../sqlalchemy/event/__init__.py +../sqlalchemy/event/registry.py +../sqlalchemy/event/base.py +../sqlalchemy/event/attr.py +../sqlalchemy/event/api.py +../sqlalchemy/event/legacy.py +../sqlalchemy/connectors/zxJDBC.py +../sqlalchemy/connectors/pyodbc.py +../sqlalchemy/connectors/__init__.py +../sqlalchemy/connectors/mxodbc.py +../sqlalchemy/orm/events.py +../sqlalchemy/orm/loading.py +../sqlalchemy/orm/scoping.py +../sqlalchemy/orm/unitofwork.py +../sqlalchemy/orm/instrumentation.py +../sqlalchemy/orm/strategy_options.py +../sqlalchemy/orm/interfaces.py +../sqlalchemy/orm/dynamic.py +../sqlalchemy/orm/properties.py +../sqlalchemy/orm/__init__.py +../sqlalchemy/orm/mapper.py +../sqlalchemy/orm/relationships.py +../sqlalchemy/orm/strategies.py +../sqlalchemy/orm/sync.py +../sqlalchemy/orm/state.py +../sqlalchemy/orm/base.py +../sqlalchemy/orm/util.py +../sqlalchemy/orm/exc.py +../sqlalchemy/orm/identity.py +../sqlalchemy/orm/evaluator.py +../sqlalchemy/orm/collections.py +../sqlalchemy/orm/descriptor_props.py +../sqlalchemy/orm/persistence.py +../sqlalchemy/orm/deprecated_interfaces.py +../sqlalchemy/orm/attributes.py +../sqlalchemy/orm/query.py +../sqlalchemy/orm/dependency.py +../sqlalchemy/orm/path_registry.py +../sqlalchemy/orm/session.py +../sqlalchemy/ext/compiler.py +../sqlalchemy/ext/automap.py +../sqlalchemy/ext/instrumentation.py +../sqlalchemy/ext/serializer.py +../sqlalchemy/ext/orderinglist.py +../sqlalchemy/ext/horizontal_shard.py +../sqlalchemy/ext/__init__.py +../sqlalchemy/ext/hybrid.py +../sqlalchemy/ext/associationproxy.py +../sqlalchemy/ext/mutable.py +../sqlalchemy/ext/baked.py +../sqlalchemy/ext/declarative/__init__.py +../sqlalchemy/ext/declarative/base.py +../sqlalchemy/ext/declarative/api.py +../sqlalchemy/ext/declarative/clsregistry.py +../sqlalchemy/databases/__init__.py +../sqlalchemy/util/topological.py +../sqlalchemy/util/queue.py +../sqlalchemy/util/__init__.py +../sqlalchemy/util/compat.py +../sqlalchemy/util/_collections.py +../sqlalchemy/util/deprecations.py +../sqlalchemy/util/langhelpers.py +../sqlalchemy/testing/engines.py +../sqlalchemy/testing/warnings.py 
+../sqlalchemy/testing/profiling.py +../sqlalchemy/testing/schema.py +../sqlalchemy/testing/replay_fixture.py +../sqlalchemy/testing/assertions.py +../sqlalchemy/testing/__init__.py +../sqlalchemy/testing/pickleable.py +../sqlalchemy/testing/exclusions.py +../sqlalchemy/testing/distutils_run.py +../sqlalchemy/testing/assertsql.py +../sqlalchemy/testing/util.py +../sqlalchemy/testing/mock.py +../sqlalchemy/testing/runner.py +../sqlalchemy/testing/fixtures.py +../sqlalchemy/testing/provision.py +../sqlalchemy/testing/requirements.py +../sqlalchemy/testing/entities.py +../sqlalchemy/testing/config.py +../sqlalchemy/testing/plugin/bootstrap.py +../sqlalchemy/testing/plugin/__init__.py +../sqlalchemy/testing/plugin/plugin_base.py +../sqlalchemy/testing/plugin/pytestplugin.py +../sqlalchemy/testing/plugin/noseplugin.py +../sqlalchemy/testing/suite/test_types.py +../sqlalchemy/testing/suite/test_ddl.py +../sqlalchemy/testing/suite/test_sequence.py +../sqlalchemy/testing/suite/test_update_delete.py +../sqlalchemy/testing/suite/__init__.py +../sqlalchemy/testing/suite/test_reflection.py +../sqlalchemy/testing/suite/test_dialect.py +../sqlalchemy/testing/suite/test_select.py +../sqlalchemy/testing/suite/test_results.py +../sqlalchemy/testing/suite/test_insert.py +../sqlalchemy/dialects/postgres.py +../sqlalchemy/dialects/__init__.py +../sqlalchemy/dialects/oracle/zxjdbc.py +../sqlalchemy/dialects/oracle/cx_oracle.py +../sqlalchemy/dialects/oracle/__init__.py +../sqlalchemy/dialects/oracle/base.py +../sqlalchemy/dialects/mssql/zxjdbc.py +../sqlalchemy/dialects/mssql/pyodbc.py +../sqlalchemy/dialects/mssql/__init__.py +../sqlalchemy/dialects/mssql/mxodbc.py +../sqlalchemy/dialects/mssql/base.py +../sqlalchemy/dialects/mssql/adodbapi.py +../sqlalchemy/dialects/mssql/information_schema.py +../sqlalchemy/dialects/mssql/pymssql.py +../sqlalchemy/dialects/sybase/pyodbc.py +../sqlalchemy/dialects/sybase/__init__.py +../sqlalchemy/dialects/sybase/mxodbc.py +../sqlalchemy/dialects/sybase/base.py +../sqlalchemy/dialects/sybase/pysybase.py +../sqlalchemy/dialects/sqlite/pysqlcipher.py +../sqlalchemy/dialects/sqlite/__init__.py +../sqlalchemy/dialects/sqlite/base.py +../sqlalchemy/dialects/sqlite/pysqlite.py +../sqlalchemy/dialects/postgresql/ranges.py +../sqlalchemy/dialects/postgresql/constraints.py +../sqlalchemy/dialects/postgresql/hstore.py +../sqlalchemy/dialects/postgresql/psycopg2.py +../sqlalchemy/dialects/postgresql/zxjdbc.py +../sqlalchemy/dialects/postgresql/__init__.py +../sqlalchemy/dialects/postgresql/psycopg2cffi.py +../sqlalchemy/dialects/postgresql/base.py +../sqlalchemy/dialects/postgresql/pypostgresql.py +../sqlalchemy/dialects/postgresql/pg8000.py +../sqlalchemy/dialects/postgresql/json.py +../sqlalchemy/dialects/mysql/zxjdbc.py +../sqlalchemy/dialects/mysql/pyodbc.py +../sqlalchemy/dialects/mysql/__init__.py +../sqlalchemy/dialects/mysql/pymysql.py +../sqlalchemy/dialects/mysql/mysqlconnector.py +../sqlalchemy/dialects/mysql/mysqldb.py +../sqlalchemy/dialects/mysql/oursql.py +../sqlalchemy/dialects/mysql/base.py +../sqlalchemy/dialects/mysql/cymysql.py +../sqlalchemy/dialects/mysql/gaerdbms.py +../sqlalchemy/dialects/firebird/kinterbasdb.py +../sqlalchemy/dialects/firebird/__init__.py +../sqlalchemy/dialects/firebird/base.py +../sqlalchemy/dialects/firebird/fdb.py +../sqlalchemy/__pycache__/inspection.cpython-34.pyc +../sqlalchemy/__pycache__/events.cpython-34.pyc +../sqlalchemy/__pycache__/interfaces.cpython-34.pyc +../sqlalchemy/__pycache__/schema.cpython-34.pyc 
+../sqlalchemy/__pycache__/processors.cpython-34.pyc +../sqlalchemy/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/__pycache__/types.cpython-34.pyc +../sqlalchemy/__pycache__/exc.cpython-34.pyc +../sqlalchemy/__pycache__/pool.cpython-34.pyc +../sqlalchemy/__pycache__/log.cpython-34.pyc +../sqlalchemy/sql/__pycache__/compiler.cpython-34.pyc +../sqlalchemy/sql/__pycache__/sqltypes.cpython-34.pyc +../sqlalchemy/sql/__pycache__/type_api.cpython-34.pyc +../sqlalchemy/sql/__pycache__/default_comparator.cpython-34.pyc +../sqlalchemy/sql/__pycache__/crud.cpython-34.pyc +../sqlalchemy/sql/__pycache__/schema.cpython-34.pyc +../sqlalchemy/sql/__pycache__/expression.cpython-34.pyc +../sqlalchemy/sql/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/sql/__pycache__/dml.cpython-34.pyc +../sqlalchemy/sql/__pycache__/operators.cpython-34.pyc +../sqlalchemy/sql/__pycache__/base.cpython-34.pyc +../sqlalchemy/sql/__pycache__/selectable.cpython-34.pyc +../sqlalchemy/sql/__pycache__/util.cpython-34.pyc +../sqlalchemy/sql/__pycache__/visitors.cpython-34.pyc +../sqlalchemy/sql/__pycache__/elements.cpython-34.pyc +../sqlalchemy/sql/__pycache__/functions.cpython-34.pyc +../sqlalchemy/sql/__pycache__/naming.cpython-34.pyc +../sqlalchemy/sql/__pycache__/ddl.cpython-34.pyc +../sqlalchemy/sql/__pycache__/annotation.cpython-34.pyc +../sqlalchemy/engine/__pycache__/threadlocal.cpython-34.pyc +../sqlalchemy/engine/__pycache__/interfaces.cpython-34.pyc +../sqlalchemy/engine/__pycache__/reflection.cpython-34.pyc +../sqlalchemy/engine/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/engine/__pycache__/url.cpython-34.pyc +../sqlalchemy/engine/__pycache__/strategies.cpython-34.pyc +../sqlalchemy/engine/__pycache__/base.cpython-34.pyc +../sqlalchemy/engine/__pycache__/util.cpython-34.pyc +../sqlalchemy/engine/__pycache__/result.cpython-34.pyc +../sqlalchemy/engine/__pycache__/default.cpython-34.pyc +../sqlalchemy/event/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/event/__pycache__/registry.cpython-34.pyc +../sqlalchemy/event/__pycache__/base.cpython-34.pyc +../sqlalchemy/event/__pycache__/attr.cpython-34.pyc +../sqlalchemy/event/__pycache__/api.cpython-34.pyc +../sqlalchemy/event/__pycache__/legacy.cpython-34.pyc +../sqlalchemy/connectors/__pycache__/zxJDBC.cpython-34.pyc +../sqlalchemy/connectors/__pycache__/pyodbc.cpython-34.pyc +../sqlalchemy/connectors/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/connectors/__pycache__/mxodbc.cpython-34.pyc +../sqlalchemy/orm/__pycache__/events.cpython-34.pyc +../sqlalchemy/orm/__pycache__/loading.cpython-34.pyc +../sqlalchemy/orm/__pycache__/scoping.cpython-34.pyc +../sqlalchemy/orm/__pycache__/unitofwork.cpython-34.pyc +../sqlalchemy/orm/__pycache__/instrumentation.cpython-34.pyc +../sqlalchemy/orm/__pycache__/strategy_options.cpython-34.pyc +../sqlalchemy/orm/__pycache__/interfaces.cpython-34.pyc +../sqlalchemy/orm/__pycache__/dynamic.cpython-34.pyc +../sqlalchemy/orm/__pycache__/properties.cpython-34.pyc +../sqlalchemy/orm/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/orm/__pycache__/mapper.cpython-34.pyc +../sqlalchemy/orm/__pycache__/relationships.cpython-34.pyc +../sqlalchemy/orm/__pycache__/strategies.cpython-34.pyc +../sqlalchemy/orm/__pycache__/sync.cpython-34.pyc +../sqlalchemy/orm/__pycache__/state.cpython-34.pyc +../sqlalchemy/orm/__pycache__/base.cpython-34.pyc +../sqlalchemy/orm/__pycache__/util.cpython-34.pyc +../sqlalchemy/orm/__pycache__/exc.cpython-34.pyc +../sqlalchemy/orm/__pycache__/identity.cpython-34.pyc 
+../sqlalchemy/orm/__pycache__/evaluator.cpython-34.pyc +../sqlalchemy/orm/__pycache__/collections.cpython-34.pyc +../sqlalchemy/orm/__pycache__/descriptor_props.cpython-34.pyc +../sqlalchemy/orm/__pycache__/persistence.cpython-34.pyc +../sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-34.pyc +../sqlalchemy/orm/__pycache__/attributes.cpython-34.pyc +../sqlalchemy/orm/__pycache__/query.cpython-34.pyc +../sqlalchemy/orm/__pycache__/dependency.cpython-34.pyc +../sqlalchemy/orm/__pycache__/path_registry.cpython-34.pyc +../sqlalchemy/orm/__pycache__/session.cpython-34.pyc +../sqlalchemy/ext/__pycache__/compiler.cpython-34.pyc +../sqlalchemy/ext/__pycache__/automap.cpython-34.pyc +../sqlalchemy/ext/__pycache__/instrumentation.cpython-34.pyc +../sqlalchemy/ext/__pycache__/serializer.cpython-34.pyc +../sqlalchemy/ext/__pycache__/orderinglist.cpython-34.pyc +../sqlalchemy/ext/__pycache__/horizontal_shard.cpython-34.pyc +../sqlalchemy/ext/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/ext/__pycache__/hybrid.cpython-34.pyc +../sqlalchemy/ext/__pycache__/associationproxy.cpython-34.pyc +../sqlalchemy/ext/__pycache__/mutable.cpython-34.pyc +../sqlalchemy/ext/__pycache__/baked.cpython-34.pyc +../sqlalchemy/ext/declarative/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/ext/declarative/__pycache__/base.cpython-34.pyc +../sqlalchemy/ext/declarative/__pycache__/api.cpython-34.pyc +../sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-34.pyc +../sqlalchemy/databases/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/util/__pycache__/topological.cpython-34.pyc +../sqlalchemy/util/__pycache__/queue.cpython-34.pyc +../sqlalchemy/util/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/util/__pycache__/compat.cpython-34.pyc +../sqlalchemy/util/__pycache__/_collections.cpython-34.pyc +../sqlalchemy/util/__pycache__/deprecations.cpython-34.pyc +../sqlalchemy/util/__pycache__/langhelpers.cpython-34.pyc +../sqlalchemy/testing/__pycache__/engines.cpython-34.pyc +../sqlalchemy/testing/__pycache__/warnings.cpython-34.pyc +../sqlalchemy/testing/__pycache__/profiling.cpython-34.pyc +../sqlalchemy/testing/__pycache__/schema.cpython-34.pyc +../sqlalchemy/testing/__pycache__/replay_fixture.cpython-34.pyc +../sqlalchemy/testing/__pycache__/assertions.cpython-34.pyc +../sqlalchemy/testing/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/testing/__pycache__/pickleable.cpython-34.pyc +../sqlalchemy/testing/__pycache__/exclusions.cpython-34.pyc +../sqlalchemy/testing/__pycache__/distutils_run.cpython-34.pyc +../sqlalchemy/testing/__pycache__/assertsql.cpython-34.pyc +../sqlalchemy/testing/__pycache__/util.cpython-34.pyc +../sqlalchemy/testing/__pycache__/mock.cpython-34.pyc +../sqlalchemy/testing/__pycache__/runner.cpython-34.pyc +../sqlalchemy/testing/__pycache__/fixtures.cpython-34.pyc +../sqlalchemy/testing/__pycache__/provision.cpython-34.pyc +../sqlalchemy/testing/__pycache__/requirements.cpython-34.pyc +../sqlalchemy/testing/__pycache__/entities.cpython-34.pyc +../sqlalchemy/testing/__pycache__/config.cpython-34.pyc +../sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-34.pyc +../sqlalchemy/testing/plugin/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-34.pyc +../sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-34.pyc +../sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-34.pyc +../sqlalchemy/testing/suite/__pycache__/test_types.cpython-34.pyc +../sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-34.pyc 
+../sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-34.pyc +../sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-34.pyc +../sqlalchemy/testing/suite/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-34.pyc +../sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-34.pyc +../sqlalchemy/testing/suite/__pycache__/test_select.cpython-34.pyc +../sqlalchemy/testing/suite/__pycache__/test_results.cpython-34.pyc +../sqlalchemy/testing/suite/__pycache__/test_insert.cpython-34.pyc +../sqlalchemy/dialects/__pycache__/postgres.cpython-34.pyc +../sqlalchemy/dialects/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-34.pyc +../sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-34.pyc +../sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/dialects/oracle/__pycache__/base.cpython-34.pyc +../sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-34.pyc +../sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-34.pyc +../sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-34.pyc +../sqlalchemy/dialects/mssql/__pycache__/base.cpython-34.pyc +../sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-34.pyc +../sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-34.pyc +../sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-34.pyc +../sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-34.pyc +../sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-34.pyc +../sqlalchemy/dialects/sybase/__pycache__/base.cpython-34.pyc +../sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-34.pyc +../sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-34.pyc +../sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/dialects/sqlite/__pycache__/base.cpython-34.pyc +../sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-34.pyc +../sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-34.pyc +../sqlalchemy/dialects/postgresql/__pycache__/constraints.cpython-34.pyc +../sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-34.pyc +../sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-34.pyc +../sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-34.pyc +../sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-34.pyc +../sqlalchemy/dialects/postgresql/__pycache__/base.cpython-34.pyc +../sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-34.pyc +../sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-34.pyc +../sqlalchemy/dialects/postgresql/__pycache__/json.cpython-34.pyc +../sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-34.pyc +../sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-34.pyc +../sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-34.pyc +../sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-34.pyc +../sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-34.pyc +../sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-34.pyc +../sqlalchemy/dialects/mysql/__pycache__/base.cpython-34.pyc +../sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-34.pyc +../sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-34.pyc 
+../sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-34.pyc +../sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-34.pyc +../sqlalchemy/dialects/firebird/__pycache__/base.cpython-34.pyc +../sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-34.pyc +../sqlalchemy/cprocessors.cpython-34m.so +../sqlalchemy/cresultproxy.cpython-34m.so +../sqlalchemy/cutils.cpython-34m.so +./ +PKG-INFO +dependency_links.txt +SOURCES.txt +top_level.txt diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/top_level.txt b/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/top_level.txt new file mode 100644 index 0000000..39fb2be --- /dev/null +++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12-py3.4.egg-info/top_level.txt @@ -0,0 +1 @@ +sqlalchemy diff --git a/lib/python3.4/site-packages/_cffi_backend.cpython-34m.so b/lib/python3.4/site-packages/_cffi_backend.cpython-34m.so new file mode 100755 index 0000000..48d3b1c Binary files /dev/null and b/lib/python3.4/site-packages/_cffi_backend.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/_markerlib/__init__.py b/lib/python3.4/site-packages/_markerlib/__init__.py new file mode 100644 index 0000000..e2b237b --- /dev/null +++ b/lib/python3.4/site-packages/_markerlib/__init__.py @@ -0,0 +1,16 @@ +try: + import ast + from _markerlib.markers import default_environment, compile, interpret +except ImportError: + if 'ast' in globals(): + raise + def default_environment(): + return {} + def compile(marker): + def marker_fn(environment=None, override=None): + # 'empty markers are True' heuristic won't install extra deps. + return not marker.strip() + marker_fn.__doc__ = marker + return marker_fn + def interpret(marker, environment=None, override=None): + return compile(marker)() diff --git a/lib/python3.4/site-packages/_markerlib/markers.py b/lib/python3.4/site-packages/_markerlib/markers.py new file mode 100644 index 0000000..fa83706 --- /dev/null +++ b/lib/python3.4/site-packages/_markerlib/markers.py @@ -0,0 +1,119 @@ +# -*- coding: utf-8 -*- +"""Interpret PEP 345 environment markers. + +EXPR [in|==|!=|not in] EXPR [or|and] ... + +where EXPR belongs to any of those: + + python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1]) + python_full_version = sys.version.split()[0] + os.name = os.name + sys.platform = sys.platform + platform.version = platform.version() + platform.machine = platform.machine() + platform.python_implementation = platform.python_implementation() + a free string, like '2.6', or 'win32' +""" + +__all__ = ['default_environment', 'compile', 'interpret'] + +import ast +import os +import platform +import sys +import weakref + +_builtin_compile = compile + +try: + from platform import python_implementation +except ImportError: + if os.name == "java": + # Jython 2.5 has ast module, but not platform.python_implementation() function. + def python_implementation(): + return "Jython" + else: + raise + + +# restricted set of variables +_VARS = {'sys.platform': sys.platform, + 'python_version': '%s.%s' % sys.version_info[:2], + # FIXME parsing sys.platform is not reliable, but there is no other + # way to get e.g. 2.7.2+, and the PEP is defined with sys.version + 'python_full_version': sys.version.split(' ', 1)[0], + 'os.name': os.name, + 'platform.version': platform.version(), + 'platform.machine': platform.machine(), + 'platform.python_implementation': python_implementation(), + 'extra': None # wheel extension + } + +for var in list(_VARS.keys()): + if '.' 
in var:
+        _VARS[var.replace('.', '_')] = _VARS[var]
+
+def default_environment():
+    """Return copy of default PEP 345 globals dictionary."""
+    return dict(_VARS)
+
+class ASTWhitelist(ast.NodeTransformer):
+    def __init__(self, statement):
+        self.statement = statement # for error messages
+
+    ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str)
+    # Bool operations
+    ALLOWED += (ast.And, ast.Or)
+    # Comparison operations
+    ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn)
+
+    def visit(self, node):
+        """Ensure statement only contains allowed nodes."""
+        if not isinstance(node, self.ALLOWED):
+            raise SyntaxError('Not allowed in environment markers.\n%s\n%s' %
+                              (self.statement,
+                               (' ' * node.col_offset) + '^'))
+        return ast.NodeTransformer.visit(self, node)
+
+    def visit_Attribute(self, node):
+        """Flatten one level of attribute access."""
+        new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx)
+        return ast.copy_location(new_node, node)
+
+def parse_marker(marker):
+    tree = ast.parse(marker, mode='eval')
+    new_tree = ASTWhitelist(marker).generic_visit(tree)
+    return new_tree
+
+def compile_marker(parsed_marker):
+    return _builtin_compile(parsed_marker, '<environment marker>', 'eval',
+                            dont_inherit=True)
+
+_cache = weakref.WeakValueDictionary()
+
+def compile(marker):
+    """Return compiled marker as a function accepting an environment dict."""
+    try:
+        return _cache[marker]
+    except KeyError:
+        pass
+    if not marker.strip():
+        def marker_fn(environment=None, override=None):
+            """"""
+            return True
+    else:
+        compiled_marker = compile_marker(parse_marker(marker))
+        def marker_fn(environment=None, override=None):
+            """override updates environment"""
+            if override is None:
+                override = {}
+            if environment is None:
+                environment = default_environment()
+            environment.update(override)
+            return eval(compiled_marker, environment)
+    marker_fn.__doc__ = marker
+    _cache[marker] = marker_fn
+    return _cache[marker]
+
+def interpret(marker, environment=None):
+    return compile(marker)(environment) diff --git a/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/PKG-INFO b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/PKG-INFO new file mode 100644 index 0000000..6ea9375 --- /dev/null +++ b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/PKG-INFO @@ -0,0 +1,31 @@
+Metadata-Version: 1.1
+Name: cffi
+Version: 1.7.0
+Summary: Foreign Function Interface for Python calling C code.
+Home-page: http://cffi.readthedocs.org
+Author: Armin Rigo, Maciej Fijalkowski
+Author-email: python-cffi@googlegroups.com
+License: MIT
+Description:
+        CFFI
+        ====
+
+        Foreign Function Interface for Python calling C code.
+        Please see the `Documentation <http://cffi.readthedocs.org/>`_.
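A minimal usage sketch of the _markerlib API above (the marker strings are invented for illustration; this snippet is not part of the vendored files):

    from _markerlib import default_environment, interpret

    env = default_environment()                  # snapshot of the PEP 345 variables
    print(interpret("python_version == '3.4'"))  # True under the Python 3.4 in this tree

    # Markers can also be evaluated against an overridden environment;
    # the keys are the dotted names from _VARS, e.g. 'sys.platform'.
    env['sys.platform'] = 'win32'
    print(interpret("sys.platform == 'win32'", env))   # True

Note that compile() keeps the generated marker functions in a WeakValueDictionary, so a cached entry survives only as long as something else still references it.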
+
+        Contact
+        -------
+
+        `Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_
+
+Platform: UNKNOWN
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy diff --git a/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/SOURCES.txt b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/SOURCES.txt new file mode 100644 index 0000000..c151358 --- /dev/null +++ b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/SOURCES.txt @@ -0,0 +1,180 @@
+AUTHORS
+LICENSE
+MANIFEST.in
+setup.cfg
+setup.py
+setup_base.py
+c/_cffi_backend.c
+c/call_python.c
+c/cdlopen.c
+c/cffi1_module.c
+c/cglob.c
+c/commontypes.c
+c/ffi_obj.c
+c/file_emulator.h
+c/lib_obj.c
+c/malloc_closure.h
+c/minibuffer.h
+c/misc_thread_common.h
+c/misc_thread_posix.h
+c/misc_win32.h
+c/parse_c_type.c
+c/realize_c_type.c
+c/test_c.py
+c/wchar_helper.h
+c/libffi_msvc/ffi.c
+c/libffi_msvc/ffi.h
+c/libffi_msvc/ffi_common.h
+c/libffi_msvc/fficonfig.h
+c/libffi_msvc/ffitarget.h
+c/libffi_msvc/prep_cif.c
+c/libffi_msvc/types.c
+c/libffi_msvc/win32.c
+c/libffi_msvc/win64.asm
+c/libffi_msvc/win64.obj
+cffi/__init__.py
+cffi/_cffi_include.h
+cffi/_embedding.h
+cffi/api.py
+cffi/backend_ctypes.py
+cffi/cffi_opcode.py
+cffi/commontypes.py
+cffi/cparser.py
+cffi/ffiplatform.py
+cffi/lock.py
+cffi/model.py
+cffi/parse_c_type.h
+cffi/recompiler.py
+cffi/setuptools_ext.py
+cffi/vengine_cpy.py
+cffi/vengine_gen.py
+cffi/verifier.py
+cffi.egg-info/PKG-INFO
+cffi.egg-info/SOURCES.txt
+cffi.egg-info/dependency_links.txt
+cffi.egg-info/entry_points.txt
+cffi.egg-info/not-zip-safe
+cffi.egg-info/requires.txt
+cffi.egg-info/top_level.txt
+demo/_curses.py
+demo/_curses_build.py
+demo/_curses_setup.py
+demo/api.py
+demo/bsdopendirtype.py
+demo/bsdopendirtype_build.py
+demo/bsdopendirtype_setup.py
+demo/btrfs-snap.py
+demo/cffi-cocoa.py
+demo/embedding.py
+demo/embedding_test.c
+demo/extern_python.py
+demo/extern_python_varargs.py
+demo/fastcsv.py
+demo/gmp.py
+demo/gmp_build.py
+demo/manual.c
+demo/manual2.py
+demo/pwuid.py
+demo/pwuid_build.py
+demo/py.cleanup
+demo/pyobj.py
+demo/readdir.py
+demo/readdir2.py
+demo/readdir2_build.py
+demo/readdir2_setup.py
+demo/readdir_build.py
+demo/readdir_ctypes.py
+demo/readdir_setup.py
+demo/recopendirtype.py
+demo/recopendirtype_build.py
+demo/setup_manual.py
+demo/winclipboard.py
+demo/winclipboard_build.py
+demo/xclient.py
+demo/xclient_build.py
+doc/Makefile
+doc/make.bat
+doc/misc/design.rst
+doc/misc/grant-cffi-1.0.rst
+doc/misc/parse_c_type.rst
+doc/source/cdef.rst
+doc/source/conf.py
+doc/source/embedding.rst
+doc/source/index.rst
+doc/source/installation.rst
+doc/source/overview.rst
+doc/source/ref.rst
+doc/source/using.rst
+doc/source/whatsnew.rst
+testing/__init__.py
+testing/support.py
+testing/udir.py
+testing/cffi0/__init__.py
+testing/cffi0/backend_tests.py
+testing/cffi0/callback_in_thread.py
+testing/cffi0/test_cdata.py
+testing/cffi0/test_ctypes.py
+testing/cffi0/test_ffi_backend.py
+testing/cffi0/test_function.py
+testing/cffi0/test_model.py
+testing/cffi0/test_ownlib.py
+testing/cffi0/test_parsing.py
+testing/cffi0/test_platform.py
+testing/cffi0/test_unicode_literals.py +testing/cffi0/test_verify.py +testing/cffi0/test_verify2.py +testing/cffi0/test_version.py +testing/cffi0/test_vgen.py +testing/cffi0/test_vgen2.py +testing/cffi0/test_zdistutils.py +testing/cffi0/test_zintegration.py +testing/cffi0/snippets/distutils_module/setup.py +testing/cffi0/snippets/distutils_module/snip_basic_verify.py +testing/cffi0/snippets/distutils_package_1/setup.py +testing/cffi0/snippets/distutils_package_1/snip_basic_verify1/__init__.py +testing/cffi0/snippets/distutils_package_2/setup.py +testing/cffi0/snippets/distutils_package_2/snip_basic_verify2/__init__.py +testing/cffi0/snippets/infrastructure/setup.py +testing/cffi0/snippets/infrastructure/snip_infrastructure/__init__.py +testing/cffi0/snippets/setuptools_module/setup.py +testing/cffi0/snippets/setuptools_module/snip_setuptools_verify.py +testing/cffi0/snippets/setuptools_package_1/setup.py +testing/cffi0/snippets/setuptools_package_1/snip_setuptools_verify1/__init__.py +testing/cffi0/snippets/setuptools_package_2/setup.py +testing/cffi0/snippets/setuptools_package_2/snip_setuptools_verify2/__init__.py +testing/cffi1/__init__.py +testing/cffi1/test_cffi_binary.py +testing/cffi1/test_commontypes.py +testing/cffi1/test_dlopen.py +testing/cffi1/test_dlopen_unicode_literals.py +testing/cffi1/test_ffi_obj.py +testing/cffi1/test_new_ffi_1.py +testing/cffi1/test_parse_c_type.py +testing/cffi1/test_re_python.py +testing/cffi1/test_realize_c_type.py +testing/cffi1/test_recompiler.py +testing/cffi1/test_unicode_literals.py +testing/cffi1/test_verify1.py +testing/cffi1/test_zdist.py +testing/embedding/__init__.py +testing/embedding/add1-test.c +testing/embedding/add1.py +testing/embedding/add2-test.c +testing/embedding/add2.py +testing/embedding/add3.py +testing/embedding/add_recursive-test.c +testing/embedding/add_recursive.py +testing/embedding/empty.py +testing/embedding/perf-test.c +testing/embedding/perf.py +testing/embedding/test_basic.py +testing/embedding/test_performance.py +testing/embedding/test_recursive.py +testing/embedding/test_thread.py +testing/embedding/test_tlocal.py +testing/embedding/thread-test.h +testing/embedding/thread1-test.c +testing/embedding/thread2-test.c +testing/embedding/thread3-test.c +testing/embedding/tlocal-test.c +testing/embedding/tlocal.py \ No newline at end of file diff --git a/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/dependency_links.txt b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/entry_points.txt b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/entry_points.txt new file mode 100644 index 0000000..eee7e0f --- /dev/null +++ b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/entry_points.txt @@ -0,0 +1,3 @@ +[distutils.setup_keywords] +cffi_modules = cffi.setuptools_ext:cffi_modules + diff --git a/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/installed-files.txt b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/installed-files.txt new file mode 100644 index 0000000..81fa422 --- /dev/null +++ b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/installed-files.txt @@ -0,0 +1,40 @@ +../cffi/vengine_cpy.py +../cffi/backend_ctypes.py +../cffi/lock.py +../cffi/model.py +../cffi/cparser.py +../cffi/commontypes.py +../cffi/__init__.py +../cffi/recompiler.py 
+../cffi/ffiplatform.py +../cffi/verifier.py +../cffi/vengine_gen.py +../cffi/cffi_opcode.py +../cffi/setuptools_ext.py +../cffi/api.py +../cffi/_cffi_include.h +../cffi/parse_c_type.h +../cffi/_embedding.h +../cffi/__pycache__/vengine_cpy.cpython-34.pyc +../cffi/__pycache__/backend_ctypes.cpython-34.pyc +../cffi/__pycache__/lock.cpython-34.pyc +../cffi/__pycache__/model.cpython-34.pyc +../cffi/__pycache__/cparser.cpython-34.pyc +../cffi/__pycache__/commontypes.cpython-34.pyc +../cffi/__pycache__/__init__.cpython-34.pyc +../cffi/__pycache__/recompiler.cpython-34.pyc +../cffi/__pycache__/ffiplatform.cpython-34.pyc +../cffi/__pycache__/verifier.cpython-34.pyc +../cffi/__pycache__/vengine_gen.cpython-34.pyc +../cffi/__pycache__/cffi_opcode.cpython-34.pyc +../cffi/__pycache__/setuptools_ext.cpython-34.pyc +../cffi/__pycache__/api.cpython-34.pyc +../_cffi_backend.cpython-34m.so +./ +PKG-INFO +requires.txt +not-zip-safe +dependency_links.txt +SOURCES.txt +top_level.txt +entry_points.txt diff --git a/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/not-zip-safe b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/requires.txt b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/requires.txt new file mode 100644 index 0000000..dc1c9e1 --- /dev/null +++ b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/requires.txt @@ -0,0 +1 @@ +pycparser diff --git a/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/top_level.txt b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/top_level.txt new file mode 100644 index 0000000..f645779 --- /dev/null +++ b/lib/python3.4/site-packages/cffi-1.7.0-py3.4.egg-info/top_level.txt @@ -0,0 +1,2 @@ +_cffi_backend +cffi diff --git a/lib/python3.4/site-packages/cffi/__init__.py b/lib/python3.4/site-packages/cffi/__init__.py new file mode 100644 index 0000000..10559d6 --- /dev/null +++ b/lib/python3.4/site-packages/cffi/__init__.py @@ -0,0 +1,13 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI, CDefError, FFIError +from .ffiplatform import VerificationError, VerificationMissing + +__version__ = "1.7.0" +__version_info__ = (1, 7, 0) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. 
+__version_verifier_modules__ = "0.8.6" diff --git a/lib/python3.4/site-packages/cffi/_cffi_include.h b/lib/python3.4/site-packages/cffi/_cffi_include.h new file mode 100644 index 0000000..a72c132 --- /dev/null +++ b/lib/python3.4/site-packages/cffi/_cffi_include.h @@ -0,0 +1,248 @@
+#define _CFFI_
+#include <Python.h>
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include <stddef.h>
+#include "parse_c_type.h"
+
+/* this block of #ifs should be kept exactly identical between
+   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+   and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600   /* MSVC < 2010 */
+   typedef __int8 int8_t;
+   typedef __int16 int16_t;
+   typedef __int32 int32_t;
+   typedef __int64 int64_t;
+   typedef unsigned __int8 uint8_t;
+   typedef unsigned __int16 uint16_t;
+   typedef unsigned __int32 uint32_t;
+   typedef unsigned __int64 uint64_t;
+   typedef __int8 int_least8_t;
+   typedef __int16 int_least16_t;
+   typedef __int32 int_least32_t;
+   typedef __int64 int_least64_t;
+   typedef unsigned __int8 uint_least8_t;
+   typedef unsigned __int16 uint_least16_t;
+   typedef unsigned __int32 uint_least32_t;
+   typedef unsigned __int64 uint_least64_t;
+   typedef __int8 int_fast8_t;
+   typedef __int16 int_fast16_t;
+   typedef __int32 int_fast32_t;
+   typedef __int64 int_fast64_t;
+   typedef unsigned __int8 uint_fast8_t;
+   typedef unsigned __int16 uint_fast16_t;
+   typedef unsigned __int32 uint_fast32_t;
+   typedef unsigned __int64 uint_fast64_t;
+   typedef __int64 intmax_t;
+   typedef unsigned __int64 uintmax_t;
+# else
+#  include <stdint.h>
+# endif
+# if _MSC_VER < 1800   /* MSVC < 2013 */
+   typedef unsigned char _Bool;
+# endif
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+#  include <alloca.h>
+# endif
+#endif
+
+#ifdef __GNUC__
+# define _CFFI_UNUSED_FN __attribute__((unused))
+#else
+# define _CFFI_UNUSED_FN /* nothing */
+#endif
+
+#ifdef __cplusplus
+# ifndef _Bool
+# define _Bool bool /* semi-hackish: C++ has no _Bool; bool is builtin */
+# endif
+#endif
+
+/********** CPython-specific section **********/
+#ifndef PYPY_VERSION
+
+
+#if PY_MAJOR_VERSION >= 3
+# define PyInt_FromLong PyLong_FromLong
+#endif
+
+#define _cffi_from_c_double PyFloat_FromDouble
+#define _cffi_from_c_float PyFloat_FromDouble
+#define _cffi_from_c_long PyInt_FromLong
+#define _cffi_from_c_ulong PyLong_FromUnsignedLong
+#define _cffi_from_c_longlong PyLong_FromLongLong
+#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+
+#define _cffi_to_c_double PyFloat_AsDouble
+#define _cffi_to_c_float PyFloat_AsDouble
+
+#define _cffi_from_c_int(x, type)                                        \
+    (((type)-1) > 0 ? /* unsigned */                                     \
+        (sizeof(type) < sizeof(long) ?                                   \
+            PyInt_FromLong((long)x) :                                    \
+         sizeof(type) == sizeof(long) ?                                  \
+            PyLong_FromUnsignedLong((unsigned long)x) :                  \
+            PyLong_FromUnsignedLongLong((unsigned long long)x)) :        \
+        (sizeof(type) <= sizeof(long) ?                                  \
+            PyInt_FromLong((long)x) :                                    \
+            PyLong_FromLongLong((long long)x)))
+
+#define _cffi_to_c_int(o, type)                                          \
+    ((type)(                                                             \
+     sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o)        \
+                                         : (type)_cffi_to_c_i8(o)) :     \
+     sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o)       \
+                                         : (type)_cffi_to_c_i16(o)) :    \
+     sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o)       \
+                                         : (type)_cffi_to_c_i32(o)) :    \
+     sizeof(type) == 8 ? (((type)-1) > 0 ?
(type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _CFFI_NUM_EXPORTS 26 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (CTypeDescrObject *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + +_CFFI_UNUSED_FN +static PyObject **_cffi_unpack_args(PyObject *args_tuple, Py_ssize_t expected, + const char *fnname) +{ + if (PyTuple_GET_SIZE(args_tuple) != expected) { + PyErr_Format(PyExc_TypeError, + "%.150s() takes exactly %zd arguments (%zd given)", + fnname, expected, PyTuple_GET_SIZE(args_tuple)); + return NULL; + } + return &PyTuple_GET_ITEM(args_tuple, 0); /* pointer to the first item, + the others follow */ +} + +/********** end 
CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \ + _CFFI__UNKNOWN_FLOAT_PRIM) + +#define _cffi_check_int(got, got_nonpos, expected) \ + ((got_nonpos) == (expected <= 0) && \ + (got) == (unsigned long long)expected) + +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + +#ifdef __cplusplus +} +#endif diff --git a/lib/python3.4/site-packages/cffi/_embedding.h b/lib/python3.4/site-packages/cffi/_embedding.h new file mode 100644 index 0000000..03a1550 --- /dev/null +++ b/lib/python3.4/site-packages/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. 
+*/
+#undef _cffi_call_python
+typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
+static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
+static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
+
+
+#ifndef _MSC_VER
+   /* --- Assuming a GCC not infinitely old --- */
+# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n)
+# define cffi_write_barrier() __sync_synchronize()
+# if !defined(__amd64__) && !defined(__x86_64__) && \
+     !defined(__i386__) && !defined(__i386)
+#   define cffi_read_barrier() __sync_synchronize()
+# else
+#   define cffi_read_barrier() (void)0
+# endif
+#else
+   /* --- Windows threads version --- */
+# include <Windows.h>
+# define cffi_compare_and_swap(l,o,n) \
+                               (InterlockedCompareExchangePointer(l,n,o) == (o))
+# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0)
+# define cffi_read_barrier() (void)0
+static volatile LONG _cffi_dummy;
+#endif
+
+#ifdef WITH_THREAD
+# ifndef _MSC_VER
+#  include <pthread.h>
+   static pthread_mutex_t _cffi_embed_startup_lock;
+# else
+   static CRITICAL_SECTION _cffi_embed_startup_lock;
+# endif
+   static char _cffi_embed_startup_lock_ready = 0;
+#endif
+
+static void _cffi_acquire_reentrant_mutex(void)
+{
+    static void *volatile lock = NULL;
+
+    while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
+        /* should ideally do a spin loop instruction here, but
+           hard to do it portably and doesn't really matter I
+           think: pthread_mutex_init() should be very fast, and
+           this is only run at start-up anyway. */
+    }
+
+#ifdef WITH_THREAD
+    if (!_cffi_embed_startup_lock_ready) {
+# ifndef _MSC_VER
+        pthread_mutexattr_t attr;
+        pthread_mutexattr_init(&attr);
+        pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
+        pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
+# else
+        InitializeCriticalSection(&_cffi_embed_startup_lock);
+# endif
+        _cffi_embed_startup_lock_ready = 1;
+    }
+#endif
+
+    while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
+        ;
+
+#ifndef _MSC_VER
+    pthread_mutex_lock(&_cffi_embed_startup_lock);
+#else
+    EnterCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+static void _cffi_release_reentrant_mutex(void)
+{
+#ifndef _MSC_VER
+    pthread_mutex_unlock(&_cffi_embed_startup_lock);
+#else
+    LeaveCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+
+/********** CPython-specific section **********/
+#ifndef PYPY_VERSION
+
+
+#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX]
+
+PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void);   /* forward */
+
+static void _cffi_py_initialize(void)
+{
+    /* XXX use initsigs=0, which "skips initialization registration of
+       signal handlers, which might be useful when Python is
+       embedded" according to the Python docs.  But review and think
+       if it should be a user-controllable setting.
+
+       XXX we should also give a way to write errors to a buffer
+       instead of to stderr.
+
+       XXX if importing 'site' fails, CPython (any version) calls
+       exit().  Should we try to work around this behavior here?
+    */
+    Py_InitializeEx(0);
+}
+
+static int _cffi_initialize_python(void)
+{
+    /* This initializes Python, imports _cffi_backend, and then the
+       present .dll/.so is set up as a CPython C extension module.
+    */
+    int result;
+    PyGILState_STATE state;
+    PyObject *pycode=NULL, *global_dict=NULL, *x;
+
+#if PY_MAJOR_VERSION >= 3
+    /* see comments in _cffi_carefully_make_gil() about the
+       Python2/Python3 difference
+    */
+#else
+    /* Acquire the GIL.  We have no threadstate here.
If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.7.0" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. 
+ + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. 
when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. 
It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. */ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/lib/python3.4/site-packages/cffi/api.py b/lib/python3.4/site-packages/cffi/api.py new file mode 100644 index 0000000..e4d7e39 --- /dev/null +++ b/lib/python3.4/site-packages/cffi/api.py @@ -0,0 +1,875 @@ +import sys, types +from .lock import allocate_lock + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + + +class FFIError(Exception): + pass + +class CDefError(Exception): + def __str__(self): + try: + line = 'line %d: ' % (self.args[1].coord.line,) + except (AttributeError, TypeError, IndexError): + line = '' + return '%s%s' % (line, self.args[0]) + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. + + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + from . import cparser, model + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + assert backend.__version__ == __version__, \ + "version mismatch, %s != %s" % (backend.__version__, __version__) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in backend.__dict__: + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + + def cdef(self, csource, override=False, packed=False): + """Parse the given C source. 
This registers all declared functions, + types, and global variables. The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. + If 'packed' is specified as True, all structs declared inside this + cdef are packed, i.e. laid out without any field alignment at all. + """ + self._cdef(csource, override=override, packed=packed) + + def embedding_api(self, csource, packed=False): + self._cdef(csource, packed=packed, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): + if not isinstance(csource, str): # unicode, on Python 2 + if not isinstance(csource, basestring): + raise TypeError("cdef() argument must be a string") + csource = csource.encode('ascii') + with self._lock: + self._cdef_version = object() + self._parser.parse(csource, override=override, **options) + self._cdefsources.append(csource) + if override: + for cache in self._function_caches: + cache.clear() + finishlist = self._parser._recomplete + if finishlist: + self._parser._recomplete = [] + for tp in finishlist: + tp.finish_backend_type(self, finishlist) + + def dlopen(self, name, flags=0): + """Load and return a dynamic library identified by 'name'. + The standard C library can be loaded by passing None. + Note that functions and types declared by 'ffi.cdef()' are not + linked to a particular library, just like C headers; in the + library we only look for the actual (untyped) symbols. + """ + assert isinstance(name, basestring) or name is None + with self._lock: + lib, function_cache = _make_ffi_library(self, name, flags) + self._function_caches.append(function_cache) + self._libraries.append(lib) + return lib + + def _typeof_locked(self, cdecl): + # call me with the lock! + key = cdecl + if key in self._parsed_types: + return self._parsed_types[key] + # + if not isinstance(cdecl, str): # unicode, on Python 2 + cdecl = cdecl.encode('ascii') + # + type = self._parser.parse_type(cdecl) + really_a_function_type = type.is_raw_function + if really_a_function_type: + type = type.as_function_pointer() + btype = self._get_cached_btype(type) + result = btype, really_a_function_type + self._parsed_types[key] = result + return result + + def _typeof(self, cdecl, consider_function_as_funcptr=False): + # string -> ctype object + try: + result = self._parsed_types[cdecl] + except KeyError: + with self._lock: + result = self._typeof_locked(cdecl) + # + btype, really_a_function_type = result + if really_a_function_type and not consider_function_as_funcptr: + raise CDefError("the type %r is a function type, not a " + "pointer-to-function type" % (cdecl,)) + return btype + + def typeof(self, cdecl): + """Parse the C type given as a string and return the + corresponding object. + It can also be used on 'cdata' instance to get its C type. + """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. 
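+
+        A short illustrative sketch (added for clarity; the sizes shown
+        assume a typical 64-bit platform and are not guaranteed):
+
+            ffi.sizeof("int")              # usually 4
+            ffi.sizeof("long[10]")         # 10 * sizeof(long), e.g. 80
+            ffi.sizeof(ffi.new("int *"))   # size of the pointer itself, e.g. 8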
+ """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. + + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. + """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. 
+        If 'cdata' is a pointer or array of characters or bytes, returns
+        the null-terminated string. The returned string extends until
+        the first null character, or at most 'maxlen' characters. If
+        'cdata' is an array then 'maxlen' defaults to its length.
+
+        If 'cdata' is a pointer or array of wchar_t, returns a unicode
+        string following the same rules.
+
+        If 'cdata' is a single character or byte or a wchar_t, returns
+        it as a string or unicode string.
+
+        If 'cdata' is an enum, returns the value of the enumerator as a
+        string, or 'NUMBER' if the value is out of range.
+        """
+        return self._backend.string(cdata, maxlen)
+
+    def unpack(self, cdata, length):
+        """Unpack an array of C data of the given length,
+        returning a Python string/unicode/list.
+
+        If 'cdata' is a pointer to 'char', returns a byte string.
+        It does not stop at the first null. This is equivalent to:
+        ffi.buffer(cdata, length)[:]
+
+        If 'cdata' is a pointer to 'wchar_t', returns a unicode string.
+        'length' is measured in wchar_t's; it is not the size in bytes.
+
+        If 'cdata' is a pointer to anything else, returns a list of
+        'length' items. This is a faster equivalent to:
+        [cdata[i] for i in range(length)]
+        """
+        return self._backend.unpack(cdata, length)
+
+    def buffer(self, cdata, size=-1):
+        """Return a read-write buffer object that references the raw C data
+        pointed to by the given 'cdata'. The 'cdata' must be a pointer or
+        an array. Can be passed to functions expecting a buffer, or directly
+        manipulated with:
+
+            buf[:]          get a copy of it in a regular string, or
+            buf[idx]        as a single character
+            buf[:] = ...
+            buf[idx] = ...  change the content
+        """
+        return self._backend.buffer(cdata, size)
+
+    def from_buffer(self, python_buffer):
+        """Return a <cdata 'char[]'> that points to the data of the
+        given Python object, which must support the buffer interface.
+        Note that this is not meant to be used on the built-in types
+        str or unicode (you can build 'char[]' arrays explicitly)
+        but only on objects containing large quantities of raw data
+        in some other format, like 'array.array' or numpy arrays.
+        """
+        return self._backend.from_buffer(self.BCharA, python_buffer)
+
+    def memmove(self, dest, src, n):
+        """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
+
+        Like the C function memmove(), the memory areas may overlap;
+        apart from that it behaves like the C function memcpy().
+
+        'src' can be any cdata ptr or array, or any Python buffer object.
+        'dest' can be any cdata ptr or array, or a writable Python buffer
+        object. The size to copy, 'n', is always measured in bytes.
+
+        Unlike other methods, this one supports all Python buffer types,
+        including byte strings and bytearrays---but it still does not
+        support non-contiguous buffers.
+        """
+        return self._backend.memmove(dest, src, n)
+
+    def callback(self, cdecl, python_callable=None, error=None, onerror=None):
+        """Return a callback object or a decorator making such a
+        callback object. 'cdecl' must name a C function pointer type.
+        The callback invokes the specified 'python_callable' (which may
+        be provided either directly or via a decorator). Important: the
+        callback object must be manually kept alive for as long as the
+        callback may be invoked from the C level.
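+
+        Illustrative sketch (the C function pointer type shown is an
+        arbitrary example, not part of this method's contract):
+
+            @ffi.callback("int(int, int)")
+            def my_cmp(a, b):
+                return (a > b) - (a < b)
+            # keep a reference to 'my_cmp' while C code may still call it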
+ """ + def callback_decorator_wrap(python_callable): + if not callable(python_callable): + raise TypeError("the 'python_callable' argument " + "is not callable") + return self._backend.callback(cdecl, python_callable, + error, onerror) + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) + if python_callable is None: + return callback_decorator_wrap # decorator mode + else: + return callback_decorator_wrap(python_callable) # direct mode + + def getctype(self, cdecl, replace_with=''): + """Return a string giving the C type 'cdecl', which may be itself + a string or a object. If 'replace_with' is given, it gives + extra text to append (or insert for more complicated C types), like + a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + replace_with = replace_with.strip() + if (replace_with.startswith('*') + and '&[' in self._backend.getcname(cdecl, '&')): + replace_with = '(%s)' % replace_with + elif replace_with and not replace_with[0] in '[(': + replace_with = ' ' + replace_with + return self._backend.getcname(cdecl, replace_with) + + def gc(self, cdata, destructor): + """Return a new cdata object that points to the same + data. Later, when this new cdata object is garbage-collected, + 'destructor(old_cdata_object)' will be called. + """ + return self._backend.gcp(cdata, destructor) + + def _get_cached_btype(self, type): + assert self._lock.acquire(False) is False + # call me with the lock! + try: + BType = self._cached_btypes[type] + except KeyError: + finishlist = [] + BType = type.get_cached_btype(self, finishlist) + for type in finishlist: + type.finish_backend_type(self, finishlist) + return BType + + def verify(self, source='', tmpdir=None, **kwargs): + """Verify that the current ffi signatures compile on this + machine, and return a dynamic library object. The dynamic + library can be used to call functions and access global + variables declared in this 'ffi'. The library is compiled + by the C compiler: it gives you C-level API compatibility + (including calling macros). This is unlike 'ffi.dlopen()', + which requires binary compatibility in the signatures. + """ + from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Veritier(). + tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should). + self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + from . import model + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, *fields_or_indexes): + """Return the address of a . 
+ If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. + """ + ctype = self._backend.typeof(cdata) + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. + """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. 
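+
+        Illustrative sketch (the declaration is a placeholder; call
+        set_unicode() before any cdef() that uses TCHAR/LPCTSTR):
+
+            ffi = FFI()
+            ffi.set_unicode(True)
+            ffi.cdef("int MyFunction(LPCTSTR text);")   # hypothetical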
+ """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python27" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. 
+ if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def distutils_extension(self, tmpdir='build', verbose=True): + from distutils.dir_util import mkpath + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored + return self.verifier.get_extension() + raise ValueError("set_source() must be called before" + " distutils_extension()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("distutils_extension() is only for C extension " + "modules, not for dlopen()-style pure Python " + "modules") + mkpath(tmpdir) + ext, updated = recompile(self, module_name, + source, tmpdir=tmpdir, extradir=tmpdir, + source_extension=source_extension, + call_c_compiler=False, **kwds) + if verbose: + if updated: + sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) + else: + sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) + return ext + + def emit_c_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("emit_c_code() is only for C extension modules, " + "not for dlopen()-style pure Python modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def emit_python_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is not None: + raise TypeError("emit_python_code() is only for dlopen()-style " + "pure Python modules, not for C extension modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def compile(self, tmpdir='.', verbose=0, target=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. 
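+
+        Illustrative sketch (module name and C source are placeholders):
+
+            ffi.set_source("_example", "#include <math.h>")
+            ffi.compile(verbose=1)                  # default target '*'
+            # or, to get a plain dynamic library instead:
+            # ffi.compile(target="libexample.*")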
+ """ + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before compile()") + module_name, source, source_extension, kwds = self._assigned_source + return recompile(self, module_name, source, tmpdir=tmpdir, + target=target, source_extension=source_extension, + compiler_verbose=verbose, **kwds) + + def init_once(self, func, tag): + # Read _init_once_cache[tag], which is either (False, lock) if + # we're calling the function now in some thread, or (True, result). + # Don't call setdefault() in most cases, to avoid allocating and + # immediately freeing a lock; but still use setdefaut() to avoid + # races. + try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. + result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + + def list_types(self): + """Returns the user type names known to this FFI instance. + This returns a tuple containing three lists of names: + (typedef_names, names_of_structs, names_of_unions) + """ + typedefs = [] + structs = [] + unions = [] + for key in self._parser._declarations: + if key.startswith('typedef '): + typedefs.append(key[8:]) + elif key.startswith('struct '): + structs.append(key[7:]) + elif key.startswith('union '): + unions.append(key[6:]) + typedefs.sort() + structs.sort() + unions.sort() + return (typedefs, structs, unions) + + +def _load_backend_lib(backend, name, flags): + if name is None: + if sys.platform != "win32": + return backend.load_library(None, flags) + name = "c" # Windows: load_library(None) fails, but this works + # (backward compatibility hack only) + try: + if '.' 
not in name and '/' not in name: + raise OSError("library not found: %r" % (name,)) + return backend.load_library(name, flags) + except OSError: + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + raise # propagate the original OSError + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + import os + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + try: + value = backendlib.load_function(BType, name) + except KeyError as e: + raise AttributeError('%s: %s' % (name, e)) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + from . import model + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + # + if libname is not None: + try: + if not isinstance(libname, str): # unicode, on Python 2 + libname = libname.encode('utf-8') + FFILibrary.__name__ = 'FFILibrary_%s' % libname + except UnicodeError: + pass + library = FFILibrary() + return library, library.__dict__ + +def _builtin_function_type(func): + # a hack to make at least ffi.typeof(builtin_function) work, + # if the builtin function was obtained by 'vengine_cpy'. 
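+    # (Concretely: 'vengine_cpy' stores the original FFI object and a
+    # mapping from builtin functions to their declared types as attributes
+    # of the generated extension module; any lookup failure below makes
+    # this helper return None.)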
+ import sys + try: + module = sys.modules[func.__module__] + ffi = module._cffi_original_ffi + types_of_builtin_funcs = module._cffi_types_of_builtin_funcs + tp = types_of_builtin_funcs[func] + except (KeyError, AttributeError, TypeError): + return None + else: + with ffi._lock: + return ffi._get_cached_btype(tp) diff --git a/lib/python3.4/site-packages/cffi/backend_ctypes.py b/lib/python3.4/site-packages/cffi/backend_ctypes.py new file mode 100644 index 0000000..b2579b3 --- /dev/null +++ b/lib/python3.4/site-packages/cffi/backend_ctypes.py @@ -0,0 +1,1097 @@ +import ctypes, ctypes.util, operator, sys +from . import model + +if sys.version_info < (3,): + bytechr = chr +else: + unicode = str + long = int + xrange = range + bytechr = lambda num: bytes([num]) + +class CTypesType(type): + pass + +class CTypesData(object): + __metaclass__ = CTypesType + __slots__ = ['__weakref__'] + __name__ = '' + + def __init__(self, *args): + raise TypeError("cannot instantiate %r" % (self.__class__,)) + + @classmethod + def _newp(cls, init): + raise TypeError("expected a pointer or array ctype, got '%s'" + % (cls._get_c_name(),)) + + @staticmethod + def _to_ctypes(value): + raise TypeError + + @classmethod + def _arg_to_ctypes(cls, *value): + try: + ctype = cls._ctype + except AttributeError: + raise TypeError("cannot create an instance of %r" % (cls,)) + if value: + res = cls._to_ctypes(*value) + if not isinstance(res, ctype): + res = cls._ctype(res) + else: + res = cls._ctype() + return res + + @classmethod + def _create_ctype_obj(cls, init): + if init is None: + return cls._arg_to_ctypes() + else: + return cls._arg_to_ctypes(init) + + @staticmethod + def _from_ctypes(ctypes_value): + raise TypeError + + @classmethod + def _get_c_name(cls, replace_with=''): + return cls._reftypename.replace(' &', replace_with) + + @classmethod + def _fix_class(cls): + cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__module__ = 'ffi' + + def _get_own_repr(self): + raise NotImplementedError + + def _addr_repr(self, address): + if address == 0: + return 'NULL' + else: + if address < 0: + address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) + return '0x%x' % address + + def __repr__(self, c_name=None): + own = self._get_own_repr() + return '' % (c_name or self._get_c_name(), own) + + def _convert_to_address(self, BClass): + if BClass is None: + raise TypeError("cannot convert %r to an address" % ( + self._get_c_name(),)) + else: + raise TypeError("cannot convert %r to %r" % ( + self._get_c_name(), BClass._get_c_name())) + + @classmethod + def _get_size(cls): + return ctypes.sizeof(cls._ctype) + + def _get_size_of_instance(self): + return ctypes.sizeof(self._ctype) + + @classmethod + def _cast_from(cls, source): + raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) + + def _cast_to_integer(self): + return self._convert_to_address(None) + + @classmethod + def _alignment(cls): + return ctypes.alignment(cls._ctype) + + def __iter__(self): + raise TypeError("cdata %r does not support iteration" % ( + self._get_c_name()),) + + def _make_cmp(name): + cmpfunc = getattr(operator, name) + def cmp(self, other): + if isinstance(other, CTypesData): + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + else: + return NotImplemented + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = 
_make_cmp('__ge__') + + def __hash__(self): + return hash(type(self)) ^ hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __eq__(self, other): + return self is other + + def __ne__(self, other): + return self is not other + + def __hash__(self): + return object.__hash__(self) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + _automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' + + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + __bool__ = __nonzero__ + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': 
ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (int, long, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + kind1 = kind + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + kind = kind1 + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def __int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (int, long, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return self._value 
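+            # (Note: these 'if kind == ...' tests run once, at
+            # class-creation time; only the branches matching this
+            # primitive's kind actually define methods on CTypesPrimitive.)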
+ + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, '__float__'): + source = float(source) + else: + source = int(source) + else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long)): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # > + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + def __nonzero__(self): + return ord(self._value) != 0 + else: + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + @staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + 
return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: + return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in 
range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = 
ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + 
callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object (got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + def gcp(self, cdata, destructor): + BType = self.typeof(cdata) + + if destructor is None: + if not (hasattr(BType, '_gcp_type') and + BType._gcp_type is BType): + raise TypeError("Can remove destructor only on a object " + "previously returned by ffi.gc()") + cdata._destructor = None + return None + + try: + gcp_type = BType._gcp_type + except AttributeError: + class CTypesDataGcp(BType): + __slots__ = 
['_orig', '_destructor'] + def __del__(self): + if self._destructor is not None: + self._destructor(self._orig) + gcp_type = BType._gcp_type = CTypesDataGcp + new_cdata = self.cast(gcp_type, cdata) + new_cdata._orig = cdata + new_cdata._destructor = destructor + return new_cdata + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset = BItem._get_size() * fieldname + if offset > sys.maxsize: + raise OverflowError + return (BItem, offset) + else: + raise TypeError(type(fieldname)) + + def rawaddressof(self, BTypePtr, cdata, offset=None): + if isinstance(cdata, CTypesBaseStructOrUnion): + ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) + elif isinstance(cdata, CTypesGenericPtr): + if offset is None or not issubclass(type(cdata)._BItem, + CTypesBaseStructOrUnion): + raise TypeError("unexpected cdata type") + ptr = type(cdata)._to_ctypes(cdata) + elif isinstance(cdata, CTypesGenericArray): + ptr = type(cdata)._to_ctypes(cdata) + else: + raise TypeError("expected a ") + if offset: + ptr = ctypes.cast( + ctypes.c_void_p( + ctypes.cast(ptr, ctypes.c_void_p).value + offset), + type(ptr)) + return BTypePtr._from_ctypes(ptr) + + +class CTypesLibrary(object): + + def __init__(self, backend, cdll): + self.backend = backend + self.cdll = cdll + + def load_function(self, BType, name): + c_func = getattr(self.cdll, name) + funcobj = BType._from_ctypes(c_func) + funcobj._name = name + return funcobj + + def read_variable(self, BType, name): + try: + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + except AttributeError as e: + raise NotImplementedError(e) + return BType._from_ctypes(ctypes_obj) + + def write_variable(self, BType, name, value): + new_ctypes_obj = BType._to_ctypes(value) + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + ctypes.memmove(ctypes.addressof(ctypes_obj), + ctypes.addressof(new_ctypes_obj), + ctypes.sizeof(BType._ctype)) diff --git a/lib/python3.4/site-packages/cffi/cffi_opcode.py b/lib/python3.4/site-packages/cffi/cffi_opcode.py new file mode 100644 index 0000000..dc40030 --- /dev/null +++ b/lib/python3.4/site-packages/cffi/cffi_opcode.py @@ -0,0 +1,179 @@ + +class CffiOp(object): + def __init__(self, op, arg): + self.op = op + self.arg = arg + + def as_c_expr(self): + if self.op is None: + assert isinstance(self.arg, str) + return '(_cffi_opcode_t)(%s)' % (self.arg,) + classname = CLASS_NAME[self.op] + return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) + + def as_python_bytes(self): + if self.op is None and self.arg.isdigit(): + value = int(self.arg) # non-negative: '-' not in self.arg + if value >= 2**31: + raise OverflowError("cannot emit %r: limited to 2**31-1" + % (self.arg,)) + return format_four_bytes(value) + if isinstance(self.arg, str): + from .ffiplatform import VerificationError + raise VerificationError("cannot emit to Python: %r" % 
(self.arg,)) + return format_four_bytes((self.arg << 8) | self.op) + + def __str__(self): + classname = CLASS_NAME.get(self.op, self.op) + return '(%s %s)' % (classname, self.arg) + +def format_four_bytes(num): + return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( + (num >> 24) & 0xFF, + (num >> 16) & 0xFF, + (num >> 8) & 0xFF, + (num ) & 0xFF) + +OP_PRIMITIVE = 1 +OP_POINTER = 3 +OP_ARRAY = 5 +OP_OPEN_ARRAY = 7 +OP_STRUCT_UNION = 9 +OP_ENUM = 11 +OP_FUNCTION = 13 +OP_FUNCTION_END = 15 +OP_NOOP = 17 +OP_BITFIELD = 19 +OP_TYPENAME = 21 +OP_CPYTHON_BLTN_V = 23 # varargs +OP_CPYTHON_BLTN_N = 25 # noargs +OP_CPYTHON_BLTN_O = 27 # O (i.e. a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 + +_NUM_PRIM = 48 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in 
list(globals().items()): + if _name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/lib/python3.4/site-packages/cffi/commontypes.py b/lib/python3.4/site-packages/cffi/commontypes.py new file mode 100644 index 0000000..edf6c8e --- /dev/null +++ b/lib/python3.4/site-packages/cffi/commontypes.py @@ -0,0 +1,79 @@ +import sys +from . import api, model + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise api.FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise api.FFIError( + "Unsupported type: %r. Please look at " + "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " + "and file an issue if you think this type should really " + "be supported." % (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/lib/python3.4/site-packages/cffi/cparser.py b/lib/python3.4/site-packages/cffi/cparser.py new file mode 100644 index 0000000..b49d8bd --- /dev/null +++ b/lib/python3.4/site-packages/cffi/cparser.py @@ -0,0 +1,849 @@ +from . import api, model +from .commontypes import COMMON_TYPES, resolve_common_type +try: + from . 
import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$", + re.DOTALL | re.MULTILINE) +_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)" + r"\b((?:[^\n\\]|\\.)*?)$", + re.DOTALL | re.MULTILINE) +_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") +_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") +_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") +_r_words = re.compile(r"\w+|\S") +_parser_cache = None +_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE) +_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b") +_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b") +_r_cdecl = re.compile(r"\b__cdecl\b") +_r_extern_python = re.compile(r'\bextern\s*"' + r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.') +_r_star_const_space = re.compile( # matches "* const " + r"[*]\s*((const|volatile|restrict)\b\s*)+") + +def _get_parser(): + global _parser_cache + if _parser_cache is None: + _parser_cache = pycparser.CParser() + return _parser_cache + +def _workaround_for_old_pycparser(csource): + # Workaround for a pycparser issue (fixed between pycparser 2.10 and + # 2.14): "char*const***" gives us a wrong syntax tree, the same as + # for "char***(*const)". This means we can't tell the difference + # afterwards. But "char(*const(***))" gives us the right syntax + # tree. The issue only occurs if there are several stars in + # sequence with no parenthesis inbetween, just possibly qualifiers. + # Attempt to fix it by adding some parentheses in the source: each + # time we see "* const" or "* const *", we add an opening + # parenthesis before each star---the hard part is figuring out where + # to close them. + parts = [] + while True: + match = _r_star_const_space.search(csource) + if not match: + break + #print repr(''.join(parts)+csource), '=>', + parts.append(csource[:match.start()]) + parts.append('('); closing = ')' + parts.append(match.group()) # e.g. 
"* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + # + # input: `extern "Python+C" int foo(int);` + # output: + # void __cffi_extern_python_plus_c_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + if 'C' in match.group(1): + parts.append('void __cffi_extern_python_plus_c_start; ') + else: + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = csource.find('}', endpos) + if closing < 0: + raise api.CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... }'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise api.CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _preprocess(csource): + # Remove comments. NOTE: this only work because the cdef() section + # should not contain any string literal! + csource = _r_comment.sub(' ', csource) + # Remove the "#define FOO x" lines + macros = {} + for match in _r_define.finditer(csource): + macroname, macrovalue = match.groups() + macrovalue = macrovalue.replace('\\\n', '').strip() + macros[macroname] = macrovalue + csource = _r_define.sub('', csource) + # + if pycparser.__version__ < '2.14': + csource = _workaround_for_old_pycparser(csource) + # + # BIG HACK: replace WINAPI or __stdcall with "volatile const". + # It doesn't make sense for the return type of a function to be + # "volatile volatile const", so we abuse it to detect __stdcall... + # Hack number 2 is that "int(volatile *fptr)();" is not valid C + # syntax, so we place the "volatile" before the opening parenthesis. + csource = _r_stdcall2.sub(' volatile volatile const(', csource) + csource = _r_stdcall1.sub(' volatile volatile const ', csource) + csource = _r_cdecl.sub(' ', csource) + # + # Replace `extern "Python"` with start/end markers + csource = _preprocess_extern_python(csource) + # + # Replace "[...]" with "[__dotdotdotarray__]" + csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) + # + # Replace "...}" with "__dotdotdotNUM__}". This construction should + # occur only at the end of enums; at the end of structs we have "...;}" + # and at the end of vararg functions "...);". Also replace "=...[,}]" + # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when + # giving an unknown value. 
+ matches = list(_r_partial_enum.finditer(csource)) + for number, match in enumerate(reversed(matches)): + p = match.start() + if csource[p] == '=': + p2 = csource.find('...', p, match.end()) + assert p2 > p + csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, + csource[p2+3:]) + else: + assert csource[p:p+3] == '...' + csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, + csource[p+3:]) + # Replace all remaining "..." with the same name, "__dotdotdot__", + # which is declared with a typedef for the purpose of C parsing. + return csource.replace('...', ' __dotdotdot__ '), macros + +def _common_type_names(csource): + # Look in the source for what looks like usages of types from the + # list of common types. A "usage" is approximated here as the + # appearance of the word, minus a "definition" of the type, which + # is the last word in a "typedef" statement. Approximative only + # but should be fine for all the common types. + look_for_words = set(COMMON_TYPES) + look_for_words.add(';') + look_for_words.add(',') + look_for_words.add('(') + look_for_words.add(')') + look_for_words.add('typedef') + words_used = set() + is_typedef = False + paren = 0 + previous_word = '' + for word in _r_words.findall(csource): + if word in look_for_words: + if word == ';': + if is_typedef: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + is_typedef = False + elif word == 'typedef': + is_typedef = True + paren = 0 + elif word == '(': + paren += 1 + elif word == ')': + paren -= 1 + elif word == ',': + if is_typedef and paren == 0: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + else: # word in COMMON_TYPES + words_used.add(word) + previous_word = word + return words_used + + +class Parser(object): + + def __init__(self): + self._declarations = {} + self._included_declarations = set() + self._anonymous_counter = 0 + self._structnode2type = weakref.WeakKeyDictionary() + self._options = {} + self._int_constants = {} + self._recomplete = [] + self._uses_new_feature = None + + def _parse(self, csource): + csource, macros = _preprocess(csource) + # XXX: for more efficiency we would need to poke into the + # internals of CParser... the following registers the + # typedefs, because their presence or absence influences the + # parsing itself (but what they are typedef'ed to plays no role) + ctn = _common_type_names(csource) + typenames = [] + for name in sorted(self._declarations): + if name.startswith('typedef '): + name = name[8:] + typenames.append(name) + ctn.discard(name) + typenames += sorted(ctn) + # + csourcelines = ['typedef int %s;' % typename for typename in typenames] + csourcelines.append('typedef int __dotdotdot__;') + csourcelines.append(csource) + csource = '\n'.join(csourcelines) + if lock is not None: + lock.acquire() # pycparser is not thread-safe... 
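# Aside (hypothetical inputs, standalone sketch): the prelude assembled
# above pre-declares every known type name as 'typedef int NAME;' purely
# so that pycparser can tokenize the cdef text; what each name really
# means is resolved later, not from these dummy typedefs.
typenames_demo = ['size_t', 'uint32_t']
csource_demo = "size_t f(uint32_t x);"
lines_demo = ['typedef int %s;' % t for t in typenames_demo]
lines_demo.append('typedef int __dotdotdot__;')
print('\n'.join(lines_demo + [csource_demo]))
# typedef int size_t;
# typedef int uint32_t;
# typedef int __dotdotdot__;
# size_t f(uint32_t x);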
+ try: + ast = _get_parser().parse(csource) + except pycparser.c_parser.ParseError as e: + self.convert_pycparser_error(e, csource) + finally: + if lock is not None: + lock.release() + # csource will be used to find buggy source text + return ast, macros, csource + + def _convert_pycparser_error(self, e, csource): + # xxx look for ":NUM:" at the start of str(e) and try to interpret + # it as a line number + line = None + msg = str(e) + if msg.startswith(':') and ':' in msg[1:]: + linenum = msg[1:msg.find(':',1)] + if linenum.isdigit(): + linenum = int(linenum, 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise api.CDefError(msg) + + def parse(self, csource, override=False, packed=False, dllexport=False): + prev_options = self._options + try: + self._options = {'override': override, + 'packed': packed, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + # + try: + self._inside_extern_python = '__cffi_extern_python_stop' + for decl in iterator: + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise api.CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) + and decl.type.type.names[-1] == '__dotdotdot__'): + realtype = self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names == ['__dotdotdot__']): + realtype = model.unknown_ptr_type(decl.name) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name) + self._declare('typedef ' + decl.name, realtype, quals=quals) + else: + raise api.CDefError("unrecognized construct", decl) + except api.FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise api.FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + 
self._add_integer_constant(key, value) + elif value == '...': + self._declare('macro ' + key, value) + else: + raise api.CDefError( + 'only supports one of the following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python == '__cffi_extern_python_start': + tag = 'extern_python ' + elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': + tag = 'extern_python_plus_c ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise api.CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern "Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # "void __cffi_extern_python_stop;" + self._inside_extern_python = decl.name + else: + if self._inside_extern_python !='__cffi_extern_python_stop': + raise api.CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise api.CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if prevobj is obj and prevquals == quals: + return + if not self._options.get('override'): + raise api.FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + 
self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. 
get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise api.FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise api.FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def _parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise api.CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise api.CDefError( + "%s: a function with only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) + for argdeclnode in params] + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we absure them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. 
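# Aside (standalone sketch): the "volatile volatile const" trick the
# comment above refers to, reproduced here with the same regexes that
# are defined near the top of this file.  _parse_function_type() then
# recognises the impossible qualifier triple on the return type as
# marking a __stdcall function.
import re

_r_stdcall1_demo = re.compile(r"\b(__stdcall|WINAPI)\b")
_r_stdcall2_demo = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
src_demo = "int __stdcall f(int);"
src_demo = _r_stdcall2_demo.sub(' volatile volatile const(', src_demo)
src_demo = _r_stdcall1_demo.sub(' volatile volatile const ', src_demo)
print(src_demo)   # int  volatile volatile const  f(int);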
+ abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) + + def _as_func_arg(self, type, quals): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item, quals) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node of the AST. + # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". + if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp, _ = self._declarations.get(key, (None, None)) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. 
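# Aside (usage-level sketch; assumes the cffi package is importable with
# a compiled backend): pycparser hands this parser the same 'type' node
# for both declarations below, and the caching above ensures a single
# StructType is built for them.
import cffi

_ffi_demo = cffi.FFI()
_ffi_demo.cdef("typedef struct { int x; } foo_t, *foo_p;")
print(_ffi_demo.typeof("foo_t"))        # <ctype 'foo_t'>
print(_ffi_demo.typeof("foo_p").item)   # the same underlying ctype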
+ if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise api.CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. + self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options.get('packed') + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise api.CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if s.startswith('0'): + if s.startswith('0x') or s.startswith('0X'): + return int(s, 16) + return int(s, 8) + elif '1' <= s[0] <= '9': + return int(s, 10) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise api.CDefError("invalid constant %r" % (s,)) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if partial_length_ok: + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + self._partial_length = True + return '...' 
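# Aside (hedged standalone mirror of the character-constant branch
# above; _parse_char_constant_demo is a hypothetical helper): only the
# "'x'" form and the escaped four-character form "'\x'" are accepted,
# and the result is the code of the character just before the closing
# quote.
def _parse_char_constant_demo(s):
    assert s[0] == "'" and s[-1] == "'"
    assert len(s) == 3 or (len(s) == 4 and s[1] == "\\")
    return ord(s[-2])

print(_parse_char_constant_demo("'A'"))    # 65
print(_parse_char_constant_demo(r"'\n'"))  # 110, i.e. ord('n'); the escape
                                           # itself is not interpreted here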
+ # + raise api.FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + assert typenames[-1] == '__dotdotdot__' + if len(typenames) == 1: + return model.unknown_type(decl.name) + + if (typenames[:-1] == ['float'] or + typenames[:-1] == ['double']): + # not for 'long double' so far + result = model.UnknownFloatType(decl.name) + else: + for t in typenames[:-1]: + if t not in ['int', 'short', 'long', 'signed', + 'unsigned', 'char']: + raise api.FFIError(':%d: bad usage of "..."' % + decl.coord.line) + result = model.UnknownIntegerType(decl.name) + + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef %s... 
%s'" % ( + ' '.join(typenames[:-1]), decl.name) + + return result diff --git a/lib/python3.4/site-packages/cffi/ffiplatform.py b/lib/python3.4/site-packages/cffi/ffiplatform.py new file mode 100644 index 0000000..d2daa8e --- /dev/null +++ b/lib/python3.4/site-packages/cffi/ffiplatform.py @@ -0,0 +1,121 @@ +import sys, os + + +class VerificationError(Exception): + """ An error raised when verification fails + """ + +class VerificationMissing(Exception): + """ An error raised when incomplete structures are passed into + cdef, but no verification has been done + """ + + +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', + 'extra_objects', 'depends'] + +def get_extension(srcfilename, modname, sources=(), **kwds): + from distutils.core import Extension + allsources = [srcfilename] + for src in sources: + allsources.append(os.path.normpath(src)) + return Extension(name=modname, sources=allsources, **kwds) + +def compile(tmpdir, ext, compiler_verbose=0): + """Compile a C extension module using distutils.""" + + saved_environ = os.environ.copy() + try: + outputfilename = _build(tmpdir, ext, compiler_verbose) + outputfilename = os.path.abspath(outputfilename) + finally: + # workaround for a distutils bugs where some env vars can + # become longer and longer every time it is used + for key, value in saved_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + return outputfilename + +def _build(tmpdir, ext, compiler_verbose=0): + # XXX compact but horrible :-( + from distutils.core import Distribution + import distutils.errors, distutils.log + # + dist = Distribution({'ext_modules': [ext]}) + dist.parse_config_files() + options = dist.get_option_dict('build_ext') + options['force'] = ('ffiplatform', True) + options['build_lib'] = ('ffiplatform', tmpdir) + options['build_temp'] = ('ffiplatform', tmpdir) + # + try: + old_level = distutils.log.set_threshold(0) or 0 + try: + distutils.log.set_verbosity(compiler_verbose) + dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() + finally: + distutils.log.set_threshold(old_level) + except (distutils.errors.CompileError, + distutils.errors.LinkError) as e: + raise VerificationError('%s: %s' % (e.__class__.__name__, e)) + # + return soname + +try: + from os.path import samefile +except ImportError: + def samefile(f1, f2): + return os.path.abspath(f1) == os.path.abspath(f2) + +def maybe_relative_path(path): + if not os.path.isabs(path): + return path # already relative + dir = path + names = [] + while True: + prevdir = dir + dir, name = os.path.split(prevdir) + if dir == prevdir or not dir: + return path # failed to make it relative + names.append(name) + try: + if samefile(dir, os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + int_or_long = int # Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = 
cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() diff --git a/lib/python3.4/site-packages/cffi/lock.py b/lib/python3.4/site-packages/cffi/lock.py new file mode 100644 index 0000000..db91b71 --- /dev/null +++ b/lib/python3.4/site-packages/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/lib/python3.4/site-packages/cffi/model.py b/lib/python3.4/site-packages/cffi/model.py new file mode 100644 index 0000000..5783034 --- /dev/null +++ b/lib/python3.4/site-packages/cffi/model.py @@ -0,0 +1,602 @@ +import types, sys +import weakref + +from .lock import allocate_lock + + +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. + # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file', quals=0): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... 
:-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) + result = result.replace('&', replace_with) + if '$' in result: + from .ffiplatform import VerificationError + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def is_integer_type(self): + return False + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class BasePrimitiveType(BaseType): + pass + + +class PrimitiveType(BasePrimitiveType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class UnknownIntegerType(BasePrimitiveType): + _attrs_ = ('name',) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def 
build_backend_type(self, ffi, finishlist): + raise NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): + self.args = args + self.result = result + self.ellipsis = ellipsis + self.abi = abi + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. The backend has no + # notion of such a type; it's used temporarily by parsing. + _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + from . import api + raise api.CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) + + +class PointerType(BaseType): + _attrs_ = ('totype', 'quals') + + def __init__(self, totype, quals=0): + self.totype = totype + self.quals = quals + extra = qualify(quals, " *&") + if totype.is_array_type: + extra = "(%s)" % (extra.lstrip(),) + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + brackets = '&[]' + elif length == '...': + brackets = '&[/*...*/]' + else: + brackets = '&[%s]' % length + self.c_name_with_marker = ( + 
self.item.c_name_with_marker.replace('&', brackets)) + + def resolve_length(self, newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length == '...': + from . import api + raise api.CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + +char_array_type = ArrayType(PrimitiveType('char'), None) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + name = self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = False + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + self.build_c_name_with_marker() + + def has_anonymous_struct_fields(self): + if self.fldtypes is None: + return False + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + return True + return False + + def enumfields(self): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if name == '' and isinstance(type, StructOrUnion): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. 
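# Aside (usage-level sketch; assumes the cffi package is importable):
# the flattening that enumfields()/force_flatten() implement is what
# makes the fields of an anonymous inner struct behave like direct
# members of the outer one.
import cffi

_ffi_demo = cffi.FFI()
_ffi_demo.cdef("""
    struct outer_demo {
        int a;
        struct { int b; };   /* anonymous: reported as a direct field */
    };
""")
p = _ffi_demo.new("struct outer_demo *")
p.b = 7
print(p.b)   # 7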
+ names = [] + types = [] + bitsizes = [] + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + fldquals.append(quals) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + # + self.completed = 1 + # + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + sflags = 0 + if self.packed: + sflags = 8 # SF_PACKED + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, sflags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length == '...': + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" % ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + from .ffiplatform import VerificationError + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + from . 
import ffiplatform + raise ffiplatform.VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not self.partial_resolved: + from . import ffiplatform + raise ffiplatform.VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + from . import api + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + import warnings + warnings.warn("%r has no values explicitly defined; next version " + "will refuse to guess which integer type it is " + "meant to be (unsigned/signed, int/long)" + % self._get_c_name()) + smallest_value = largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise api.CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + tp.origin = "unknown_type" + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '$$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, name) + + +global_lock = allocate_lock() + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._backend.__typecache[key] + except KeyError: + pass + except AttributeError: + # initialize the __typecache attribute, either at the module level + # if ffi._backend is a module, or at the class level if ffi._backend + # is some instance. 
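# Aside (minimal sketch of the initialisation pattern described above;
# _get_typecache_demo is a hypothetical helper, not part of this module).
# Note that no name mangling applies here: like global_cache(), this is a
# module-level function, so '__typecache' is a literal attribute name.
import types, weakref

def _get_typecache_demo(backend):
    try:
        return backend.__typecache
    except AttributeError:
        cache = weakref.WeakValueDictionary()
        if isinstance(backend, types.ModuleType):
            backend.__typecache = cache        # attribute on the module itself
        else:
            type(backend).__typecache = cache  # shared via the backend's class
        return cache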
+ if isinstance(ffi._backend, types.ModuleType): + ffi._backend.__typecache = weakref.WeakValueDictionary() + else: + type(ffi._backend).__typecache = weakref.WeakValueDictionary() + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._backend.__typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/lib/python3.4/site-packages/cffi/parse_c_type.h b/lib/python3.4/site-packages/cffi/parse_c_type.h new file mode 100644 index 0000000..a01d89e --- /dev/null +++ b/lib/python3.4/site-packages/cffi/parse_c_type.h @@ -0,0 +1,177 @@ + +/* This part is from file 'cffi/parse_c_type.h'. It is copied at the + beginning of C sources generated by CFFI's ffi.set_source(). */ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. 
a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define _CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 + +#define _CFFI__NUM_PRIM 48 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define _CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // comma-delimited string +}; + +struct _cffi_typename_s { + const char *name; + int type_index; /* if opaque, points to a possibly artificial + OP_STRUCT which is itself opaque */ +}; + +struct _cffi_type_context_s { + _cffi_opcode_t *types; + const struct _cffi_global_s *globals; + const struct _cffi_field_s *fields; + const struct _cffi_struct_union_s *struct_unions; + const struct _cffi_enum_s *enums; + const 
struct _cffi_typename_s *typenames; + int num_globals; + int num_struct_unions; + int num_enums; + int num_typenames; + const char *const *includes; + int num_types; + int flags; /* future extension */ +}; + +struct _cffi_parse_info_s { + const struct _cffi_type_context_s *ctx; + _cffi_opcode_t *output; + unsigned int output_size; + size_t error_location; + const char *error_message; +}; + +struct _cffi_externpy_s { + const char *name; + size_t size_of_result; + void *reserved1, *reserved2; +}; + +#ifdef _CFFI_INTERNAL +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); +static int search_in_globals(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +#endif diff --git a/lib/python3.4/site-packages/cffi/recompiler.py b/lib/python3.4/site-packages/cffi/recompiler.py new file mode 100644 index 0000000..b83fade --- /dev/null +++ b/lib/python3.4/site-packages/cffi/recompiler.py @@ -0,0 +1,1501 @@ +import os, sys, io +from . import ffiplatform, model +from .cffi_opcode import * + +VERSION = "0x2601" +VERSION_EMBEDDED = "0x2701" + + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index 
+ self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): + return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# ____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in tp.enumfields(): + 
self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise ffiplatform.VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. + self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('#define _CFFI_PYTHON_STARTUP_CODE %s' % + (self._string_literal(self.ffi._embedding),)) + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + prnt(''.join(lines)) + version = VERSION_EMBEDDED + else: + version = VERSION + # + # then paste the C source given by the user, verbatim. 
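+        # (For illustration, assuming a user preamble of just
+        # '#include <math.h>', the file emitted below continues roughly:
+        #     /**********************************/
+        #     #include <math.h>
+        #     /**********************************/
+        #     static void *_cffi_types[] = { ... };
+        # -- only a sketch of what the prnt() calls produce.)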
+ prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise ffiplatform.VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise ffiplatform.VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if self._num_externpy: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)%s;' % version) + prnt(' p[1] = &_cffi_type_context;') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", %s, 
&_cffi_type_context);' % ( + self.module_name, version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", %s, &_cffi_type_context);' % ( + self.module_name, version)) + prnt('}') + prnt('#endif') + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. + + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise ffiplatform.VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise ffiplatform.VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = %s," % (VERSION,)) + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would lose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' if (datasize < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' %s = (%s)alloca((size_t)datasize);' % ( + tovar, tp.get_c_name(''))) + self._prnt(' memset((void *)%s, 0, (size_t)datasize);' % (tovar,)) + self._prnt(' if (_cffi_convert_array_from_object(' + '(char *)%s, _cffi_type(%d), %s) < 0)' % ( + tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type(): + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructType): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs + + def _generate_cpy_typedef_collecttype(self, tp, 
name): + self._do_collect_type(tp) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + for type in tp.args: + self._extra_local_variables(type, localvars) + for decl in localvars: + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt(' PyObject **aa;') + prnt() + prnt(' aa = _cffi_unpack_args(args, %d, "%s");' % (len(rng), name)) + prnt(' if (aa == NULL)') + prnt(' return NULL;') + for i in rng: + prnt(' arg%d = aa[%d];' % (i, i)) + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + 
prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' return %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + else: + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if isinstance(tp_result, model.StructOrUnion): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, 
cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a '*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) + while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except ffiplatform.VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or tp.has_anonymous_struct_fields(): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(tp.enumfields()) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '((char *)&((%s)0)->%s) - (char *)0' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _add_missing_struct_unions(self): + # not very nice, but some struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. 
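+        # (For illustration: a struct that never got a usable C-level
+        # name is tracked internally under a placeholder like '$1'; the
+        # loop below still emits a _cffi_struct_unions entry for it, and
+        # '_IO_FILE' is special-cased so it can stand in for FILE.)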
+ lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
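+    # (For illustration, for an integer constant FOO the method below
+    # emits roughly:
+    #     static int _cffi_const_FOO(unsigned long long *o)
+    #     {
+    #         int n = (FOO) <= 0;
+    #         *o = (unsigned long long)((FOO) | 0);
+    #         return n;
+    #     }
+    # 'n' records the sign, and '(FOO) | 0' makes the C compiler reject
+    # a FOO that is not of integer type.)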
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise ffiplatform.VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise ffiplatform.VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that a cast from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) + decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s", %s };' % (name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise ffiplatform.VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _string_literal(self, s): + def _char_repr(c): + # escape with a '\' the characters '\', '"' or (for trigraphs) '?' + if c in '\\"?': return '\\' + c + if ' ' <= c < '\x7F': return c + if c == '\n': return '\\n' + return '\\%03o' % ord(c) + lines = [] + for line in s.splitlines(True) or ['']: + lines.append('"%s"' % ''.join([_char_repr(c) for c in line])) + return ' \\\n'.join(lines) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + 
_emit_bytecode_ConstPointerType = _emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise ffiplatform.VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + f = NativeIO() + recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
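+        # (For illustration: the patch below swaps MSVCCompiler's
+        # '_remove_visual_c_ref' for an identity lambda, so the manifest
+        # survives linking; _patch_meth() records the original method in
+        # 'patchlist' and _unpatch_meths() later restores it.)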
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + +def _verify(ffi, module_name, preamble, *args, **kwds): + # FOR TESTS ONLY + from testing.udir import udir + import imp + assert module_name not in sys.modules, "module name conflict: %r" % ( + module_name,) + kwds.setdefault('tmpdir', str(udir)) + outputfilename = recompile(ffi, module_name, preamble, *args, **kwds) + module = imp.load_dynamic(module_name, outputfilename) + # + # hack hack hack: copy all *bound methods* from module.ffi back to the + # ffi instance. Then calls like ffi.new() will invoke module.ffi.new(). 
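+    # (For illustration: after the loop below, 'ffi.new' is the bound
+    # method 'module.ffi.new', 'ffi.cast' is 'module.ffi.cast', and so
+    # on; ffi._typeof is then disabled and any public name the compiled
+    # module does not provide is set to NotImplemented.)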
+ for name in dir(module.ffi): + if not name.startswith('_'): + attr = getattr(module.ffi, name) + if attr is not getattr(ffi, name, object()): + setattr(ffi, name, attr) + def typeof_disabled(*args, **kwds): + raise NotImplementedError + ffi._typeof = typeof_disabled + for name in dir(ffi): + if not name.startswith('_') and not hasattr(module.ffi, name): + setattr(ffi, name, NotImplemented) + return module.lib diff --git a/lib/python3.4/site-packages/cffi/setuptools_ext.py b/lib/python3.4/site-packages/cffi/setuptools_ext.py new file mode 100644 index 0000000..9c6436d --- /dev/null +++ b/lib/python3.4/site-packages/cffi/setuptools_ext.py @@ -0,0 +1,161 @@ +import os + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from distutils.errors import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + from distutils.core import Extension + from distutils.command.build_ext import build_ext + from distutils.dir_util import mkpath + from distutils import log + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before 
we turn the ffi into C code. To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. + if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. + + +def _add_py_module(dist, ffi, module_name): + from distutils.dir_util import mkpath + from distutils.command.build_py import build_py + from distutils.command.build_ext import build_ext + from distutils import log + from cffi import recompiler + + def generate_mod(py_file): + log.info("generating cffi module %r" % py_file) + mkpath(os.path.dirname(py_file)) + updated = recompiler.make_py_source(ffi, module_name, py_file) + if not updated: + log.info("already up-to-date") + + base_class = dist.cmdclass.get('build_py', build_py) + class build_py_make_mod(base_class): + def run(self): + base_class.run(self) + module_path = module_name.split('.') + module_path[-1] += '.py' + generate_mod(os.path.join(self.build_lib, *module_path)) + dist.cmdclass['build_py'] = build_py_make_mod + + # the following is only for "build_ext -i" + base_class_2 = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class_2): + def run(self): + base_class_2.run(self) + if self.inplace: + # from get_ext_fullpath() in distutils/command/build_ext.py + module_path = module_name.split('.') + package = '.'.join(module_path[:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = build_py.get_package_dir(package) + file_name = module_path[-1] + '.py' + generate_mod(os.path.join(package_dir, file_name)) + dist.cmdclass['build_ext'] = build_ext_make_mod + +def cffi_modules(dist, attr, value): + assert attr == 'cffi_modules' + if isinstance(value, basestring): + value = [value] + + for cffi_module in value: + add_cffi_module(dist, cffi_module) diff --git a/lib/python3.4/site-packages/cffi/vengine_cpy.py b/lib/python3.4/site-packages/cffi/vengine_cpy.py new file mode 100644 index 0000000..6bb0986 --- /dev/null +++ b/lib/python3.4/site-packages/cffi/vengine_cpy.py @@ -0,0 +1,1010 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, imp +from . import model, ffiplatform + + +class VCPythonEngine(object): + _class_key = 'x' + _gen_python_module = True + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self._struct_pending_verification = {} + self._types_of_builtin_functions = {} + + def patch_extension_kwds(self, kwds): + pass + + def find_module(self, module_name, path, so_suffixes): + try: + f, filename, descr = imp.find_module(module_name, path) + except ImportError: + return None + if f is not None: + f.close() + # Note that after a setuptools installation, there are both .py + # and .so files with the same basename. 
The code here relies on
+        # imp.find_module() locating the .so in priority.
+        if descr[0] not in so_suffixes:
+            return None
+        return filename
+
+    def collect_types(self):
+        self._typesdict = {}
+        self._generate("collecttype")
+
+    def _prnt(self, what=''):
+        self._f.write(what + '\n')
+
+    def _gettypenum(self, type):
+        # a KeyError here is a bug.  please report it!  :-)
+        return self._typesdict[type]
+
+    def _do_collect_type(self, tp):
+        if ((not isinstance(tp, model.PrimitiveType)
+             or tp.name == 'long double')
+                and tp not in self._typesdict):
+            num = len(self._typesdict)
+            self._typesdict[tp] = num
+
+    def write_source_to_f(self):
+        self.collect_types()
+        #
+        # The new module will have a _cffi_setup() function that receives
+        # objects from the ffi world, and that calls some setup code in
+        # the module.  This setup code is split in several independent
+        # functions, e.g. one per constant.  The functions are "chained"
+        # by ending in a tail call to each other.
+        #
+        # This is further split in two chained lists, depending on if we
+        # can do it at import-time or if we must wait for _cffi_setup() to
+        # provide us with the <ctype> objects.  This is needed because we
+        # need the values of the enum constants in order to build the
+        # <ctype 'enum xxx'> that we may have to pass to _cffi_setup().
+        #
+        # The following two 'chained_list_constants' items contain
+        # the head of these two chained lists, as a string that gives the
+        # call to do, if any.
+        self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)']
+        #
+        prnt = self._prnt
+        # first paste some standard set of lines that are mostly '#define'
+        prnt(cffimod_header)
+        prnt()
+        # then paste the C source given by the user, verbatim.
+        prnt(self.verifier.preamble)
+        prnt()
+        #
+        # call generate_cpy_xxx_decl(), for every xxx found from
+        # ffi._parser._declarations.  This generates all the functions.
+        self._generate("decl")
+        #
+        # implement the function _cffi_setup_custom() as calling the
+        # head of the chained list.
+        self._generate_setup_custom()
+        prnt()
+        #
+        # produce the method table, including the entries for the
+        # generated Python->C function wrappers, which are done
+        # by generate_cpy_function_method().
+        prnt('static PyMethodDef _cffi_methods[] = {')
+        self._generate("method")
+        prnt('  {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},')
+        prnt('  {NULL, NULL, 0, NULL}    /* Sentinel */')
+        prnt('};')
+        prnt()
+        #
+        # standard init.
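The chained-list construction described in the comment above is easier to see in a minimal pure-Python sketch (illustrative only, not part of cffi): each registered setup step becomes the new head of the chain and ends by calling the previous head, so one call to the current head runs every step.

    # sketch only; all names here are made up
    chain_head = lambda lib: 0        # plays the role of '((void)lib,0)'

    def register(step):
        # prepend a step, as _generate_cpy_const() does in the generated C
        global chain_head
        prev = chain_head
        def new_head(lib):
            step(lib)                 # this constant's setup...
            return prev(lib)          # ...then tail-call the previous head
        chain_head = new_head

    register(lambda lib: lib.__setitem__('A', 1))
    register(lambda lib: lib.__setitem__('B', 2))
    lib = {}
    chain_head(lib)                   # one call runs every registered step
    assert lib == {'A': 1, 'B': 2}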
+        modname = self.verifier.get_module_name()
+        constants = self._chained_list_constants[False]
+        prnt('#if PY_MAJOR_VERSION >= 3')
+        prnt()
+        prnt('static struct PyModuleDef _cffi_module_def = {')
+        prnt('  PyModuleDef_HEAD_INIT,')
+        prnt('  "%s",' % modname)
+        prnt('  NULL,')
+        prnt('  -1,')
+        prnt('  _cffi_methods,')
+        prnt('  NULL, NULL, NULL, NULL')
+        prnt('};')
+        prnt()
+        prnt('PyMODINIT_FUNC')
+        prnt('PyInit_%s(void)' % modname)
+        prnt('{')
+        prnt('  PyObject *lib;')
+        prnt('  lib = PyModule_Create(&_cffi_module_def);')
+        prnt('  if (lib == NULL)')
+        prnt('    return NULL;')
+        prnt('  if (%s < 0 || _cffi_init() < 0) {' % (constants,))
+        prnt('    Py_DECREF(lib);')
+        prnt('    return NULL;')
+        prnt('  }')
+        prnt('  return lib;')
+        prnt('}')
+        prnt()
+        prnt('#else')
+        prnt()
+        prnt('PyMODINIT_FUNC')
+        prnt('init%s(void)' % modname)
+        prnt('{')
+        prnt('  PyObject *lib;')
+        prnt('  lib = Py_InitModule("%s", _cffi_methods);' % modname)
+        prnt('  if (lib == NULL)')
+        prnt('    return;')
+        prnt('  if (%s < 0 || _cffi_init() < 0)' % (constants,))
+        prnt('    return;')
+        prnt('  return;')
+        prnt('}')
+        prnt()
+        prnt('#endif')
+
+    def load_library(self, flags=None):
+        # XXX review all usages of 'self' here!
+        # import it as a new extension module
+        imp.acquire_lock()
+        try:
+            if hasattr(sys, "getdlopenflags"):
+                previous_flags = sys.getdlopenflags()
+            try:
+                if hasattr(sys, "setdlopenflags") and flags is not None:
+                    sys.setdlopenflags(flags)
+                module = imp.load_dynamic(self.verifier.get_module_name(),
+                                          self.verifier.modulefilename)
+            except ImportError as e:
+                error = "importing %r: %s" % (self.verifier.modulefilename, e)
+                raise ffiplatform.VerificationError(error)
+            finally:
+                if hasattr(sys, "setdlopenflags"):
+                    sys.setdlopenflags(previous_flags)
+        finally:
+            imp.release_lock()
+        #
+        # call loading_cpy_struct() to get the struct layout inferred by
+        # the C compiler
+        self._load(module, 'loading')
+        #
+        # the C code will need the <ctype> objects.  Collect them in
+        # order in a list.
+        revmapping = dict([(value, key)
+                           for (key, value) in self._typesdict.items()])
+        lst = [revmapping[i] for i in range(len(revmapping))]
+        lst = list(map(self.ffi._get_cached_btype, lst))
+        #
+        # build the FFILibrary class and instance and call _cffi_setup().
+        # this will set up some fields like '_cffi_types', and only then
+        # it will invoke the chained list of functions that will really
+        # build (notably) the constant objects, as <cdata> if they are
+        # pointers, and store them as attributes on the 'library' object.
+        class FFILibrary(object):
+            _cffi_python_module = module
+            _cffi_ffi = self.ffi
+            _cffi_dir = []
+            def __dir__(self):
+                return FFILibrary._cffi_dir + list(self.__dict__)
+        library = FFILibrary()
+        if module._cffi_setup(lst, ffiplatform.VerificationError, library):
+            import warnings
+            warnings.warn("reimporting %r might overwrite older definitions"
+                          % (self.verifier.get_module_name()))
+        #
+        # finally, call the loaded_cpy_xxx() functions.  This will perform
+        # the final adjustments, like copying the Python->C wrapper
+        # functions from the module to the 'library' object, and setting
+        # up the FFILibrary class with properties for the global C variables.
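For orientation: this engine backs the deprecated ffi.verify() API. A minimal end-to-end use, assuming cffi and a C compiler are available (the function name is made up), looks like this:

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("int add(int x, int y);")
    lib = ffi.verify("int add(int x, int y) { return x + y; }")
    assert lib.add(2, 3) == 5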
+ self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise ffiplatform.VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' if (datasize < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' %s = alloca((size_t)datasize);' % (tovar,)) + self._prnt(' memset((void *)%s, 0, (size_t)datasize);' % (tovar,)) + self._prnt(' if (_cffi_convert_array_from_object(' + '(char *)%s, _cffi_type(%d), %s) < 0)' % ( + tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type(): + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, 
(model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructType): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + for type in tp.args: + self._extra_local_variables(type, localvars) + for decl in localvars: + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' return %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + else: + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + 
meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
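A concrete illustration of the exact-type check emitted just below (my reconstruction, not verbatim generator output): for a cdef like struct point { int n; double x; }, the integer field accepts any integer type, while taking &p->x into a double * makes the C compiler warn or error if the declared type is not exact.

    # reconstruction of what _generate_struct_or_union_decl() would emit
    emitted_checker = r'''
    static void _cffi_check_struct_point(struct point *p)
    {
      /* only to generate compile-time warnings or errors */
      (void)p;
      (void)((p->n) << 1);    /* integer field: any integer type accepted */
      { double *tmp = &p->x; (void)tmp; }  /* exact type required */
    }
    '''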
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except ffiplatform.VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise ffiplatform.VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
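The slicing in _loading_struct_or_union() above decodes the layout list as [total size, total alignment, then offset/size pairs]. A small worked example with made-up numbers for a struct with two 4-byte fields:

    layout = [8, 4, 0, 4, 4, 4]   # hypothetical: totalsize, align, ofs/size pairs
    totalsize, totalalignment = layout[0], layout[1]
    fieldofs = layout[2::2]       # -> [0, 4]
    fieldsize = layout[3::2]      # -> [4, 4]
    assert fieldofs == [0, 4] and fieldsize == [4, 4]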
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def 
_generate_cpy_enum_decl(self, tp, name, prefix='enum'):
+        if tp.partial:
+            for enumerator in tp.enumerators:
+                self._generate_cpy_const(True, enumerator, delayed=False)
+            return
+        #
+        funcname = self._enum_funcname(prefix, name)
+        prnt = self._prnt
+        prnt('static int %s(PyObject *lib)' % funcname)
+        prnt('{')
+        for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+            self._check_int_constant_value(enumerator, enumvalue,
+                                           "enum %s: " % name)
+        prnt('  return %s;' % self._chained_list_constants[True])
+        self._chained_list_constants[True] = funcname + '(lib)'
+        prnt('}')
+        prnt()
+
+    _generate_cpy_enum_collecttype = _generate_nothing
+    _generate_cpy_enum_method = _generate_nothing
+
+    def _loading_cpy_enum(self, tp, name, module):
+        if tp.partial:
+            enumvalues = [getattr(module, enumerator)
+                          for enumerator in tp.enumerators]
+            tp.enumvalues = tuple(enumvalues)
+            tp.partial_resolved = True
+
+    def _loaded_cpy_enum(self, tp, name, module, library):
+        for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+            setattr(library, enumerator, enumvalue)
+
+    # ----------
+    # macros: for now only for integers
+
+    def _generate_cpy_macro_decl(self, tp, name):
+        if tp == '...':
+            check_value = None
+        else:
+            check_value = tp     # an integer
+        self._generate_cpy_const(True, name, check_value=check_value)
+
+    _generate_cpy_macro_collecttype = _generate_nothing
+    _generate_cpy_macro_method = _generate_nothing
+    _loading_cpy_macro = _loaded_noop
+    _loaded_cpy_macro = _loaded_noop
+
+    # ----------
+    # global variables
+
+    def _generate_cpy_variable_collecttype(self, tp, name):
+        if isinstance(tp, model.ArrayType):
+            tp_ptr = model.PointerType(tp.item)
+        else:
+            tp_ptr = model.PointerType(tp)
+        self._do_collect_type(tp_ptr)
+
+    def _generate_cpy_variable_decl(self, tp, name):
+        if isinstance(tp, model.ArrayType):
+            tp_ptr = model.PointerType(tp.item)
+            self._generate_cpy_const(False, name, tp, vartp=tp_ptr,
+                                     size_too = (tp.length == '...'))
+        else:
+            tp_ptr = model.PointerType(tp)
+            self._generate_cpy_const(False, name, tp_ptr, category='var')
+
+    _generate_cpy_variable_method = _generate_nothing
+    _loading_cpy_variable = _loaded_noop
+
+    def _loaded_cpy_variable(self, tp, name, module, library):
+        value = getattr(library, name)
+        if isinstance(tp, model.ArrayType):   # int a[5] is "constant" in the
+                                              # sense that "a=..." is forbidden
+            if tp.length == '...':
+                assert isinstance(value, tuple)
+                (value, size) = value
+                BItemType = self.ffi._get_cached_btype(tp.item)
+                length, rest = divmod(size, self.ffi.sizeof(BItemType))
+                if rest != 0:
+                    raise ffiplatform.VerificationError(
+                        "bad size: %r does not seem to be an array of %s" %
+                        (name, tp.item))
+                tp = tp.resolve_length(length)
+            # 'value' is a <cdata 'type (*)[]'> which we have to replace with
+            # a <cdata 'type[N]'> if the N is actually known
+            if tp.length is not None:
+                BArray = self.ffi._get_cached_btype(tp)
+                value = self.ffi.cast(BArray, value)
+                setattr(library, name, value)
+            return
+        # remove ptr=<cdata 'int *'> from the library instance, and replace
+        # it by a property on the class, which reads/writes into ptr[0].
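A pure-Python sketch of the property indirection set up just below, with a one-element list standing in for the <cdata 'int *'> (illustrative only, not part of cffi):

    class _LibDemo(object):
        pass

    ptr = [0]                        # stands in for the C-side pointer

    _LibDemo.var = property(lambda lib: ptr[0],
                            lambda lib, value: ptr.__setitem__(0, value))

    lib = _LibDemo()
    lib.var = 42                     # reads and writes go through ptr[0]
    assert ptr[0] == 42 and lib.var == 42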
+        ptr = value
+        delattr(library, name)
+        def getter(library):
+            return ptr[0]
+        def setter(library, value):
+            ptr[0] = value
+        setattr(type(library), name, property(getter, setter))
+        type(library)._cffi_dir.append(name)
+
+    # ----------
+
+    def _generate_setup_custom(self):
+        prnt = self._prnt
+        prnt('static int _cffi_setup_custom(PyObject *lib)')
+        prnt('{')
+        prnt('  return %s;' % self._chained_list_constants[True])
+        prnt('}')
+
+cffimod_header = r'''
+#include <Python.h>
+#include <stddef.h>
+
+/* this block of #ifs should be kept exactly identical between
+   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600   /* MSVC < 2010 */
+   typedef __int8 int8_t;
+   typedef __int16 int16_t;
+   typedef __int32 int32_t;
+   typedef __int64 int64_t;
+   typedef unsigned __int8 uint8_t;
+   typedef unsigned __int16 uint16_t;
+   typedef unsigned __int32 uint32_t;
+   typedef unsigned __int64 uint64_t;
+   typedef __int8 int_least8_t;
+   typedef __int16 int_least16_t;
+   typedef __int32 int_least32_t;
+   typedef __int64 int_least64_t;
+   typedef unsigned __int8 uint_least8_t;
+   typedef unsigned __int16 uint_least16_t;
+   typedef unsigned __int32 uint_least32_t;
+   typedef unsigned __int64 uint_least64_t;
+   typedef __int8 int_fast8_t;
+   typedef __int16 int_fast16_t;
+   typedef __int32 int_fast32_t;
+   typedef __int64 int_fast64_t;
+   typedef unsigned __int8 uint_fast8_t;
+   typedef unsigned __int16 uint_fast16_t;
+   typedef unsigned __int32 uint_fast32_t;
+   typedef unsigned __int64 uint_fast64_t;
+   typedef __int64 intmax_t;
+   typedef unsigned __int64 uintmax_t;
+# else
+#  include <stdint.h>
+# endif
+# if _MSC_VER < 1800   /* MSVC < 2013 */
+   typedef unsigned char _Bool;
+# endif
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
+#  include <alloca.h>
+# endif
+#endif
+
+#if PY_MAJOR_VERSION < 3
+# undef PyCapsule_CheckExact
+# undef PyCapsule_GetPointer
+# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule))
+# define PyCapsule_GetPointer(capsule, name) \
+    (PyCObject_AsVoidPtr(capsule))
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+# define PyInt_FromLong PyLong_FromLong
+#endif
+
+#define _cffi_from_c_double PyFloat_FromDouble
+#define _cffi_from_c_float PyFloat_FromDouble
+#define _cffi_from_c_long PyInt_FromLong
+#define _cffi_from_c_ulong PyLong_FromUnsignedLong
+#define _cffi_from_c_longlong PyLong_FromLongLong
+#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+
+#define _cffi_to_c_double PyFloat_AsDouble
+#define _cffi_to_c_float PyFloat_AsDouble
+
+#define _cffi_from_c_int_const(x)                                        \
+    (((x) > 0) ?                                                         \
+        ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ?      \
+            PyInt_FromLong((long)(x)) :                                  \
+            PyLong_FromUnsignedLongLong((unsigned long long)(x)) :       \
+        ((long long)(x) >= (long long)LONG_MIN) ?                        \
+            PyInt_FromLong((long)(x)) :                                  \
+            PyLong_FromLongLong((long long)(x)))
+
+#define _cffi_from_c_int(x, type)                                        \
+    (((type)-1) > 0 ? /* unsigned */                                     \
+        (sizeof(type) < sizeof(long) ?                                   \
+            PyInt_FromLong((long)x) :                                    \
+         sizeof(type) == sizeof(long) ?                                  \
+            PyLong_FromUnsignedLong((unsigned long)x) :                  \
+            PyLong_FromUnsignedLongLong((unsigned long long)x)) :        \
+        (sizeof(type) <= sizeof(long) ?                                  \
+            PyInt_FromLong((long)x) :                                    \
+            PyLong_FromLongLong((long long)x)))
+
+#define _cffi_to_c_int(o, type)                                          \
+    ((type)(                                                             \
+     sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o)        \
+                                         : (type)_cffi_to_c_i8(o)) :     \
+     sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o)       \
+                                         : (type)_cffi_to_c_i16(o)) :    \
+     sizeof(type) == 4 ? (((type)-1) > 0 ?
(type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + 
+/**********/ +''' diff --git a/lib/python3.4/site-packages/cffi/vengine_gen.py b/lib/python3.4/site-packages/cffi/vengine_gen.py new file mode 100644 index 0000000..46bdb21 --- /dev/null +++ b/lib/python3.4/site-packages/cffi/vengine_gen.py @@ -0,0 +1,671 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os +import types + +from . import model, ffiplatform + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. 
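A minimal sketch of why FFILibrary above subclasses types.ModuleType: instances behave like modules, and, as the comment notes, PyPy's JIT treats module attributes as usually constant. The names below are made up for illustration:

    import types

    class FFILibraryDemo(types.ModuleType):
        _cffi_dir = ['answer']
        def __dir__(self):
            return FFILibraryDemo._cffi_dir

    demo = FFILibraryDemo("")
    demo.answer = 42
    assert demo.answer == 42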
+ self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise ffiplatform.VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = 
model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
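The indirection built by _make_struct_wrapper() above can be sketched in pure Python, with a one-element list standing in for the struct pointer that backend.newp() allocates (illustrative only):

    def make_struct_wrapper(oldfunc, i):
        # pass argument i "by pointer", as the real wrapper does
        def newfunc(*args):
            return oldfunc(*(args[:i] + ([args[i]],) + args[i+1:]))
        return newfunc

    def c_side(p):                # pretends to be 'int f(struct s *p)'
        return p[0] + 1

    wrapped = make_struct_wrapper(c_side, 0)
    assert wrapped(41) == 42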
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except ffiplatform.VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise ffiplatform.VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
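In _load_constant() below, a large unsigned constant read back through a signed 'long long' can come out negative in Python even though the C side reported it as non-negative; the fix-up adds 2**(8*sizeof(long long)). A worked example, assuming an 8-byte long long:

    SIZEOF_LONG_LONG = 8          # assumed for illustration
    raw = -1                      # signed reading of bit pattern 0xFFF...F
    reported_negative = False     # C side evaluated '(name) <= 0' as false
    value = raw
    if value < 0 and not reported_negative:
        value += 1 << (8 * SIZEOF_LONG_LONG)
    assert value == 0xFFFFFFFFFFFFFFFF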
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) < 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise ffiplatform.VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def _generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length == '...': + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden
+            if tp.length == '...':
+                funcname = '_cffi_sizeof_%s' % (name,)
+                BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
+                function = module.load_function(BFunc, funcname)
+                size = function()
+                BItemType = self.ffi._get_cached_btype(tp.item)
+                length, rest = divmod(size, self.ffi.sizeof(BItemType))
+                if rest != 0:
+                    raise ffiplatform.VerificationError(
+                        "bad size: %r does not seem to be an array of %s" %
+                        (name, tp.item))
+                tp = tp.resolve_length(length)
+            tp_ptr = model.PointerType(tp.item)
+            value = self._load_constant(False, tp_ptr, name, module)
+            # 'value' is a <cdata 'type (*)[]'> which we have to replace with
+            # a <cdata 'type[N]'> if the N is actually known
+            if tp.length is not None:
+                BArray = self.ffi._get_cached_btype(tp)
+                value = self.ffi.cast(BArray, value)
+            setattr(library, name, value)
+            type(library)._cffi_dir.append(name)
+            return
+        # remove ptr=<cdata 'int *'> from the library instance, and replace
+        # it by a property on the class, which reads/writes into ptr[0].
+        funcname = '_cffi_var_%s' % name
+        BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
+        function = module.load_function(BFunc, funcname)
+        ptr = function()
+        def getter(library):
+            return ptr[0]
+        def setter(library, value):
+            ptr[0] = value
+        setattr(type(library), name, property(getter, setter))
+        type(library)._cffi_dir.append(name)
+
+cffimod_header = r'''
+#include <stdio.h>
+#include <stdlib.h>
+#include <stddef.h>
+#include <string.h>
+#include <sys/types.h>   /* XXX for ssize_t on some platforms */
+
+/* this block of #ifs should be kept exactly identical between
+   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600   /* MSVC < 2010 */
+   typedef __int8 int8_t;
+   typedef __int16 int16_t;
+   typedef __int32 int32_t;
+   typedef __int64 int64_t;
+   typedef unsigned __int8 uint8_t;
+   typedef unsigned __int16 uint16_t;
+   typedef unsigned __int32 uint32_t;
+   typedef unsigned __int64 uint64_t;
+   typedef __int8 int_least8_t;
+   typedef __int16 int_least16_t;
+   typedef __int32 int_least32_t;
+   typedef __int64 int_least64_t;
+   typedef unsigned __int8 uint_least8_t;
+   typedef unsigned __int16 uint_least16_t;
+   typedef unsigned __int32 uint_least32_t;
+   typedef unsigned __int64 uint_least64_t;
+   typedef __int8 int_fast8_t;
+   typedef __int16 int_fast16_t;
+   typedef __int32 int_fast32_t;
+   typedef __int64 int_fast64_t;
+   typedef unsigned __int8 uint_fast8_t;
+   typedef unsigned __int16 uint_fast16_t;
+   typedef unsigned __int32 uint_fast32_t;
+   typedef unsigned __int64 uint_fast64_t;
+   typedef __int64 intmax_t;
+   typedef unsigned __int64 uintmax_t;
+# else
+#  include <stdint.h>
+# endif
+# if _MSC_VER < 1800   /* MSVC < 2013 */
+   typedef unsigned char _Bool;
+# endif
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
+#  include <alloca.h>
+# endif
+#endif
+'''
diff --git a/lib/python3.4/site-packages/cffi/verifier.py b/lib/python3.4/site-packages/cffi/verifier.py
new file mode 100644
index 0000000..278a73e
--- /dev/null
+++ b/lib/python3.4/site-packages/cffi/verifier.py
@@ -0,0 +1,316 @@
+#
+# DEPRECATED: implementation for ffi.verify()
+#
+import sys, os, binascii, shutil, io
+from . import __version_verifier_modules__
+from .
import ffiplatform + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _hack_at_distutils(): + # Windows-only workaround for some configurations: see + # https://bugs.python.org/issue23246 (Python 2.7 with + # a specific MS compiler suite download) + if sys.platform == "win32": + try: + import setuptools # for side-effects, patches distutils + except ImportError: + pass + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise ffiplatform.VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join([sys.version[:3], __version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise ffiplatform.VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise ffiplatform.VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + _hack_at_distutils() # backward compatibility hack + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. 
+ f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: + force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + try: + os.makedirs(os.path.dirname(filename)) + except OSError: + pass diff --git a/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/PKG-INFO b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/PKG-INFO new file mode 100644 index 0000000..ee73ee7 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/PKG-INFO @@ -0,0 +1,85 @@ +Metadata-Version: 1.1 +Name: cryptography +Version: 1.4 +Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. +Home-page: https://github.com/pyca/cryptography +Author: The cryptography developers +Author-email: cryptography-dev@python.org +License: BSD or Apache License, Version 2.0 +Description: Cryptography + ============ + + .. image:: https://img.shields.io/pypi/v/cryptography.svg + :target: https://pypi.python.org/pypi/cryptography/ + :alt: Latest Version + + .. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest + :target: https://cryptography.io + :alt: Latest Docs + + .. image:: https://travis-ci.org/pyca/cryptography.svg?branch=master + :target: https://travis-ci.org/pyca/cryptography + + .. image:: https://codecov.io/github/pyca/cryptography/coverage.svg?branch=master + :target: https://codecov.io/github/pyca/cryptography?branch=master + + + ``cryptography`` is a package which provides cryptographic recipes and + primitives to Python developers. Our goal is for it to be your "cryptographic + standard library". It supports Python 2.6-2.7, Python 3.3+, and PyPy 2.6+. + + ``cryptography`` includes both high level recipes, and low level interfaces to + common cryptographic algorithms such as symmetric ciphers, message digests and + key derivation functions. For example, to encrypt something with + ``cryptography``'s high level symmetric encryption recipe: + + .. code-block:: pycon + + >>> from cryptography.fernet import Fernet + >>> # Put this somewhere safe! + >>> key = Fernet.generate_key() + >>> f = Fernet(key) + >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") + >>> token + '...' + >>> f.decrypt(token) + 'A really secret message. Not for prying eyes.' + + You can find more information in the `documentation`_. + + Discussion + ~~~~~~~~~~ + + If you run into bugs, you can file them in our `issue tracker`_. + + We maintain a `cryptography-dev`_ mailing list for development discussion. + + You can also join ``#cryptography-dev`` on Freenode to ask questions or get + involved. + + + .. _`documentation`: https://cryptography.io/ + .. _`issue tracker`: https://github.com/pyca/cryptography/issues + .. 
_`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev + +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Security :: Cryptography diff --git a/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/SOURCES.txt b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/SOURCES.txt new file mode 100644 index 0000000..8360763 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/SOURCES.txt @@ -0,0 +1,280 @@ +AUTHORS.rst +CHANGELOG.rst +CONTRIBUTING.rst +LICENSE +LICENSE.APACHE +LICENSE.BSD +MANIFEST.in +README.rst +setup.cfg +setup.py +docs/Makefile +docs/api-stability.rst +docs/changelog.rst +docs/community.rst +docs/conf.py +docs/cryptography-docs.py +docs/doing-a-release.rst +docs/exceptions.rst +docs/faq.rst +docs/fernet.rst +docs/glossary.rst +docs/index.rst +docs/installation.rst +docs/limitations.rst +docs/make.bat +docs/random-numbers.rst +docs/security.rst +docs/spelling_wordlist.txt +docs/_static/.keep +docs/development/c-bindings.rst +docs/development/getting-started.rst +docs/development/index.rst +docs/development/reviewing-patches.rst +docs/development/submitting-patches.rst +docs/development/test-vectors.rst +docs/development/custom-vectors/arc4.rst +docs/development/custom-vectors/cast5.rst +docs/development/custom-vectors/idea.rst +docs/development/custom-vectors/rsa-oaep-sha2.rst +docs/development/custom-vectors/secp256k1.rst +docs/development/custom-vectors/seed.rst +docs/development/custom-vectors/arc4/generate_arc4.py +docs/development/custom-vectors/arc4/verify_arc4.go +docs/development/custom-vectors/cast5/generate_cast5.py +docs/development/custom-vectors/cast5/verify_cast5.go +docs/development/custom-vectors/idea/generate_idea.py +docs/development/custom-vectors/idea/verify_idea.py +docs/development/custom-vectors/rsa-oaep-sha2/VerifyRSAOAEPSHA2.java +docs/development/custom-vectors/rsa-oaep-sha2/generate_rsa_oaep_sha2.py +docs/development/custom-vectors/secp256k1/generate_secp256k1.py +docs/development/custom-vectors/secp256k1/verify_secp256k1.py +docs/development/custom-vectors/seed/generate_seed.py +docs/development/custom-vectors/seed/verify_seed.py +docs/hazmat/backends/commoncrypto.rst +docs/hazmat/backends/index.rst +docs/hazmat/backends/interfaces.rst +docs/hazmat/backends/multibackend.rst +docs/hazmat/backends/openssl.rst +docs/hazmat/bindings/commoncrypto.rst +docs/hazmat/bindings/index.rst +docs/hazmat/bindings/openssl.rst +docs/hazmat/primitives/constant-time.rst +docs/hazmat/primitives/cryptographic-hashes.rst +docs/hazmat/primitives/index.rst 
+docs/hazmat/primitives/interfaces.rst +docs/hazmat/primitives/key-derivation-functions.rst +docs/hazmat/primitives/keywrap.rst +docs/hazmat/primitives/padding.rst +docs/hazmat/primitives/symmetric-encryption.rst +docs/hazmat/primitives/twofactor.rst +docs/hazmat/primitives/asymmetric/dh.rst +docs/hazmat/primitives/asymmetric/dsa.rst +docs/hazmat/primitives/asymmetric/ec.rst +docs/hazmat/primitives/asymmetric/index.rst +docs/hazmat/primitives/asymmetric/interfaces.rst +docs/hazmat/primitives/asymmetric/rsa.rst +docs/hazmat/primitives/asymmetric/serialization.rst +docs/hazmat/primitives/asymmetric/utils.rst +docs/hazmat/primitives/mac/cmac.rst +docs/hazmat/primitives/mac/hmac.rst +docs/hazmat/primitives/mac/index.rst +docs/x509/index.rst +docs/x509/reference.rst +docs/x509/tutorial.rst +src/_cffi_src/__init__.py +src/_cffi_src/build_commoncrypto.py +src/_cffi_src/build_constant_time.py +src/_cffi_src/build_openssl.py +src/_cffi_src/build_padding.py +src/_cffi_src/utils.py +src/_cffi_src/commoncrypto/__init__.py +src/_cffi_src/commoncrypto/cf.py +src/_cffi_src/commoncrypto/common_cryptor.py +src/_cffi_src/commoncrypto/common_digest.py +src/_cffi_src/commoncrypto/common_hmac.py +src/_cffi_src/commoncrypto/common_key_derivation.py +src/_cffi_src/commoncrypto/common_symmetric_key_wrap.py +src/_cffi_src/commoncrypto/seccertificate.py +src/_cffi_src/commoncrypto/secimport.py +src/_cffi_src/commoncrypto/secitem.py +src/_cffi_src/commoncrypto/seckey.py +src/_cffi_src/commoncrypto/seckeychain.py +src/_cffi_src/commoncrypto/secpolicy.py +src/_cffi_src/commoncrypto/sectransform.py +src/_cffi_src/commoncrypto/sectrust.py +src/_cffi_src/hazmat_src/constant_time.c +src/_cffi_src/hazmat_src/constant_time.h +src/_cffi_src/hazmat_src/padding.c +src/_cffi_src/hazmat_src/padding.h +src/_cffi_src/openssl/__init__.py +src/_cffi_src/openssl/aes.py +src/_cffi_src/openssl/asn1.py +src/_cffi_src/openssl/bignum.py +src/_cffi_src/openssl/bio.py +src/_cffi_src/openssl/callbacks.py +src/_cffi_src/openssl/cmac.py +src/_cffi_src/openssl/cms.py +src/_cffi_src/openssl/crypto.py +src/_cffi_src/openssl/dh.py +src/_cffi_src/openssl/dsa.py +src/_cffi_src/openssl/ec.py +src/_cffi_src/openssl/ecdh.py +src/_cffi_src/openssl/ecdsa.py +src/_cffi_src/openssl/engine.py +src/_cffi_src/openssl/err.py +src/_cffi_src/openssl/evp.py +src/_cffi_src/openssl/hmac.py +src/_cffi_src/openssl/nid.py +src/_cffi_src/openssl/objects.py +src/_cffi_src/openssl/ocsp.py +src/_cffi_src/openssl/opensslv.py +src/_cffi_src/openssl/pem.py +src/_cffi_src/openssl/pkcs12.py +src/_cffi_src/openssl/pkcs7.py +src/_cffi_src/openssl/rand.py +src/_cffi_src/openssl/rsa.py +src/_cffi_src/openssl/ssl.py +src/_cffi_src/openssl/x509.py +src/_cffi_src/openssl/x509_vfy.py +src/_cffi_src/openssl/x509name.py +src/_cffi_src/openssl/x509v3.py +src/cryptography/__about__.py +src/cryptography/__init__.py +src/cryptography/exceptions.py +src/cryptography/fernet.py +src/cryptography/utils.py +src/cryptography.egg-info/PKG-INFO +src/cryptography.egg-info/SOURCES.txt +src/cryptography.egg-info/dependency_links.txt +src/cryptography.egg-info/entry_points.txt +src/cryptography.egg-info/not-zip-safe +src/cryptography.egg-info/requires.txt +src/cryptography.egg-info/top_level.txt +src/cryptography/hazmat/__init__.py +src/cryptography/hazmat/backends/__init__.py +src/cryptography/hazmat/backends/interfaces.py +src/cryptography/hazmat/backends/multibackend.py +src/cryptography/hazmat/backends/commoncrypto/__init__.py +src/cryptography/hazmat/backends/commoncrypto/backend.py 
+src/cryptography/hazmat/backends/commoncrypto/ciphers.py +src/cryptography/hazmat/backends/commoncrypto/hashes.py +src/cryptography/hazmat/backends/commoncrypto/hmac.py +src/cryptography/hazmat/backends/openssl/__init__.py +src/cryptography/hazmat/backends/openssl/backend.py +src/cryptography/hazmat/backends/openssl/ciphers.py +src/cryptography/hazmat/backends/openssl/cmac.py +src/cryptography/hazmat/backends/openssl/decode_asn1.py +src/cryptography/hazmat/backends/openssl/dsa.py +src/cryptography/hazmat/backends/openssl/ec.py +src/cryptography/hazmat/backends/openssl/encode_asn1.py +src/cryptography/hazmat/backends/openssl/hashes.py +src/cryptography/hazmat/backends/openssl/hmac.py +src/cryptography/hazmat/backends/openssl/rsa.py +src/cryptography/hazmat/backends/openssl/utils.py +src/cryptography/hazmat/backends/openssl/x509.py +src/cryptography/hazmat/bindings/__init__.py +src/cryptography/hazmat/bindings/commoncrypto/__init__.py +src/cryptography/hazmat/bindings/commoncrypto/binding.py +src/cryptography/hazmat/bindings/openssl/__init__.py +src/cryptography/hazmat/bindings/openssl/_conditional.py +src/cryptography/hazmat/bindings/openssl/binding.py +src/cryptography/hazmat/primitives/__init__.py +src/cryptography/hazmat/primitives/cmac.py +src/cryptography/hazmat/primitives/constant_time.py +src/cryptography/hazmat/primitives/hashes.py +src/cryptography/hazmat/primitives/hmac.py +src/cryptography/hazmat/primitives/keywrap.py +src/cryptography/hazmat/primitives/padding.py +src/cryptography/hazmat/primitives/serialization.py +src/cryptography/hazmat/primitives/asymmetric/__init__.py +src/cryptography/hazmat/primitives/asymmetric/dh.py +src/cryptography/hazmat/primitives/asymmetric/dsa.py +src/cryptography/hazmat/primitives/asymmetric/ec.py +src/cryptography/hazmat/primitives/asymmetric/padding.py +src/cryptography/hazmat/primitives/asymmetric/rsa.py +src/cryptography/hazmat/primitives/asymmetric/utils.py +src/cryptography/hazmat/primitives/ciphers/__init__.py +src/cryptography/hazmat/primitives/ciphers/algorithms.py +src/cryptography/hazmat/primitives/ciphers/base.py +src/cryptography/hazmat/primitives/ciphers/modes.py +src/cryptography/hazmat/primitives/interfaces/__init__.py +src/cryptography/hazmat/primitives/kdf/__init__.py +src/cryptography/hazmat/primitives/kdf/concatkdf.py +src/cryptography/hazmat/primitives/kdf/hkdf.py +src/cryptography/hazmat/primitives/kdf/kbkdf.py +src/cryptography/hazmat/primitives/kdf/pbkdf2.py +src/cryptography/hazmat/primitives/kdf/x963kdf.py +src/cryptography/hazmat/primitives/twofactor/__init__.py +src/cryptography/hazmat/primitives/twofactor/hotp.py +src/cryptography/hazmat/primitives/twofactor/totp.py +src/cryptography/hazmat/primitives/twofactor/utils.py +src/cryptography/x509/__init__.py +src/cryptography/x509/base.py +src/cryptography/x509/extensions.py +src/cryptography/x509/general_name.py +src/cryptography/x509/name.py +src/cryptography/x509/oid.py +tests/__init__.py +tests/conftest.py +tests/doubles.py +tests/test_fernet.py +tests/test_interfaces.py +tests/test_utils.py +tests/test_warnings.py +tests/test_x509.py +tests/test_x509_crlbuilder.py +tests/test_x509_ext.py +tests/test_x509_revokedcertbuilder.py +tests/utils.py +tests/hazmat/__init__.py +tests/hazmat/backends/__init__.py +tests/hazmat/backends/test_commoncrypto.py +tests/hazmat/backends/test_multibackend.py +tests/hazmat/backends/test_openssl.py +tests/hazmat/bindings/test_commoncrypto.py +tests/hazmat/bindings/test_openssl.py +tests/hazmat/primitives/__init__.py 
+tests/hazmat/primitives/fixtures_dsa.py +tests/hazmat/primitives/fixtures_ec.py +tests/hazmat/primitives/fixtures_rsa.py +tests/hazmat/primitives/test_3des.py +tests/hazmat/primitives/test_aes.py +tests/hazmat/primitives/test_arc4.py +tests/hazmat/primitives/test_asym_utils.py +tests/hazmat/primitives/test_block.py +tests/hazmat/primitives/test_blowfish.py +tests/hazmat/primitives/test_camellia.py +tests/hazmat/primitives/test_cast5.py +tests/hazmat/primitives/test_ciphers.py +tests/hazmat/primitives/test_cmac.py +tests/hazmat/primitives/test_concatkdf.py +tests/hazmat/primitives/test_constant_time.py +tests/hazmat/primitives/test_dh.py +tests/hazmat/primitives/test_dsa.py +tests/hazmat/primitives/test_ec.py +tests/hazmat/primitives/test_hash_vectors.py +tests/hazmat/primitives/test_hashes.py +tests/hazmat/primitives/test_hkdf.py +tests/hazmat/primitives/test_hkdf_vectors.py +tests/hazmat/primitives/test_hmac.py +tests/hazmat/primitives/test_hmac_vectors.py +tests/hazmat/primitives/test_idea.py +tests/hazmat/primitives/test_kbkdf.py +tests/hazmat/primitives/test_kbkdf_vectors.py +tests/hazmat/primitives/test_keywrap.py +tests/hazmat/primitives/test_padding.py +tests/hazmat/primitives/test_pbkdf2hmac.py +tests/hazmat/primitives/test_pbkdf2hmac_vectors.py +tests/hazmat/primitives/test_rsa.py +tests/hazmat/primitives/test_seed.py +tests/hazmat/primitives/test_serialization.py +tests/hazmat/primitives/test_x963_vectors.py +tests/hazmat/primitives/test_x963kdf.py +tests/hazmat/primitives/utils.py +tests/hazmat/primitives/twofactor/__init__.py +tests/hazmat/primitives/twofactor/test_hotp.py +tests/hazmat/primitives/twofactor/test_totp.py +tests/hypothesis/__init__.py +tests/hypothesis/test_fernet.py +tests/hypothesis/test_padding.py \ No newline at end of file diff --git a/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/dependency_links.txt b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/entry_points.txt b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/entry_points.txt new file mode 100644 index 0000000..bfde650 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/entry_points.txt @@ -0,0 +1,3 @@ +[cryptography.backends] +openssl = cryptography.hazmat.backends.openssl:backend + diff --git a/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/installed-files.txt b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/installed-files.txt new file mode 100644 index 0000000..c0ba59a --- /dev/null +++ b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/installed-files.txt @@ -0,0 +1,149 @@ +../cryptography/__about__.py +../cryptography/__init__.py +../cryptography/fernet.py +../cryptography/exceptions.py +../cryptography/utils.py +../cryptography/x509/oid.py +../cryptography/x509/__init__.py +../cryptography/x509/extensions.py +../cryptography/x509/name.py +../cryptography/x509/base.py +../cryptography/x509/general_name.py +../cryptography/hazmat/__init__.py +../cryptography/hazmat/backends/multibackend.py +../cryptography/hazmat/backends/interfaces.py +../cryptography/hazmat/backends/__init__.py +../cryptography/hazmat/primitives/constant_time.py +../cryptography/hazmat/primitives/keywrap.py +../cryptography/hazmat/primitives/__init__.py 
+../cryptography/hazmat/primitives/hmac.py +../cryptography/hazmat/primitives/cmac.py +../cryptography/hazmat/primitives/padding.py +../cryptography/hazmat/primitives/serialization.py +../cryptography/hazmat/primitives/hashes.py +../cryptography/hazmat/bindings/__init__.py +../cryptography/hazmat/backends/openssl/x509.py +../cryptography/hazmat/backends/openssl/encode_asn1.py +../cryptography/hazmat/backends/openssl/decode_asn1.py +../cryptography/hazmat/backends/openssl/ec.py +../cryptography/hazmat/backends/openssl/__init__.py +../cryptography/hazmat/backends/openssl/hmac.py +../cryptography/hazmat/backends/openssl/cmac.py +../cryptography/hazmat/backends/openssl/ciphers.py +../cryptography/hazmat/backends/openssl/utils.py +../cryptography/hazmat/backends/openssl/backend.py +../cryptography/hazmat/backends/openssl/dsa.py +../cryptography/hazmat/backends/openssl/rsa.py +../cryptography/hazmat/backends/openssl/hashes.py +../cryptography/hazmat/backends/commoncrypto/__init__.py +../cryptography/hazmat/backends/commoncrypto/hmac.py +../cryptography/hazmat/backends/commoncrypto/ciphers.py +../cryptography/hazmat/backends/commoncrypto/backend.py +../cryptography/hazmat/backends/commoncrypto/hashes.py +../cryptography/hazmat/primitives/ciphers/modes.py +../cryptography/hazmat/primitives/ciphers/__init__.py +../cryptography/hazmat/primitives/ciphers/base.py +../cryptography/hazmat/primitives/ciphers/algorithms.py +../cryptography/hazmat/primitives/kdf/hkdf.py +../cryptography/hazmat/primitives/kdf/kbkdf.py +../cryptography/hazmat/primitives/kdf/pbkdf2.py +../cryptography/hazmat/primitives/kdf/__init__.py +../cryptography/hazmat/primitives/kdf/concatkdf.py +../cryptography/hazmat/primitives/kdf/x963kdf.py +../cryptography/hazmat/primitives/asymmetric/dh.py +../cryptography/hazmat/primitives/asymmetric/ec.py +../cryptography/hazmat/primitives/asymmetric/__init__.py +../cryptography/hazmat/primitives/asymmetric/utils.py +../cryptography/hazmat/primitives/asymmetric/dsa.py +../cryptography/hazmat/primitives/asymmetric/padding.py +../cryptography/hazmat/primitives/asymmetric/rsa.py +../cryptography/hazmat/primitives/interfaces/__init__.py +../cryptography/hazmat/primitives/twofactor/totp.py +../cryptography/hazmat/primitives/twofactor/hotp.py +../cryptography/hazmat/primitives/twofactor/__init__.py +../cryptography/hazmat/primitives/twofactor/utils.py +../cryptography/hazmat/bindings/openssl/__init__.py +../cryptography/hazmat/bindings/openssl/binding.py +../cryptography/hazmat/bindings/openssl/_conditional.py +../cryptography/hazmat/bindings/commoncrypto/__init__.py +../cryptography/hazmat/bindings/commoncrypto/binding.py +../cryptography/__pycache__/__about__.cpython-34.pyc +../cryptography/__pycache__/__init__.cpython-34.pyc +../cryptography/__pycache__/fernet.cpython-34.pyc +../cryptography/__pycache__/exceptions.cpython-34.pyc +../cryptography/__pycache__/utils.cpython-34.pyc +../cryptography/x509/__pycache__/oid.cpython-34.pyc +../cryptography/x509/__pycache__/__init__.cpython-34.pyc +../cryptography/x509/__pycache__/extensions.cpython-34.pyc +../cryptography/x509/__pycache__/name.cpython-34.pyc +../cryptography/x509/__pycache__/base.cpython-34.pyc +../cryptography/x509/__pycache__/general_name.cpython-34.pyc +../cryptography/hazmat/__pycache__/__init__.cpython-34.pyc +../cryptography/hazmat/backends/__pycache__/multibackend.cpython-34.pyc +../cryptography/hazmat/backends/__pycache__/interfaces.cpython-34.pyc +../cryptography/hazmat/backends/__pycache__/__init__.cpython-34.pyc 
+../cryptography/hazmat/primitives/__pycache__/constant_time.cpython-34.pyc +../cryptography/hazmat/primitives/__pycache__/keywrap.cpython-34.pyc +../cryptography/hazmat/primitives/__pycache__/__init__.cpython-34.pyc +../cryptography/hazmat/primitives/__pycache__/hmac.cpython-34.pyc +../cryptography/hazmat/primitives/__pycache__/cmac.cpython-34.pyc +../cryptography/hazmat/primitives/__pycache__/padding.cpython-34.pyc +../cryptography/hazmat/primitives/__pycache__/serialization.cpython-34.pyc +../cryptography/hazmat/primitives/__pycache__/hashes.cpython-34.pyc +../cryptography/hazmat/bindings/__pycache__/__init__.cpython-34.pyc +../cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-34.pyc +../cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-34.pyc +../cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-34.pyc +../cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-34.pyc +../cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-34.pyc +../cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-34.pyc +../cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-34.pyc +../cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-34.pyc +../cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-34.pyc +../cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-34.pyc +../cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-34.pyc +../cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-34.pyc +../cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-34.pyc +../cryptography/hazmat/backends/commoncrypto/__pycache__/__init__.cpython-34.pyc +../cryptography/hazmat/backends/commoncrypto/__pycache__/hmac.cpython-34.pyc +../cryptography/hazmat/backends/commoncrypto/__pycache__/ciphers.cpython-34.pyc +../cryptography/hazmat/backends/commoncrypto/__pycache__/backend.cpython-34.pyc +../cryptography/hazmat/backends/commoncrypto/__pycache__/hashes.cpython-34.pyc +../cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-34.pyc +../cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-34.pyc +../cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-34.pyc +../cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-34.pyc +../cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-34.pyc +../cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-34.pyc +../cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-34.pyc +../cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-34.pyc +../cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-34.pyc +../cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-34.pyc +../cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-34.pyc +../cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-34.pyc +../cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-34.pyc +../cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-34.pyc +../cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-34.pyc +../cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-34.pyc +../cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-34.pyc +../cryptography/hazmat/primitives/interfaces/__pycache__/__init__.cpython-34.pyc +../cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-34.pyc +../cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-34.pyc 
+../cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-34.pyc +../cryptography/hazmat/primitives/twofactor/__pycache__/utils.cpython-34.pyc +../cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-34.pyc +../cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-34.pyc +../cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-34.pyc +../cryptography/hazmat/bindings/commoncrypto/__pycache__/__init__.cpython-34.pyc +../cryptography/hazmat/bindings/commoncrypto/__pycache__/binding.cpython-34.pyc +../cryptography/hazmat/bindings/_openssl.cpython-34m.so +../cryptography/hazmat/bindings/_constant_time.cpython-34m.so +../cryptography/hazmat/bindings/_padding.cpython-34m.so +./ +PKG-INFO +requires.txt +not-zip-safe +dependency_links.txt +SOURCES.txt +top_level.txt +entry_points.txt diff --git a/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/not-zip-safe b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/requires.txt b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/requires.txt new file mode 100644 index 0000000..a963a04 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/requires.txt @@ -0,0 +1,26 @@ +idna>=2.0 +pyasn1>=0.1.8 +six>=1.4.1 +setuptools>=11.3 +cffi>=1.4.1 + +[docstest] +doc8 +pyenchant +readme_renderer +sphinx +sphinx_rtd_theme +sphinxcontrib-spelling + +[pep8test] +flake8 +flake8-import-order +pep8-naming + +[test] +pytest +pretend +iso8601 +pyasn1_modules +hypothesis>=1.11.4 +cryptography_vectors==1.4 diff --git a/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/top_level.txt b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/top_level.txt new file mode 100644 index 0000000..9333de6 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography-1.4-py3.4.egg-info/top_level.txt @@ -0,0 +1,4 @@ +_openssl +_constant_time +_padding +cryptography diff --git a/lib/python3.4/site-packages/cryptography/__about__.py b/lib/python3.4/site-packages/cryptography/__about__.py new file mode 100644 index 0000000..dc7c412 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/__about__.py @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
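The egg-info files above (PKG-INFO, requires.txt, entry_points.txt, top_level.txt) are what setuptools reads back at runtime. As a minimal sketch, assuming this vendored copy is importable, they can be queried through the standard pkg_resources APIs:

import pkg_resources

dist = pkg_resources.get_distribution("cryptography")
print(dist.version)                     # "1.4" for this vendored copy
print(dist.requires())                  # parsed from requires.txt
print(dist.requires(extras=("test",)))  # adds the [test] extra

# entry_points.txt registers the OpenSSL backend under this group:
for ep in pkg_resources.iter_entry_points("cryptography.backends"):
    print(ep.name, ep.module_name)      # openssl cryptography.hazmat.backends.openssl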
+ +from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +__title__ = "cryptography" +__summary__ = ("cryptography is a package which provides cryptographic recipes" + " and primitives to Python developers.") +__uri__ = "https://github.com/pyca/cryptography" + +__version__ = "1.4" + +__author__ = "The cryptography developers" +__email__ = "cryptography-dev@python.org" + +__license__ = "BSD or Apache License, Version 2.0" +__copyright__ = "Copyright 2013-2016 {0}".format(__author__) diff --git a/lib/python3.4/site-packages/cryptography/__init__.py b/lib/python3.4/site-packages/cryptography/__init__.py new file mode 100644 index 0000000..940c66b --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/__init__.py @@ -0,0 +1,27 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import sys +import warnings + +from cryptography.__about__ import ( + __author__, __copyright__, __email__, __license__, __summary__, __title__, + __uri__, __version__ +) + + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +if sys.version_info[:2] == (2, 6): + warnings.warn( + "Python 2.6 is no longer supported by the Python core team, please " + "upgrade your Python. A future version of cryptography will drop " + "support for Python 2.6", + DeprecationWarning + ) diff --git a/lib/python3.4/site-packages/cryptography/exceptions.py b/lib/python3.4/site-packages/cryptography/exceptions.py new file mode 100644 index 0000000..ee43fed --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/exceptions.py @@ -0,0 +1,56 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from enum import Enum + + +class _Reasons(Enum): + BACKEND_MISSING_INTERFACE = 0 + UNSUPPORTED_HASH = 1 + UNSUPPORTED_CIPHER = 2 + UNSUPPORTED_PADDING = 3 + UNSUPPORTED_MGF = 4 + UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5 + UNSUPPORTED_ELLIPTIC_CURVE = 6 + UNSUPPORTED_SERIALIZATION = 7 + UNSUPPORTED_X509 = 8 + UNSUPPORTED_EXCHANGE_ALGORITHM = 9 + + +class UnsupportedAlgorithm(Exception): + def __init__(self, message, reason=None): + super(UnsupportedAlgorithm, self).__init__(message) + self._reason = reason + + +class AlreadyFinalized(Exception): + pass + + +class AlreadyUpdated(Exception): + pass + + +class NotYetFinalized(Exception): + pass + + +class InvalidTag(Exception): + pass + + +class InvalidSignature(Exception): + pass + + +class InternalError(Exception): + def __init__(self, msg, err_code): + super(InternalError, self).__init__(msg) + self.err_code = err_code + + +class InvalidKey(Exception): + pass diff --git a/lib/python3.4/site-packages/cryptography/fernet.py b/lib/python3.4/site-packages/cryptography/fernet.py new file mode 100644 index 0000000..99eb10e --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/fernet.py @@ -0,0 +1,143 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import base64 +import binascii +import os +import struct +import time + +import six + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, padding +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +from cryptography.hazmat.primitives.hmac import HMAC + + +class InvalidToken(Exception): + pass + + +_MAX_CLOCK_SKEW = 60 + + +class Fernet(object): + def __init__(self, key, backend=None): + if backend is None: + backend = default_backend() + + key = base64.urlsafe_b64decode(key) + if len(key) != 32: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes." + ) + + self._signing_key = key[:16] + self._encryption_key = key[16:] + self._backend = backend + + @classmethod + def generate_key(cls): + return base64.urlsafe_b64encode(os.urandom(32)) + + def encrypt(self, data): + current_time = int(time.time()) + iv = os.urandom(16) + return self._encrypt_from_parts(data, current_time, iv) + + def _encrypt_from_parts(self, data, current_time, iv): + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") + + padder = padding.PKCS7(algorithms.AES.block_size).padder() + padded_data = padder.update(data) + padder.finalize() + encryptor = Cipher( + algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend + ).encryptor() + ciphertext = encryptor.update(padded_data) + encryptor.finalize() + + basic_parts = ( + b"\x80" + struct.pack(">Q", current_time) + iv + ciphertext + ) + + h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend) + h.update(basic_parts) + hmac = h.finalize() + return base64.urlsafe_b64encode(basic_parts + hmac) + + def decrypt(self, token, ttl=None): + if not isinstance(token, bytes): + raise TypeError("token must be bytes.") + + current_time = int(time.time()) + + try: + data = base64.urlsafe_b64decode(token) + except (TypeError, binascii.Error): + raise InvalidToken + + if not data or six.indexbytes(data, 0) != 0x80: + raise InvalidToken + + try: + timestamp, = struct.unpack(">Q", data[1:9]) + except struct.error: + raise InvalidToken + if ttl is not None: + if timestamp + ttl < current_time: + raise InvalidToken + + if current_time + _MAX_CLOCK_SKEW < timestamp: + raise InvalidToken + + h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend) + h.update(data[:-32]) + try: + h.verify(data[-32:]) + except InvalidSignature: + raise InvalidToken + + iv = data[9:25] + ciphertext = data[25:-32] + decryptor = Cipher( + algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend + ).decryptor() + plaintext_padded = decryptor.update(ciphertext) + try: + plaintext_padded += decryptor.finalize() + except ValueError: + raise InvalidToken + unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder() + + unpadded = unpadder.update(plaintext_padded) + try: + unpadded += unpadder.finalize() + except ValueError: + raise InvalidToken + return unpadded + + +class MultiFernet(object): + def __init__(self, fernets): + fernets = list(fernets) + if not fernets: + raise ValueError( + "MultiFernet requires at least one Fernet instance" + ) + self._fernets = fernets + + def encrypt(self, msg): + return self._fernets[0].encrypt(msg) + + def decrypt(self, msg, ttl=None): + for f in self._fernets: + try: + return f.decrypt(msg, ttl) + except InvalidToken: 
+ pass + raise InvalidToken diff --git a/lib/python3.4/site-packages/cryptography/hazmat/__init__.py b/lib/python3.4/site-packages/cryptography/hazmat/__init__.py new file mode 100644 index 0000000..9f06a99 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/__init__.py @@ -0,0 +1,11 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +""" +Hazardous Materials + +This is a "Hazardous Materials" module. You should ONLY use it if you're +100% absolutely sure that you know what you're doing because this module +is full of land mines, dragons, and dinosaurs with laser guns. +""" +from __future__ import absolute_import, division, print_function diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/__init__.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/__init__.py new file mode 100644 index 0000000..96a431d --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/__init__.py @@ -0,0 +1,37 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import pkg_resources + +from cryptography.hazmat.backends.multibackend import MultiBackend + + +_available_backends_list = None + + +def _available_backends(): + global _available_backends_list + + if _available_backends_list is None: + _available_backends_list = [ + ep.resolve() + for ep in pkg_resources.iter_entry_points( + "cryptography.backends" + ) + ] + + return _available_backends_list + +_default_backend = None + + +def default_backend(): + global _default_backend + + if _default_backend is None: + _default_backend = MultiBackend(_available_backends()) + + return _default_backend diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py new file mode 100644 index 0000000..1d52a25 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.backends.commoncrypto.backend import backend + + +__all__ = ["backend"] diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py new file mode 100644 index 0000000..315d67d --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py @@ -0,0 +1,245 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
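For cross-reference, `_available_backends` and `default_backend` defined just above both cache their results in module-level globals, so the entry-point scan runs once per process. A small usage sketch under that assumption:

from cryptography.hazmat.backends import _available_backends, default_backend

# Resolved once from the "cryptography.backends" entry-point group.
print([backend.name for backend in _available_backends()])  # e.g. ['openssl']

# The MultiBackend wrapper is cached, so repeated calls return the same object.
assert default_backend() is default_backend()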
+ +from __future__ import absolute_import, division, print_function + +from collections import namedtuple + +from cryptography import utils +from cryptography.exceptions import InternalError +from cryptography.hazmat.backends.commoncrypto.ciphers import ( + _CipherContext, _GCMCipherContext +) +from cryptography.hazmat.backends.commoncrypto.hashes import _HashContext +from cryptography.hazmat.backends.commoncrypto.hmac import _HMACContext +from cryptography.hazmat.backends.interfaces import ( + CipherBackend, HMACBackend, HashBackend, PBKDF2HMACBackend +) +from cryptography.hazmat.bindings.commoncrypto.binding import Binding +from cryptography.hazmat.primitives.ciphers.algorithms import ( + AES, ARC4, Blowfish, CAST5, TripleDES +) +from cryptography.hazmat.primitives.ciphers.modes import ( + CBC, CFB, CFB8, CTR, ECB, GCM, OFB +) + + +HashMethods = namedtuple( + "HashMethods", ["ctx", "hash_init", "hash_update", "hash_final"] +) + + +@utils.register_interface(CipherBackend) +@utils.register_interface(HashBackend) +@utils.register_interface(HMACBackend) +@utils.register_interface(PBKDF2HMACBackend) +class Backend(object): + """ + CommonCrypto API wrapper. + """ + name = "commoncrypto" + + def __init__(self): + self._binding = Binding() + self._ffi = self._binding.ffi + self._lib = self._binding.lib + + self._cipher_registry = {} + self._register_default_ciphers() + self._hash_mapping = { + "md5": HashMethods( + "CC_MD5_CTX *", self._lib.CC_MD5_Init, + self._lib.CC_MD5_Update, self._lib.CC_MD5_Final + ), + "sha1": HashMethods( + "CC_SHA1_CTX *", self._lib.CC_SHA1_Init, + self._lib.CC_SHA1_Update, self._lib.CC_SHA1_Final + ), + "sha224": HashMethods( + "CC_SHA256_CTX *", self._lib.CC_SHA224_Init, + self._lib.CC_SHA224_Update, self._lib.CC_SHA224_Final + ), + "sha256": HashMethods( + "CC_SHA256_CTX *", self._lib.CC_SHA256_Init, + self._lib.CC_SHA256_Update, self._lib.CC_SHA256_Final + ), + "sha384": HashMethods( + "CC_SHA512_CTX *", self._lib.CC_SHA384_Init, + self._lib.CC_SHA384_Update, self._lib.CC_SHA384_Final + ), + "sha512": HashMethods( + "CC_SHA512_CTX *", self._lib.CC_SHA512_Init, + self._lib.CC_SHA512_Update, self._lib.CC_SHA512_Final + ), + } + + self._supported_hmac_algorithms = { + "md5": self._lib.kCCHmacAlgMD5, + "sha1": self._lib.kCCHmacAlgSHA1, + "sha224": self._lib.kCCHmacAlgSHA224, + "sha256": self._lib.kCCHmacAlgSHA256, + "sha384": self._lib.kCCHmacAlgSHA384, + "sha512": self._lib.kCCHmacAlgSHA512, + } + + self._supported_pbkdf2_hmac_algorithms = { + "sha1": self._lib.kCCPRFHmacAlgSHA1, + "sha224": self._lib.kCCPRFHmacAlgSHA224, + "sha256": self._lib.kCCPRFHmacAlgSHA256, + "sha384": self._lib.kCCPRFHmacAlgSHA384, + "sha512": self._lib.kCCPRFHmacAlgSHA512, + } + + def hash_supported(self, algorithm): + return algorithm.name in self._hash_mapping + + def hmac_supported(self, algorithm): + return algorithm.name in self._supported_hmac_algorithms + + def create_hash_ctx(self, algorithm): + return _HashContext(self, algorithm) + + def create_hmac_ctx(self, key, algorithm): + return _HMACContext(self, key, algorithm) + + def cipher_supported(self, cipher, mode): + return (type(cipher), type(mode)) in self._cipher_registry + + def create_symmetric_encryption_ctx(self, cipher, mode): + if isinstance(mode, GCM): + return _GCMCipherContext( + self, cipher, mode, self._lib.kCCEncrypt + ) + else: + return _CipherContext(self, cipher, mode, self._lib.kCCEncrypt) + + def create_symmetric_decryption_ctx(self, cipher, mode): + if isinstance(mode, GCM): + return _GCMCipherContext( + self, 
cipher, mode, self._lib.kCCDecrypt + ) + else: + return _CipherContext(self, cipher, mode, self._lib.kCCDecrypt) + + def pbkdf2_hmac_supported(self, algorithm): + return algorithm.name in self._supported_pbkdf2_hmac_algorithms + + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + alg_enum = self._supported_pbkdf2_hmac_algorithms[algorithm.name] + buf = self._ffi.new("char[]", length) + res = self._lib.CCKeyDerivationPBKDF( + self._lib.kCCPBKDF2, + key_material, + len(key_material), + salt, + len(salt), + alg_enum, + iterations, + buf, + length + ) + self._check_cipher_response(res) + + return self._ffi.buffer(buf)[:] + + def _register_cipher_adapter(self, cipher_cls, cipher_const, mode_cls, + mode_const): + if (cipher_cls, mode_cls) in self._cipher_registry: + raise ValueError("Duplicate registration for: {0} {1}.".format( + cipher_cls, mode_cls) + ) + self._cipher_registry[cipher_cls, mode_cls] = (cipher_const, + mode_const) + + def _register_default_ciphers(self): + for mode_cls, mode_const in [ + (CBC, self._lib.kCCModeCBC), + (ECB, self._lib.kCCModeECB), + (CFB, self._lib.kCCModeCFB), + (CFB8, self._lib.kCCModeCFB8), + (OFB, self._lib.kCCModeOFB), + (CTR, self._lib.kCCModeCTR), + (GCM, self._lib.kCCModeGCM), + ]: + self._register_cipher_adapter( + AES, + self._lib.kCCAlgorithmAES128, + mode_cls, + mode_const + ) + for mode_cls, mode_const in [ + (CBC, self._lib.kCCModeCBC), + (ECB, self._lib.kCCModeECB), + (CFB, self._lib.kCCModeCFB), + (CFB8, self._lib.kCCModeCFB8), + (OFB, self._lib.kCCModeOFB), + ]: + self._register_cipher_adapter( + TripleDES, + self._lib.kCCAlgorithm3DES, + mode_cls, + mode_const + ) + for mode_cls, mode_const in [ + (CBC, self._lib.kCCModeCBC), + (ECB, self._lib.kCCModeECB), + (CFB, self._lib.kCCModeCFB), + (OFB, self._lib.kCCModeOFB) + ]: + self._register_cipher_adapter( + Blowfish, + self._lib.kCCAlgorithmBlowfish, + mode_cls, + mode_const + ) + for mode_cls, mode_const in [ + (CBC, self._lib.kCCModeCBC), + (ECB, self._lib.kCCModeECB), + (CFB, self._lib.kCCModeCFB), + (OFB, self._lib.kCCModeOFB), + (CTR, self._lib.kCCModeCTR) + ]: + self._register_cipher_adapter( + CAST5, + self._lib.kCCAlgorithmCAST, + mode_cls, + mode_const + ) + self._register_cipher_adapter( + ARC4, + self._lib.kCCAlgorithmRC4, + type(None), + self._lib.kCCModeRC4 + ) + + def _check_cipher_response(self, response): + if response == self._lib.kCCSuccess: + return + elif response == self._lib.kCCAlignmentError: + # This error is not currently triggered due to a bug filed as + # rdar://15589470 + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length." + ) + else: + raise InternalError( + "The backend returned an unknown error, consider filing a bug." + " Code: {0}.".format(response), + response + ) + + def _release_cipher_ctx(self, ctx): + """ + Called by the garbage collector and used to safely dereference and + release the context. 
+ """ + if ctx[0] != self._ffi.NULL: + res = self._lib.CCCryptorRelease(ctx[0]) + self._check_cipher_response(res) + ctx[0] = self._ffi.NULL + + +backend = Backend() diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py new file mode 100644 index 0000000..1ce8aec --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py @@ -0,0 +1,193 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + InvalidTag, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import ciphers, constant_time +from cryptography.hazmat.primitives.ciphers import modes +from cryptography.hazmat.primitives.ciphers.modes import ( + CFB, CFB8, CTR, OFB +) + + +@utils.register_interface(ciphers.CipherContext) +class _CipherContext(object): + def __init__(self, backend, cipher, mode, operation): + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + # There is a bug in CommonCrypto where block ciphers do not raise + # kCCAlignmentError when finalizing if you supply non-block aligned + # data. To work around this we need to keep track of the block + # alignment ourselves, but only for alg+mode combos that require + # block alignment. OFB, CFB, and CTR make a block cipher algorithm + # into a stream cipher so we don't need to track them (and thus their + # block size is effectively 1 byte just like OpenSSL/CommonCrypto + # treat RC4 and other stream cipher block sizes). + # This bug has been filed as rdar://15589470 + self._bytes_processed = 0 + if (isinstance(cipher, ciphers.BlockCipherAlgorithm) and not + isinstance(mode, (OFB, CFB, CFB8, CTR))): + self._byte_block_size = cipher.block_size // 8 + else: + self._byte_block_size = 1 + + registry = self._backend._cipher_registry + try: + cipher_enum, mode_enum = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + ctx = self._backend._ffi.new("CCCryptorRef *") + ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx) + + if isinstance(mode, modes.ModeWithInitializationVector): + iv_nonce = mode.initialization_vector + elif isinstance(mode, modes.ModeWithNonce): + iv_nonce = mode.nonce + else: + iv_nonce = self._backend._ffi.NULL + + if isinstance(mode, CTR): + mode_option = self._backend._lib.kCCModeOptionCTR_BE + else: + mode_option = 0 + + res = self._backend._lib.CCCryptorCreateWithMode( + operation, + mode_enum, cipher_enum, + self._backend._lib.ccNoPadding, iv_nonce, + cipher.key, len(cipher.key), + self._backend._ffi.NULL, 0, 0, mode_option, ctx) + self._backend._check_cipher_response(res) + + self._ctx = ctx + + def update(self, data): + # Count bytes processed to handle block alignment. 
+ self._bytes_processed += len(data) + buf = self._backend._ffi.new( + "unsigned char[]", len(data) + self._byte_block_size - 1) + outlen = self._backend._ffi.new("size_t *") + res = self._backend._lib.CCCryptorUpdate( + self._ctx[0], data, len(data), buf, + len(data) + self._byte_block_size - 1, outlen) + self._backend._check_cipher_response(res) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def finalize(self): + # Raise error if block alignment is wrong. + if self._bytes_processed % self._byte_block_size: + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length." + ) + buf = self._backend._ffi.new("unsigned char[]", self._byte_block_size) + outlen = self._backend._ffi.new("size_t *") + res = self._backend._lib.CCCryptorFinal( + self._ctx[0], buf, len(buf), outlen) + self._backend._check_cipher_response(res) + self._backend._release_cipher_ctx(self._ctx) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + +@utils.register_interface(ciphers.AEADCipherContext) +@utils.register_interface(ciphers.AEADEncryptionContext) +class _GCMCipherContext(object): + def __init__(self, backend, cipher, mode, operation): + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + self._tag = None + + registry = self._backend._cipher_registry + try: + cipher_enum, mode_enum = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + ctx = self._backend._ffi.new("CCCryptorRef *") + ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx) + + self._ctx = ctx + + res = self._backend._lib.CCCryptorCreateWithMode( + operation, + mode_enum, cipher_enum, + self._backend._lib.ccNoPadding, + self._backend._ffi.NULL, + cipher.key, len(cipher.key), + self._backend._ffi.NULL, 0, 0, 0, self._ctx) + self._backend._check_cipher_response(res) + + res = self._backend._lib.CCCryptorGCMAddIV( + self._ctx[0], + mode.initialization_vector, + len(mode.initialization_vector) + ) + self._backend._check_cipher_response(res) + # CommonCrypto has a bug where calling update without at least one + # call to authenticate_additional_data will result in null byte output + # for ciphertext. The following empty byte string call prevents the + # issue, which is present in at least 10.8 and 10.9. + # Filed as rdar://18314544 + self.authenticate_additional_data(b"") + + def update(self, data): + buf = self._backend._ffi.new("unsigned char[]", len(data)) + args = (self._ctx[0], data, len(data), buf) + if self._operation == self._backend._lib.kCCEncrypt: + res = self._backend._lib.CCCryptorGCMEncrypt(*args) + else: + res = self._backend._lib.CCCryptorGCMDecrypt(*args) + + self._backend._check_cipher_response(res) + return self._backend._ffi.buffer(buf)[:] + + def finalize(self): + # CommonCrypto has a yet another bug where you must make at least one + # call to update. If you pass just AAD and call finalize without a call + # to update you'll get null bytes for tag. The following update call + # prevents this issue, which is present in at least 10.8 and 10.9. 
+ # Filed as rdar://18314580 + self.update(b"") + tag_size = self._cipher.block_size // 8 + tag_buf = self._backend._ffi.new("unsigned char[]", tag_size) + tag_len = self._backend._ffi.new("size_t *", tag_size) + res = self._backend._lib.CCCryptorGCMFinal( + self._ctx[0], tag_buf, tag_len + ) + self._backend._check_cipher_response(res) + self._backend._release_cipher_ctx(self._ctx) + self._tag = self._backend._ffi.buffer(tag_buf)[:] + if (self._operation == self._backend._lib.kCCDecrypt and + not constant_time.bytes_eq( + self._tag[:len(self._mode.tag)], self._mode.tag + )): + raise InvalidTag + return b"" + + def authenticate_additional_data(self, data): + res = self._backend._lib.CCCryptorGCMAddAAD( + self._ctx[0], data, len(data) + ) + self._backend._check_cipher_response(res) + + tag = utils.read_only_property("_tag") diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py new file mode 100644 index 0000000..a54e983 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py @@ -0,0 +1,55 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import hashes + + +@utils.register_interface(hashes.HashContext) +class _HashContext(object): + def __init__(self, backend, algorithm, ctx=None): + self._algorithm = algorithm + self._backend = backend + + if ctx is None: + try: + methods = self._backend._hash_mapping[self.algorithm.name] + except KeyError: + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + ctx = self._backend._ffi.new(methods.ctx) + res = methods.hash_init(ctx) + assert res == 1 + + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def copy(self): + methods = self._backend._hash_mapping[self.algorithm.name] + new_ctx = self._backend._ffi.new(methods.ctx) + # CommonCrypto has no APIs for copying hashes, so we have to copy the + # underlying struct. + new_ctx[0] = self._ctx[0] + + return _HashContext(self._backend, self.algorithm, ctx=new_ctx) + + def update(self, data): + methods = self._backend._hash_mapping[self.algorithm.name] + res = methods.hash_update(self._ctx, data, len(data)) + assert res == 1 + + def finalize(self): + methods = self._backend._hash_mapping[self.algorithm.name] + buf = self._backend._ffi.new("unsigned char[]", + self.algorithm.digest_size) + res = methods.hash_final(buf, self._ctx) + assert res == 1 + return self._backend._ffi.buffer(buf)[:] diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py new file mode 100644 index 0000000..ae623d8 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py @@ -0,0 +1,59 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
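+
+# This module wraps CommonCrypto's CCHmacInit/CCHmacUpdate/CCHmacFinal.
+# A minimal usage sketch (hypothetical key and data values):
+#
+#     from cryptography.hazmat.backends.commoncrypto.backend import backend
+#     from cryptography.hazmat.primitives import hashes
+#
+#     ctx = backend.create_hmac_ctx(b"key", hashes.SHA256())
+#     ctx.update(b"data")
+#     mac = ctx.finalize()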
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import constant_time, hashes, interfaces + + +@utils.register_interface(interfaces.MACContext) +@utils.register_interface(hashes.HashContext) +class _HMACContext(object): + def __init__(self, backend, key, algorithm, ctx=None): + self._algorithm = algorithm + self._backend = backend + if ctx is None: + ctx = self._backend._ffi.new("CCHmacContext *") + try: + alg = self._backend._supported_hmac_algorithms[algorithm.name] + except KeyError: + raise UnsupportedAlgorithm( + "{0} is not a supported HMAC hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + + self._backend._lib.CCHmacInit(ctx, alg, key, len(key)) + + self._ctx = ctx + self._key = key + + algorithm = utils.read_only_property("_algorithm") + + def copy(self): + copied_ctx = self._backend._ffi.new("CCHmacContext *") + # CommonCrypto has no APIs for copying HMACs, so we have to copy the + # underlying struct. + copied_ctx[0] = self._ctx[0] + return _HMACContext( + self._backend, self._key, self.algorithm, ctx=copied_ctx + ) + + def update(self, data): + self._backend._lib.CCHmacUpdate(self._ctx, data, len(data)) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", + self.algorithm.digest_size) + self._backend._lib.CCHmacFinal(self._ctx, buf) + return self._backend._ffi.buffer(buf)[:] + + def verify(self, signature): + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature did not match digest.") diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/interfaces.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/interfaces.py new file mode 100644 index 0000000..5b9e6f3 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/interfaces.py @@ -0,0 +1,359 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class CipherBackend(object): + @abc.abstractmethod + def cipher_supported(self, cipher, mode): + """ + Return True if the given cipher and mode are supported. + """ + + @abc.abstractmethod + def create_symmetric_encryption_ctx(self, cipher, mode): + """ + Get a CipherContext that can be used for encryption. + """ + + @abc.abstractmethod + def create_symmetric_decryption_ctx(self, cipher, mode): + """ + Get a CipherContext that can be used for decryption. + """ + + +@six.add_metaclass(abc.ABCMeta) +class HashBackend(object): + @abc.abstractmethod + def hash_supported(self, algorithm): + """ + Return True if the hash algorithm is supported by this backend. + """ + + @abc.abstractmethod + def create_hash_ctx(self, algorithm): + """ + Create a HashContext for calculating a message digest. + """ + + +@six.add_metaclass(abc.ABCMeta) +class HMACBackend(object): + @abc.abstractmethod + def hmac_supported(self, algorithm): + """ + Return True if the hash algorithm is supported for HMAC by this + backend. + """ + + @abc.abstractmethod + def create_hmac_ctx(self, key, algorithm): + """ + Create a MACContext for calculating a message authentication code. 
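+
+        The returned object is expected to behave like a hash context with
+        an extra verify() method: update(), copy(), finalize() and verify().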
+ """ + + +@six.add_metaclass(abc.ABCMeta) +class CMACBackend(object): + @abc.abstractmethod + def cmac_algorithm_supported(self, algorithm): + """ + Returns True if the block cipher is supported for CMAC by this backend + """ + + @abc.abstractmethod + def create_cmac_ctx(self, algorithm): + """ + Create a MACContext for calculating a message authentication code. + """ + + +@six.add_metaclass(abc.ABCMeta) +class PBKDF2HMACBackend(object): + @abc.abstractmethod + def pbkdf2_hmac_supported(self, algorithm): + """ + Return True if the hash algorithm is supported for PBKDF2 by this + backend. + """ + + @abc.abstractmethod + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + """ + Return length bytes derived from provided PBKDF2 parameters. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RSABackend(object): + @abc.abstractmethod + def generate_rsa_private_key(self, public_exponent, key_size): + """ + Generate an RSAPrivateKey instance with public_exponent and a modulus + of key_size bits. + """ + + @abc.abstractmethod + def rsa_padding_supported(self, padding): + """ + Returns True if the backend supports the given padding options. + """ + + @abc.abstractmethod + def generate_rsa_parameters_supported(self, public_exponent, key_size): + """ + Returns True if the backend supports the given parameters for key + generation. + """ + + @abc.abstractmethod + def load_rsa_private_numbers(self, numbers): + """ + Returns an RSAPrivateKey provider. + """ + + @abc.abstractmethod + def load_rsa_public_numbers(self, numbers): + """ + Returns an RSAPublicKey provider. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSABackend(object): + @abc.abstractmethod + def generate_dsa_parameters(self, key_size): + """ + Generate a DSAParameters instance with a modulus of key_size bits. + """ + + @abc.abstractmethod + def generate_dsa_private_key(self, parameters): + """ + Generate a DSAPrivateKey instance with parameters as a DSAParameters + object. + """ + + @abc.abstractmethod + def generate_dsa_private_key_and_parameters(self, key_size): + """ + Generate a DSAPrivateKey instance using key size only. + """ + + @abc.abstractmethod + def dsa_hash_supported(self, algorithm): + """ + Return True if the hash algorithm is supported by the backend for DSA. + """ + + @abc.abstractmethod + def dsa_parameters_supported(self, p, q, g): + """ + Return True if the parameters are supported by the backend for DSA. + """ + + @abc.abstractmethod + def load_dsa_private_numbers(self, numbers): + """ + Returns a DSAPrivateKey provider. + """ + + @abc.abstractmethod + def load_dsa_public_numbers(self, numbers): + """ + Returns a DSAPublicKey provider. + """ + + @abc.abstractmethod + def load_dsa_parameter_numbers(self, numbers): + """ + Returns a DSAParameters provider. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurveBackend(object): + @abc.abstractmethod + def elliptic_curve_signature_algorithm_supported( + self, signature_algorithm, curve + ): + """ + Returns True if the backend supports the named elliptic curve with the + specified signature algorithm. + """ + + @abc.abstractmethod + def elliptic_curve_supported(self, curve): + """ + Returns True if the backend supports the named elliptic curve. + """ + + @abc.abstractmethod + def generate_elliptic_curve_private_key(self, curve): + """ + Return an object conforming to the EllipticCurvePrivateKey interface. 
+ """ + + @abc.abstractmethod + def load_elliptic_curve_public_numbers(self, numbers): + """ + Return an EllipticCurvePublicKey provider using the given numbers. + """ + + @abc.abstractmethod + def load_elliptic_curve_private_numbers(self, numbers): + """ + Return an EllipticCurvePrivateKey provider using the given numbers. + """ + + @abc.abstractmethod + def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve): + """ + Returns whether the exchange algorithm is supported by this backend. + """ + + +@six.add_metaclass(abc.ABCMeta) +class PEMSerializationBackend(object): + @abc.abstractmethod + def load_pem_private_key(self, data, password): + """ + Loads a private key from PEM encoded data, using the provided password + if the data is encrypted. + """ + + @abc.abstractmethod + def load_pem_public_key(self, data): + """ + Loads a public key from PEM encoded data. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DERSerializationBackend(object): + @abc.abstractmethod + def load_der_private_key(self, data, password): + """ + Loads a private key from DER encoded data. Uses the provided password + if the data is encrypted. + """ + + @abc.abstractmethod + def load_der_public_key(self, data): + """ + Loads a public key from DER encoded data. + """ + + +@six.add_metaclass(abc.ABCMeta) +class X509Backend(object): + @abc.abstractmethod + def load_pem_x509_certificate(self, data): + """ + Load an X.509 certificate from PEM encoded data. + """ + + @abc.abstractmethod + def load_der_x509_certificate(self, data): + """ + Load an X.509 certificate from DER encoded data. + """ + + @abc.abstractmethod + def load_der_x509_csr(self, data): + """ + Load an X.509 CSR from DER encoded data. + """ + + @abc.abstractmethod + def load_pem_x509_csr(self, data): + """ + Load an X.509 CSR from PEM encoded data. + """ + + @abc.abstractmethod + def create_x509_csr(self, builder, private_key, algorithm): + """ + Create and sign an X.509 CSR from a CSR builder object. + """ + + @abc.abstractmethod + def create_x509_certificate(self, builder, private_key, algorithm): + """ + Create and sign an X.509 certificate from a CertificateBuilder object. + """ + + @abc.abstractmethod + def create_x509_crl(self, builder, private_key, algorithm): + """ + Create and sign an X.509 CertificateRevocationList from a + CertificateRevocationListBuilder object. + """ + + @abc.abstractmethod + def create_x509_revoked_certificate(self, builder): + """ + Create a RevokedCertificate object from a RevokedCertificateBuilder + object. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHBackend(object): + @abc.abstractmethod + def generate_dh_parameters(self, key_size): + """ + Generate a DHParameters instance with a modulus of key_size bits. + """ + + @abc.abstractmethod + def generate_dh_private_key(self, parameters): + """ + Generate a DHPrivateKey instance with parameters as a DHParameters + object. + """ + + @abc.abstractmethod + def generate_dh_private_key_and_parameters(self, key_size): + """ + Generate a DHPrivateKey instance using key size only. + """ + + @abc.abstractmethod + def load_dh_private_numbers(self, numbers): + """ + Returns a DHPrivateKey provider. + """ + + @abc.abstractmethod + def load_dh_public_numbers(self, numbers): + """ + Returns a DHPublicKey provider. + """ + + @abc.abstractmethod + def load_dh_parameter_numbers(self, numbers): + """ + Returns a DHParameters provider. 
+ """ + + @abc.abstractmethod + def dh_exchange_algorithm_supported(self, exchange_algorithm): + """ + Returns whether the exchange algorithm is supported by this backend. + """ + + @abc.abstractmethod + def dh_parameters_supported(self, p, g): + """ + Returns whether the backend supports DH with these parameter values. + """ diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/multibackend.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/multibackend.py new file mode 100644 index 0000000..48bc7d0 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/multibackend.py @@ -0,0 +1,411 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.backends.interfaces import ( + CMACBackend, CipherBackend, DERSerializationBackend, DSABackend, + EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend, + PEMSerializationBackend, RSABackend, X509Backend +) + + +@utils.register_interface(CMACBackend) +@utils.register_interface(CipherBackend) +@utils.register_interface(DERSerializationBackend) +@utils.register_interface(HashBackend) +@utils.register_interface(HMACBackend) +@utils.register_interface(PBKDF2HMACBackend) +@utils.register_interface(RSABackend) +@utils.register_interface(DSABackend) +@utils.register_interface(EllipticCurveBackend) +@utils.register_interface(PEMSerializationBackend) +@utils.register_interface(X509Backend) +class MultiBackend(object): + name = "multibackend" + + def __init__(self, backends): + if len(backends) == 0: + raise ValueError( + "Multibackend cannot be initialized with no backends. If you " + "are seeing this error when trying to use default_backend() " + "please try uninstalling and reinstalling cryptography." 
+ ) + + self._backends = backends + + def _filtered_backends(self, interface): + for b in self._backends: + if isinstance(b, interface): + yield b + + def cipher_supported(self, cipher, mode): + return any( + b.cipher_supported(cipher, mode) + for b in self._filtered_backends(CipherBackend) + ) + + def create_symmetric_encryption_ctx(self, cipher, mode): + for b in self._filtered_backends(CipherBackend): + try: + return b.create_symmetric_encryption_ctx(cipher, mode) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + def create_symmetric_decryption_ctx(self, cipher, mode): + for b in self._filtered_backends(CipherBackend): + try: + return b.create_symmetric_decryption_ctx(cipher, mode) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + def hash_supported(self, algorithm): + return any( + b.hash_supported(algorithm) + for b in self._filtered_backends(HashBackend) + ) + + def create_hash_ctx(self, algorithm): + for b in self._filtered_backends(HashBackend): + try: + return b.create_hash_ctx(algorithm) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + + def hmac_supported(self, algorithm): + return any( + b.hmac_supported(algorithm) + for b in self._filtered_backends(HMACBackend) + ) + + def create_hmac_ctx(self, key, algorithm): + for b in self._filtered_backends(HMACBackend): + try: + return b.create_hmac_ctx(key, algorithm) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + + def pbkdf2_hmac_supported(self, algorithm): + return any( + b.pbkdf2_hmac_supported(algorithm) + for b in self._filtered_backends(PBKDF2HMACBackend) + ) + + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + for b in self._filtered_backends(PBKDF2HMACBackend): + try: + return b.derive_pbkdf2_hmac( + algorithm, length, salt, iterations, key_material + ) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + + def generate_rsa_private_key(self, public_exponent, key_size): + for b in self._filtered_backends(RSABackend): + return b.generate_rsa_private_key(public_exponent, key_size) + raise UnsupportedAlgorithm("RSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def generate_rsa_parameters_supported(self, public_exponent, key_size): + for b in self._filtered_backends(RSABackend): + return b.generate_rsa_parameters_supported( + public_exponent, key_size + ) + raise UnsupportedAlgorithm("RSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def rsa_padding_supported(self, padding): + for b in self._filtered_backends(RSABackend): + return b.rsa_padding_supported(padding) + raise UnsupportedAlgorithm("RSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_rsa_private_numbers(self, numbers): + for b in self._filtered_backends(RSABackend): + return 
b.load_rsa_private_numbers(numbers) + + raise UnsupportedAlgorithm("RSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_rsa_public_numbers(self, numbers): + for b in self._filtered_backends(RSABackend): + return b.load_rsa_public_numbers(numbers) + + raise UnsupportedAlgorithm("RSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def generate_dsa_parameters(self, key_size): + for b in self._filtered_backends(DSABackend): + return b.generate_dsa_parameters(key_size) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def generate_dsa_private_key(self, parameters): + for b in self._filtered_backends(DSABackend): + return b.generate_dsa_private_key(parameters) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def generate_dsa_private_key_and_parameters(self, key_size): + for b in self._filtered_backends(DSABackend): + return b.generate_dsa_private_key_and_parameters(key_size) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def dsa_hash_supported(self, algorithm): + for b in self._filtered_backends(DSABackend): + return b.dsa_hash_supported(algorithm) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def dsa_parameters_supported(self, p, q, g): + for b in self._filtered_backends(DSABackend): + return b.dsa_parameters_supported(p, q, g) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_dsa_public_numbers(self, numbers): + for b in self._filtered_backends(DSABackend): + return b.load_dsa_public_numbers(numbers) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_dsa_private_numbers(self, numbers): + for b in self._filtered_backends(DSABackend): + return b.load_dsa_private_numbers(numbers) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_dsa_parameter_numbers(self, numbers): + for b in self._filtered_backends(DSABackend): + return b.load_dsa_parameter_numbers(numbers) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def cmac_algorithm_supported(self, algorithm): + return any( + b.cmac_algorithm_supported(algorithm) + for b in self._filtered_backends(CMACBackend) + ) + + def create_cmac_ctx(self, algorithm): + for b in self._filtered_backends(CMACBackend): + try: + return b.create_cmac_ctx(algorithm) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm("This backend does not support CMAC.", + _Reasons.UNSUPPORTED_CIPHER) + + def elliptic_curve_supported(self, curve): + return any( + b.elliptic_curve_supported(curve) + for b in self._filtered_backends(EllipticCurveBackend) + ) + + def elliptic_curve_signature_algorithm_supported( + self, signature_algorithm, curve + ): + return any( + b.elliptic_curve_signature_algorithm_supported( + signature_algorithm, curve + ) + for b in self._filtered_backends(EllipticCurveBackend) + ) + + def generate_elliptic_curve_private_key(self, curve): + for b in self._filtered_backends(EllipticCurveBackend): + try: + return b.generate_elliptic_curve_private_key(curve) + except UnsupportedAlgorithm: + continue + + raise 
UnsupportedAlgorithm( + "This backend does not support this elliptic curve.", + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def load_elliptic_curve_private_numbers(self, numbers): + for b in self._filtered_backends(EllipticCurveBackend): + try: + return b.load_elliptic_curve_private_numbers(numbers) + except UnsupportedAlgorithm: + continue + + raise UnsupportedAlgorithm( + "This backend does not support this elliptic curve.", + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def load_elliptic_curve_public_numbers(self, numbers): + for b in self._filtered_backends(EllipticCurveBackend): + try: + return b.load_elliptic_curve_public_numbers(numbers) + except UnsupportedAlgorithm: + continue + + raise UnsupportedAlgorithm( + "This backend does not support this elliptic curve.", + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve): + return any( + b.elliptic_curve_exchange_algorithm_supported(algorithm, curve) + for b in self._filtered_backends(EllipticCurveBackend) + ) + + def load_pem_private_key(self, data, password): + for b in self._filtered_backends(PEMSerializationBackend): + return b.load_pem_private_key(data, password) + + raise UnsupportedAlgorithm( + "This backend does not support this key serialization.", + _Reasons.UNSUPPORTED_SERIALIZATION + ) + + def load_pem_public_key(self, data): + for b in self._filtered_backends(PEMSerializationBackend): + return b.load_pem_public_key(data) + + raise UnsupportedAlgorithm( + "This backend does not support this key serialization.", + _Reasons.UNSUPPORTED_SERIALIZATION + ) + + def load_der_private_key(self, data, password): + for b in self._filtered_backends(DERSerializationBackend): + return b.load_der_private_key(data, password) + + raise UnsupportedAlgorithm( + "This backend does not support this key serialization.", + _Reasons.UNSUPPORTED_SERIALIZATION + ) + + def load_der_public_key(self, data): + for b in self._filtered_backends(DERSerializationBackend): + return b.load_der_public_key(data) + + raise UnsupportedAlgorithm( + "This backend does not support this key serialization.", + _Reasons.UNSUPPORTED_SERIALIZATION + ) + + def load_pem_x509_certificate(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_pem_x509_certificate(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_der_x509_certificate(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_der_x509_certificate(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_pem_x509_crl(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_pem_x509_crl(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_der_x509_crl(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_der_x509_crl(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_der_x509_csr(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_der_x509_csr(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_pem_x509_csr(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_pem_x509_csr(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + 
_Reasons.UNSUPPORTED_X509 + ) + + def create_x509_csr(self, builder, private_key, algorithm): + for b in self._filtered_backends(X509Backend): + return b.create_x509_csr(builder, private_key, algorithm) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def create_x509_certificate(self, builder, private_key, algorithm): + for b in self._filtered_backends(X509Backend): + return b.create_x509_certificate(builder, private_key, algorithm) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def create_x509_crl(self, builder, private_key, algorithm): + for b in self._filtered_backends(X509Backend): + return b.create_x509_crl(builder, private_key, algorithm) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def create_x509_revoked_certificate(self, builder): + for b in self._filtered_backends(X509Backend): + return b.create_x509_revoked_certificate(builder) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/__init__.py new file mode 100644 index 0000000..8eadeb6 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/__init__.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.backends.openssl.backend import backend + + +__all__ = ["backend"] diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/backend.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/backend.py new file mode 100644 index 0000000..4c62146 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/backend.py @@ -0,0 +1,1813 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
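+
+# This module supplies the OpenSSL-backed implementations of the abstract
+# interfaces declared in cryptography.hazmat.backends.interfaces. A minimal
+# usage sketch (illustrative values only):
+#
+#     from cryptography.hazmat.backends.openssl.backend import backend
+#     from cryptography.hazmat.primitives import hashes
+#
+#     digest = backend.create_hash_ctx(hashes.SHA256())
+#     digest.update(b"data")
+#     print(digest.finalize())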
+ +from __future__ import absolute_import, division, print_function + +import base64 +import calendar +import collections +import itertools +from contextlib import contextmanager + +import six + +from cryptography import utils, x509 +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.backends.interfaces import ( + CMACBackend, CipherBackend, DERSerializationBackend, DSABackend, + EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend, + PEMSerializationBackend, RSABackend, X509Backend +) +from cryptography.hazmat.backends.openssl.ciphers import ( + _AESCTRCipherContext, _CipherContext +) +from cryptography.hazmat.backends.openssl.cmac import _CMACContext +from cryptography.hazmat.backends.openssl.dsa import ( + _DSAParameters, _DSAPrivateKey, _DSAPublicKey +) +from cryptography.hazmat.backends.openssl.ec import ( + _EllipticCurvePrivateKey, _EllipticCurvePublicKey +) +from cryptography.hazmat.backends.openssl.encode_asn1 import ( + _CRL_ENTRY_EXTENSION_ENCODE_HANDLERS, + _CRL_EXTENSION_ENCODE_HANDLERS, _EXTENSION_ENCODE_HANDLERS, + _encode_asn1_int_gc, _encode_asn1_str_gc, _encode_name_gc, _txt2obj_gc, +) +from cryptography.hazmat.backends.openssl.hashes import _HashContext +from cryptography.hazmat.backends.openssl.hmac import _HMACContext +from cryptography.hazmat.backends.openssl.rsa import ( + _RSAPrivateKey, _RSAPublicKey +) +from cryptography.hazmat.backends.openssl.x509 import ( + _Certificate, _CertificateRevocationList, + _CertificateSigningRequest, _RevokedCertificate +) +from cryptography.hazmat.bindings._openssl import ffi as _ffi +from cryptography.hazmat.bindings.openssl import binding +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa +from cryptography.hazmat.primitives.asymmetric.padding import ( + MGF1, OAEP, PKCS1v15, PSS +) +from cryptography.hazmat.primitives.ciphers.algorithms import ( + AES, ARC4, Blowfish, CAST5, Camellia, IDEA, SEED, TripleDES +) +from cryptography.hazmat.primitives.ciphers.modes import ( + CBC, CFB, CFB8, CTR, ECB, GCM, OFB +) + + +_MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"]) + + +class _PasswordUserdata(object): + def __init__(self, password): + self.password = password + self.called = 0 + self.exception = None + + +@binding.ffi_callback("int (char *, int, int, void *)", + name="Cryptography_pem_password_cb") +def _pem_password_cb(buf, size, writing, userdata_handle): + """ + A pem_password_cb function pointer that copied the password to + OpenSSL as required and returns the number of bytes copied. + + typedef int pem_password_cb(char *buf, int size, + int rwflag, void *userdata); + + Useful for decrypting PKCS8 files and so on. + + The userdata pointer must point to a cffi handle of a + _PasswordUserdata instance. + """ + ud = _ffi.from_handle(userdata_handle) + ud.called += 1 + + if not ud.password: + ud.exception = TypeError( + "Password was not given but private key is encrypted." 
+ ) + return -1 + elif len(ud.password) < size: + pw_buf = _ffi.buffer(buf, size) + pw_buf[:len(ud.password)] = ud.password + return len(ud.password) + else: + ud.exception = ValueError( + "Passwords longer than {0} bytes are not supported " + "by this backend.".format(size - 1) + ) + return 0 + + +@utils.register_interface(CipherBackend) +@utils.register_interface(CMACBackend) +@utils.register_interface(DERSerializationBackend) +@utils.register_interface(DSABackend) +@utils.register_interface(EllipticCurveBackend) +@utils.register_interface(HashBackend) +@utils.register_interface(HMACBackend) +@utils.register_interface(PBKDF2HMACBackend) +@utils.register_interface(RSABackend) +@utils.register_interface(PEMSerializationBackend) +@utils.register_interface(X509Backend) +class Backend(object): + """ + OpenSSL API binding interfaces. + """ + name = "openssl" + + def __init__(self): + self._binding = binding.Binding() + self._ffi = self._binding.ffi + self._lib = self._binding.lib + + # Set the default string mask for encoding ASN1 strings to UTF8. This + # is the default for newer OpenSSLs for several years and is + # recommended in RFC 2459. + res = self._lib.ASN1_STRING_set_default_mask_asc(b"utf8only") + self.openssl_assert(res == 1) + + self._cipher_registry = {} + self._register_default_ciphers() + self.activate_osrandom_engine() + + def openssl_assert(self, ok): + return binding._openssl_assert(self._lib, ok) + + def activate_builtin_random(self): + # Obtain a new structural reference. + e = self._lib.ENGINE_get_default_RAND() + if e != self._ffi.NULL: + self._lib.ENGINE_unregister_RAND(e) + # Reset the RNG to use the new engine. + self._lib.RAND_cleanup() + # decrement the structural reference from get_default_RAND + res = self._lib.ENGINE_finish(e) + self.openssl_assert(res == 1) + + def activate_osrandom_engine(self): + # Unregister and free the current engine. + self.activate_builtin_random() + # Fetches an engine by id and returns it. This creates a structural + # reference. + e = self._lib.ENGINE_by_id(self._binding._osrandom_engine_id) + self.openssl_assert(e != self._ffi.NULL) + # Initialize the engine for use. This adds a functional reference. + res = self._lib.ENGINE_init(e) + self.openssl_assert(res == 1) + # Set the engine as the default RAND provider. + res = self._lib.ENGINE_set_default_RAND(e) + self.openssl_assert(res == 1) + # Decrement the structural ref incremented by ENGINE_by_id. + res = self._lib.ENGINE_free(e) + self.openssl_assert(res == 1) + # Decrement the functional ref incremented by ENGINE_init. + res = self._lib.ENGINE_finish(e) + self.openssl_assert(res == 1) + # Reset the RNG to use the new engine. + self._lib.RAND_cleanup() + + def openssl_version_text(self): + """ + Friendly string name of the loaded OpenSSL library. This is not + necessarily the same version as it was compiled against. 
+ + Example: OpenSSL 1.0.1e 11 Feb 2013 + """ + return self._ffi.string( + self._lib.OpenSSL_version(self._lib.OPENSSL_VERSION) + ).decode("ascii") + + def create_hmac_ctx(self, key, algorithm): + return _HMACContext(self, key, algorithm) + + def hash_supported(self, algorithm): + digest = self._lib.EVP_get_digestbyname(algorithm.name.encode("ascii")) + return digest != self._ffi.NULL + + def hmac_supported(self, algorithm): + return self.hash_supported(algorithm) + + def create_hash_ctx(self, algorithm): + return _HashContext(self, algorithm) + + def cipher_supported(self, cipher, mode): + if self._evp_cipher_supported(cipher, mode): + return True + elif isinstance(mode, CTR) and isinstance(cipher, AES): + return True + else: + return False + + def _evp_cipher_supported(self, cipher, mode): + try: + adapter = self._cipher_registry[type(cipher), type(mode)] + except KeyError: + return False + evp_cipher = adapter(self, cipher, mode) + return self._ffi.NULL != evp_cipher + + def register_cipher_adapter(self, cipher_cls, mode_cls, adapter): + if (cipher_cls, mode_cls) in self._cipher_registry: + raise ValueError("Duplicate registration for: {0} {1}.".format( + cipher_cls, mode_cls) + ) + self._cipher_registry[cipher_cls, mode_cls] = adapter + + def _register_default_ciphers(self): + for mode_cls in [CBC, CTR, ECB, OFB, CFB, CFB8]: + self.register_cipher_adapter( + AES, + mode_cls, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + for mode_cls in [CBC, CTR, ECB, OFB, CFB]: + self.register_cipher_adapter( + Camellia, + mode_cls, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + for mode_cls in [CBC, CFB, CFB8, OFB]: + self.register_cipher_adapter( + TripleDES, + mode_cls, + GetCipherByName("des-ede3-{mode.name}") + ) + self.register_cipher_adapter( + TripleDES, + ECB, + GetCipherByName("des-ede3") + ) + for mode_cls in [CBC, CFB, OFB, ECB]: + self.register_cipher_adapter( + Blowfish, + mode_cls, + GetCipherByName("bf-{mode.name}") + ) + for mode_cls in [CBC, CFB, OFB, ECB]: + self.register_cipher_adapter( + SEED, + mode_cls, + GetCipherByName("seed-{mode.name}") + ) + for cipher_cls, mode_cls in itertools.product( + [CAST5, IDEA], + [CBC, OFB, CFB, ECB], + ): + self.register_cipher_adapter( + cipher_cls, + mode_cls, + GetCipherByName("{cipher.name}-{mode.name}") + ) + self.register_cipher_adapter( + ARC4, + type(None), + GetCipherByName("rc4") + ) + self.register_cipher_adapter( + AES, + GCM, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + + def create_symmetric_encryption_ctx(self, cipher, mode): + if (isinstance(mode, CTR) and isinstance(cipher, AES) and + not self._evp_cipher_supported(cipher, mode)): + # This is needed to provide support for AES CTR mode in OpenSSL + # 0.9.8. It can be removed when we drop 0.9.8 support (RHEL 5 + # extended life ends 2020). + return _AESCTRCipherContext(self, cipher, mode) + else: + return _CipherContext(self, cipher, mode, _CipherContext._ENCRYPT) + + def create_symmetric_decryption_ctx(self, cipher, mode): + if (isinstance(mode, CTR) and isinstance(cipher, AES) and + not self._evp_cipher_supported(cipher, mode)): + # This is needed to provide support for AES CTR mode in OpenSSL + # 0.9.8. It can be removed when we drop 0.9.8 support (RHEL 5 + # extended life ends 2020). 
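+            # (_AESCTRCipherContext builds CTR mode on the low-level AES
+            # API rather than going through EVP, so callers still receive
+            # an ordinary CipherContext.)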
+ return _AESCTRCipherContext(self, cipher, mode) + else: + return _CipherContext(self, cipher, mode, _CipherContext._DECRYPT) + + def pbkdf2_hmac_supported(self, algorithm): + if self._lib.Cryptography_HAS_PBKDF2_HMAC: + return self.hmac_supported(algorithm) + else: + # OpenSSL < 1.0.0 has an explicit PBKDF2-HMAC-SHA1 function, + # so if the PBKDF2_HMAC function is missing we only support + # SHA1 via PBKDF2_HMAC_SHA1. + return isinstance(algorithm, hashes.SHA1) + + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + buf = self._ffi.new("char[]", length) + if self._lib.Cryptography_HAS_PBKDF2_HMAC: + evp_md = self._lib.EVP_get_digestbyname( + algorithm.name.encode("ascii")) + self.openssl_assert(evp_md != self._ffi.NULL) + res = self._lib.PKCS5_PBKDF2_HMAC( + key_material, + len(key_material), + salt, + len(salt), + iterations, + evp_md, + length, + buf + ) + self.openssl_assert(res == 1) + else: + if not isinstance(algorithm, hashes.SHA1): + raise UnsupportedAlgorithm( + "This version of OpenSSL only supports PBKDF2HMAC with " + "SHA1.", + _Reasons.UNSUPPORTED_HASH + ) + res = self._lib.PKCS5_PBKDF2_HMAC_SHA1( + key_material, + len(key_material), + salt, + len(salt), + iterations, + length, + buf + ) + self.openssl_assert(res == 1) + + return self._ffi.buffer(buf)[:] + + def _consume_errors(self): + return binding._consume_errors(self._lib) + + def _bn_to_int(self, bn): + assert bn != self._ffi.NULL + if six.PY3: + # Python 3 has constant time from_bytes, so use that. + bn_num_bytes = self._lib.BN_num_bytes(bn) + bin_ptr = self._ffi.new("unsigned char[]", bn_num_bytes) + bin_len = self._lib.BN_bn2bin(bn, bin_ptr) + # A zero length means the BN has value 0 + self.openssl_assert(bin_len >= 0) + return int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big") + else: + # Under Python 2 the best we can do is hex() + hex_cdata = self._lib.BN_bn2hex(bn) + self.openssl_assert(hex_cdata != self._ffi.NULL) + hex_str = self._ffi.string(hex_cdata) + self._lib.OPENSSL_free(hex_cdata) + return int(hex_str, 16) + + def _int_to_bn(self, num, bn=None): + """ + Converts a python integer to a BIGNUM. The returned BIGNUM will not + be garbage collected (to support adding them to structs that take + ownership of the object). Be sure to register it for GC if it will + be discarded after use. + """ + assert bn is None or bn != self._ffi.NULL + + if bn is None: + bn = self._ffi.NULL + + if six.PY3: + # Python 3 has constant time to_bytes, so use that. 
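+            # bit_length() // 8 rounds down, so one extra byte is always
+            # allocated; BN_bin2bn skips leading zero bytes, which makes
+            # the over-allocation harmless.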
+ + binary = num.to_bytes(int(num.bit_length() / 8.0 + 1), "big") + bn_ptr = self._lib.BN_bin2bn(binary, len(binary), bn) + self.openssl_assert(bn_ptr != self._ffi.NULL) + return bn_ptr + + else: + # Under Python 2 the best we can do is hex() + + hex_num = hex(num).rstrip("L").lstrip("0x").encode("ascii") or b"0" + bn_ptr = self._ffi.new("BIGNUM **") + bn_ptr[0] = bn + res = self._lib.BN_hex2bn(bn_ptr, hex_num) + self.openssl_assert(res != 0) + self.openssl_assert(bn_ptr[0] != self._ffi.NULL) + return bn_ptr[0] + + def generate_rsa_private_key(self, public_exponent, key_size): + rsa._verify_rsa_parameters(public_exponent, key_size) + + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + + bn = self._int_to_bn(public_exponent) + bn = self._ffi.gc(bn, self._lib.BN_free) + + res = self._lib.RSA_generate_key_ex( + rsa_cdata, key_size, bn, self._ffi.NULL + ) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPrivateKey(self, rsa_cdata, evp_pkey) + + def generate_rsa_parameters_supported(self, public_exponent, key_size): + return (public_exponent >= 3 and public_exponent & 1 != 0 and + key_size >= 512) + + def load_rsa_private_numbers(self, numbers): + rsa._check_private_key_components( + numbers.p, + numbers.q, + numbers.d, + numbers.dmp1, + numbers.dmq1, + numbers.iqmp, + numbers.public_numbers.e, + numbers.public_numbers.n + ) + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + p = self._int_to_bn(numbers.p) + q = self._int_to_bn(numbers.q) + d = self._int_to_bn(numbers.d) + dmp1 = self._int_to_bn(numbers.dmp1) + dmq1 = self._int_to_bn(numbers.dmq1) + iqmp = self._int_to_bn(numbers.iqmp) + e = self._int_to_bn(numbers.public_numbers.e) + n = self._int_to_bn(numbers.public_numbers.n) + res = self._lib.RSA_set0_factors(rsa_cdata, p, q) + self.openssl_assert(res == 1) + res = self._lib.RSA_set0_key(rsa_cdata, n, e, d) + self.openssl_assert(res == 1) + res = self._lib.RSA_set0_crt_params(rsa_cdata, dmp1, dmq1, iqmp) + self.openssl_assert(res == 1) + res = self._lib.RSA_blinding_on(rsa_cdata, self._ffi.NULL) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPrivateKey(self, rsa_cdata, evp_pkey) + + def load_rsa_public_numbers(self, numbers): + rsa._check_public_key_components(numbers.e, numbers.n) + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + e = self._int_to_bn(numbers.e) + n = self._int_to_bn(numbers.n) + res = self._lib.RSA_set0_key(rsa_cdata, n, e, self._ffi.NULL) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + + def _create_evp_pkey_gc(self): + evp_pkey = self._lib.EVP_PKEY_new() + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return evp_pkey + + def _rsa_cdata_to_evp_pkey(self, rsa_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_RSA(evp_pkey, rsa_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def _bytes_to_bio(self, data): + """ + Return a _MemoryBIO namedtuple of (BIO, char*). + + The char* is the storage for the BIO and it must stay alive until the + BIO is finished with. 
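+        Keep the returned tuple itself alive: it is what holds the char*
+        reference while the BIO is in use.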
+ """ + data_char_p = self._ffi.new("char[]", data) + bio = self._lib.BIO_new_mem_buf( + data_char_p, len(data) + ) + self.openssl_assert(bio != self._ffi.NULL) + + return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_char_p) + + def _create_mem_bio_gc(self): + """ + Creates an empty memory BIO. + """ + bio_method = self._lib.BIO_s_mem() + self.openssl_assert(bio_method != self._ffi.NULL) + bio = self._lib.BIO_new(bio_method) + self.openssl_assert(bio != self._ffi.NULL) + bio = self._ffi.gc(bio, self._lib.BIO_free) + return bio + + def _read_mem_bio(self, bio): + """ + Reads a memory BIO. This only works on memory BIOs. + """ + buf = self._ffi.new("char **") + buf_len = self._lib.BIO_get_mem_data(bio, buf) + self.openssl_assert(buf_len > 0) + self.openssl_assert(buf[0] != self._ffi.NULL) + bio_data = self._ffi.buffer(buf[0], buf_len)[:] + return bio_data + + def _evp_pkey_to_private_key(self, evp_pkey): + """ + Return the appropriate type of PrivateKey given an evp_pkey cdata + pointer. + """ + + key_type = self._lib.Cryptography_EVP_PKEY_id(evp_pkey) + + if key_type == self._lib.EVP_PKEY_RSA: + rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey) + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + return _RSAPrivateKey(self, rsa_cdata, evp_pkey) + elif key_type == self._lib.EVP_PKEY_DSA: + dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey) + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + return _DSAPrivateKey(self, dsa_cdata, evp_pkey) + elif (self._lib.Cryptography_HAS_EC == 1 and + key_type == self._lib.EVP_PKEY_EC): + ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + else: + raise UnsupportedAlgorithm("Unsupported key type.") + + def _evp_pkey_to_public_key(self, evp_pkey): + """ + Return the appropriate type of PublicKey given an evp_pkey cdata + pointer. + """ + + key_type = self._lib.Cryptography_EVP_PKEY_id(evp_pkey) + + if key_type == self._lib.EVP_PKEY_RSA: + rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey) + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + elif key_type == self._lib.EVP_PKEY_DSA: + dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey) + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + return _DSAPublicKey(self, dsa_cdata, evp_pkey) + elif (self._lib.Cryptography_HAS_EC == 1 and + key_type == self._lib.EVP_PKEY_EC): + ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey) + else: + raise UnsupportedAlgorithm("Unsupported key type.") + + def _pem_password_cb(self, password): + """ + Generate a pem_password_cb function pointer that copied the password to + OpenSSL as required and returns the number of bytes copied. + + typedef int pem_password_cb(char *buf, int size, + int rwflag, void *userdata); + + Useful for decrypting PKCS8 files and so on. + + Returns a tuple of (cdata function pointer, userdata). 
+ """ + # Forward compatibility for new static callbacks: + # _pem_password_cb is not a nested function because closures don't + # work well with static callbacks. Static callbacks are registered + # globally. The backend is passed in as userdata argument. + + userdata = _PasswordUserdata(password=password) + return _pem_password_cb, userdata + + def _oaep_hash_supported(self, algorithm): + if self._lib.Cryptography_HAS_RSA_OAEP_MD: + return isinstance( + algorithm, ( + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + ) + ) + else: + return isinstance(algorithm, hashes.SHA1) + + def _pss_mgf1_hash_supported(self, algorithm): + if self._lib.Cryptography_HAS_MGF1_MD: + return self.hash_supported(algorithm) + else: + return isinstance(algorithm, hashes.SHA1) + + def rsa_padding_supported(self, padding): + if isinstance(padding, PKCS1v15): + return True + elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1): + return self._pss_mgf1_hash_supported(padding._mgf._algorithm) + elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1): + return ( + self._oaep_hash_supported(padding._mgf._algorithm) and + self._oaep_hash_supported(padding._algorithm) + ) + else: + return False + + def generate_dsa_parameters(self, key_size): + if key_size not in (1024, 2048, 3072): + raise ValueError("Key size must be 1024 or 2048 or 3072 bits.") + + if (self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f and + key_size > 1024): + raise ValueError( + "Key size must be 1024 because OpenSSL < 1.0.0 doesn't " + "support larger key sizes.") + + ctx = self._lib.DSA_new() + self.openssl_assert(ctx != self._ffi.NULL) + ctx = self._ffi.gc(ctx, self._lib.DSA_free) + + res = self._lib.DSA_generate_parameters_ex( + ctx, key_size, self._ffi.NULL, 0, + self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + + self.openssl_assert(res == 1) + + return _DSAParameters(self, ctx) + + def _dup_dsa_params(self, dsa_cdata): + dsa_cdata_dup = self._lib.DSA_new() + self.openssl_assert(dsa_cdata_dup != self._ffi.NULL) + dsa_cdata_dup = self._ffi.gc(dsa_cdata_dup, self._lib.DSA_free) + p = self._ffi.new("BIGNUM **") + q = self._ffi.new("BIGNUM **") + g = self._ffi.new("BIGNUM **") + self._lib.DSA_get0_pqg(dsa_cdata, p, q, g) + self.openssl_assert(p[0] != self._ffi.NULL) + self.openssl_assert(q[0] != self._ffi.NULL) + self.openssl_assert(g[0] != self._ffi.NULL) + p_dup = self._lib.BN_dup(p[0]) + q_dup = self._lib.BN_dup(q[0]) + g_dup = self._lib.BN_dup(g[0]) + self.openssl_assert(p_dup != self._ffi.NULL) + self.openssl_assert(q_dup != self._ffi.NULL) + self.openssl_assert(g_dup != self._ffi.NULL) + res = self._lib.DSA_set0_pqg(dsa_cdata_dup, p_dup, q_dup, g_dup) + self.openssl_assert(res == 1) + return dsa_cdata_dup + + def generate_dsa_private_key(self, parameters): + ctx = self._dup_dsa_params(parameters._dsa_cdata) + self._lib.DSA_generate_key(ctx) + evp_pkey = self._dsa_cdata_to_evp_pkey(ctx) + + return _DSAPrivateKey(self, ctx, evp_pkey) + + def generate_dsa_private_key_and_parameters(self, key_size): + parameters = self.generate_dsa_parameters(key_size) + return self.generate_dsa_private_key(parameters) + + def _dsa_cdata_set_values(self, dsa_cdata, p, q, g, pub_key, priv_key): + res = self._lib.DSA_set0_pqg(dsa_cdata, p, q, g) + self.openssl_assert(res == 1) + res = self._lib.DSA_set0_key(dsa_cdata, pub_key, priv_key) + self.openssl_assert(res == 1) + + def load_dsa_private_numbers(self, numbers): + dsa._check_dsa_private_numbers(numbers) + parameter_numbers = 
numbers.public_numbers.parameter_numbers + + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + p = self._int_to_bn(parameter_numbers.p) + q = self._int_to_bn(parameter_numbers.q) + g = self._int_to_bn(parameter_numbers.g) + pub_key = self._int_to_bn(numbers.public_numbers.y) + priv_key = self._int_to_bn(numbers.x) + self._dsa_cdata_set_values(dsa_cdata, p, q, g, pub_key, priv_key) + + evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata) + + return _DSAPrivateKey(self, dsa_cdata, evp_pkey) + + def load_dsa_public_numbers(self, numbers): + dsa._check_dsa_parameters(numbers.parameter_numbers) + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + p = self._int_to_bn(numbers.parameter_numbers.p) + q = self._int_to_bn(numbers.parameter_numbers.q) + g = self._int_to_bn(numbers.parameter_numbers.g) + pub_key = self._int_to_bn(numbers.y) + priv_key = self._ffi.NULL + self._dsa_cdata_set_values(dsa_cdata, p, q, g, pub_key, priv_key) + + evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata) + + return _DSAPublicKey(self, dsa_cdata, evp_pkey) + + def load_dsa_parameter_numbers(self, numbers): + dsa._check_dsa_parameters(numbers) + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + p = self._int_to_bn(numbers.p) + q = self._int_to_bn(numbers.q) + g = self._int_to_bn(numbers.g) + res = self._lib.DSA_set0_pqg(dsa_cdata, p, q, g) + self.openssl_assert(res == 1) + + return _DSAParameters(self, dsa_cdata) + + def _dsa_cdata_to_evp_pkey(self, dsa_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_DSA(evp_pkey, dsa_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def dsa_hash_supported(self, algorithm): + if self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f: + return isinstance(algorithm, hashes.SHA1) + else: + return self.hash_supported(algorithm) + + def dsa_parameters_supported(self, p, q, g): + if self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f: + return utils.bit_length(p) <= 1024 and utils.bit_length(q) <= 160 + else: + return True + + def cmac_algorithm_supported(self, algorithm): + return ( + self._lib.Cryptography_HAS_CMAC == 1 and + self.cipher_supported( + algorithm, CBC(b"\x00" * algorithm.block_size) + ) + ) + + def create_cmac_ctx(self, algorithm): + return _CMACContext(self, algorithm) + + def create_x509_csr(self, builder, private_key, algorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError('Algorithm must be a registered hash algorithm.') + + if self._lib.OPENSSL_VERSION_NUMBER <= 0x10001000: + if isinstance(private_key, _DSAPrivateKey): + raise NotImplementedError( + "Certificate signing requests aren't implemented for DSA" + " keys on OpenSSL versions less than 1.0.1." + ) + if isinstance(private_key, _EllipticCurvePrivateKey): + raise NotImplementedError( + "Certificate signing requests aren't implemented for EC" + " keys on OpenSSL versions less than 1.0.1." + ) + + # Resolve the signature algorithm. + evp_md = self._lib.EVP_get_digestbyname( + algorithm.name.encode('ascii') + ) + self.openssl_assert(evp_md != self._ffi.NULL) + + # Create an empty request. + x509_req = self._lib.X509_REQ_new() + self.openssl_assert(x509_req != self._ffi.NULL) + x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free) + + # Set x509 version. 
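+        # PKCS#10 (RFC 2986) defines only one CSR version, encoded as the
+        # integer 0, which is what x509.Version.v1.value supplies.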
+ res = self._lib.X509_REQ_set_version(x509_req, x509.Version.v1.value) + self.openssl_assert(res == 1) + + # Set subject name. + res = self._lib.X509_REQ_set_subject_name( + x509_req, _encode_name_gc(self, builder._subject_name) + ) + self.openssl_assert(res == 1) + + # Set subject public key. + public_key = private_key.public_key() + res = self._lib.X509_REQ_set_pubkey( + x509_req, public_key._evp_pkey + ) + self.openssl_assert(res == 1) + + # Add extensions. + sk_extension = self._lib.sk_X509_EXTENSION_new_null() + self.openssl_assert(sk_extension != self._ffi.NULL) + sk_extension = self._ffi.gc( + sk_extension, self._lib.sk_X509_EXTENSION_free + ) + # gc is not necessary for CSRs, as sk_X509_EXTENSION_free + # will release all the X509_EXTENSIONs. + self._create_x509_extensions( + extensions=builder._extensions, + handlers=_EXTENSION_ENCODE_HANDLERS, + x509_obj=sk_extension, + add_func=self._lib.sk_X509_EXTENSION_insert, + gc=False + ) + res = self._lib.X509_REQ_add_extensions(x509_req, sk_extension) + self.openssl_assert(res == 1) + + # Sign the request using the requester's private key. + res = self._lib.X509_REQ_sign( + x509_req, private_key._evp_pkey, evp_md + ) + if res == 0: + errors = self._consume_errors() + self.openssl_assert(errors[0][1] == self._lib.ERR_LIB_RSA) + self.openssl_assert( + errors[0][3] == self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY + ) + raise ValueError("Digest too big for RSA key") + + return _CertificateSigningRequest(self, x509_req) + + def create_x509_certificate(self, builder, private_key, algorithm): + if not isinstance(builder, x509.CertificateBuilder): + raise TypeError('Builder type mismatch.') + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError('Algorithm must be a registered hash algorithm.') + + if self._lib.OPENSSL_VERSION_NUMBER <= 0x10001000: + if isinstance(private_key, _DSAPrivateKey): + raise NotImplementedError( + "Certificate signatures aren't implemented for DSA" + " keys on OpenSSL versions less than 1.0.1." + ) + if isinstance(private_key, _EllipticCurvePrivateKey): + raise NotImplementedError( + "Certificate signatures aren't implemented for EC" + " keys on OpenSSL versions less than 1.0.1." + ) + + # Resolve the signature algorithm. + evp_md = self._lib.EVP_get_digestbyname( + algorithm.name.encode('ascii') + ) + self.openssl_assert(evp_md != self._ffi.NULL) + + # Create an empty certificate. + x509_cert = self._lib.X509_new() + x509_cert = self._ffi.gc(x509_cert, backend._lib.X509_free) + + # Set the x509 version. + res = self._lib.X509_set_version(x509_cert, builder._version.value) + self.openssl_assert(res == 1) + + # Set the subject's name. + res = self._lib.X509_set_subject_name( + x509_cert, _encode_name_gc(self, list(builder._subject_name)) + ) + self.openssl_assert(res == 1) + + # Set the subject's public key. + res = self._lib.X509_set_pubkey( + x509_cert, builder._public_key._evp_pkey + ) + self.openssl_assert(res == 1) + + # Set the certificate serial number. + serial_number = _encode_asn1_int_gc(self, builder._serial_number) + res = self._lib.X509_set_serialNumber(x509_cert, serial_number) + self.openssl_assert(res == 1) + + # Set the "not before" time. + res = self._lib.ASN1_TIME_set( + self._lib.X509_get_notBefore(x509_cert), + calendar.timegm(builder._not_valid_before.timetuple()) + ) + self.openssl_assert(res != self._ffi.NULL) + + # Set the "not after" time. 
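+        # As with notBefore, calendar.timegm treats the naive timetuple as
+        # UTC, matching ASN1_TIME's UTC-based encoding.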
+ res = self._lib.ASN1_TIME_set( + self._lib.X509_get_notAfter(x509_cert), + calendar.timegm(builder._not_valid_after.timetuple()) + ) + self.openssl_assert(res != self._ffi.NULL) + + # Add extensions. + self._create_x509_extensions( + extensions=builder._extensions, + handlers=_EXTENSION_ENCODE_HANDLERS, + x509_obj=x509_cert, + add_func=self._lib.X509_add_ext, + gc=True + ) + + # Set the issuer name. + res = self._lib.X509_set_issuer_name( + x509_cert, _encode_name_gc(self, list(builder._issuer_name)) + ) + self.openssl_assert(res == 1) + + # Sign the certificate with the issuer's private key. + res = self._lib.X509_sign( + x509_cert, private_key._evp_pkey, evp_md + ) + if res == 0: + errors = self._consume_errors() + self.openssl_assert(errors[0][1] == self._lib.ERR_LIB_RSA) + self.openssl_assert( + errors[0][3] == self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY + ) + raise ValueError("Digest too big for RSA key") + + return _Certificate(self, x509_cert) + + def create_x509_crl(self, builder, private_key, algorithm): + if not isinstance(builder, x509.CertificateRevocationListBuilder): + raise TypeError('Builder type mismatch.') + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError('Algorithm must be a registered hash algorithm.') + + if self._lib.OPENSSL_VERSION_NUMBER <= 0x10001000: + if isinstance(private_key, _DSAPrivateKey): + raise NotImplementedError( + "CRL signatures aren't implemented for DSA" + " keys on OpenSSL versions less than 1.0.1." + ) + if isinstance(private_key, _EllipticCurvePrivateKey): + raise NotImplementedError( + "CRL signatures aren't implemented for EC" + " keys on OpenSSL versions less than 1.0.1." + ) + + evp_md = self._lib.EVP_get_digestbyname( + algorithm.name.encode('ascii') + ) + self.openssl_assert(evp_md != self._ffi.NULL) + + # Create an empty CRL. + x509_crl = self._lib.X509_CRL_new() + x509_crl = self._ffi.gc(x509_crl, backend._lib.X509_CRL_free) + + # Set the x509 CRL version. We only support v2 (integer value 1). + res = self._lib.X509_CRL_set_version(x509_crl, 1) + self.openssl_assert(res == 1) + + # Set the issuer name. + res = self._lib.X509_CRL_set_issuer_name( + x509_crl, _encode_name_gc(self, list(builder._issuer_name)) + ) + self.openssl_assert(res == 1) + + # Set the last update time. + last_update = self._lib.ASN1_TIME_set( + self._ffi.NULL, calendar.timegm(builder._last_update.timetuple()) + ) + self.openssl_assert(last_update != self._ffi.NULL) + last_update = self._ffi.gc(last_update, self._lib.ASN1_TIME_free) + res = self._lib.X509_CRL_set_lastUpdate(x509_crl, last_update) + self.openssl_assert(res == 1) + + # Set the next update time. + next_update = self._lib.ASN1_TIME_set( + self._ffi.NULL, calendar.timegm(builder._next_update.timetuple()) + ) + self.openssl_assert(next_update != self._ffi.NULL) + next_update = self._ffi.gc(next_update, self._lib.ASN1_TIME_free) + res = self._lib.X509_CRL_set_nextUpdate(x509_crl, next_update) + self.openssl_assert(res == 1) + + # Add extensions. + self._create_x509_extensions( + extensions=builder._extensions, + handlers=_CRL_EXTENSION_ENCODE_HANDLERS, + x509_obj=x509_crl, + add_func=self._lib.X509_CRL_add_ext, + gc=True + ) + + # add revoked certificates + for revoked_cert in builder._revoked_certificates: + # Duplicating because the X509_CRL takes ownership and will free + # this memory when X509_CRL_free is called. 
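+            # (The "add0" in X509_CRL_add0_revoked below means ownership is
+            # transferred without a copy, hence the explicit dup.)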
+ revoked = self._lib.Cryptography_X509_REVOKED_dup( + revoked_cert._x509_revoked + ) + self.openssl_assert(revoked != self._ffi.NULL) + res = self._lib.X509_CRL_add0_revoked(x509_crl, revoked) + self.openssl_assert(res == 1) + + res = self._lib.X509_CRL_sign( + x509_crl, private_key._evp_pkey, evp_md + ) + if res == 0: + errors = self._consume_errors() + self.openssl_assert(errors[0][1] == self._lib.ERR_LIB_RSA) + self.openssl_assert( + errors[0][3] == self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY + ) + raise ValueError("Digest too big for RSA key") + + return _CertificateRevocationList(self, x509_crl) + + def _create_x509_extensions(self, extensions, handlers, x509_obj, + add_func, gc): + for i, extension in enumerate(extensions): + x509_extension = self._create_x509_extension( + handlers, extension + ) + self.openssl_assert(x509_extension != self._ffi.NULL) + + if gc: + x509_extension = self._ffi.gc( + x509_extension, self._lib.X509_EXTENSION_free + ) + res = add_func(x509_obj, x509_extension, i) + self.openssl_assert(res >= 1) + + def _create_x509_extension(self, handlers, extension): + if isinstance(extension.value, x509.UnrecognizedExtension): + obj = _txt2obj_gc(self, extension.oid.dotted_string) + value = _encode_asn1_str_gc( + self, extension.value.value, len(extension.value.value) + ) + return self._lib.X509_EXTENSION_create_by_OBJ( + self._ffi.NULL, + obj, + 1 if extension.critical else 0, + value + ) + else: + try: + encode = handlers[extension.oid] + except KeyError: + raise NotImplementedError( + 'Extension not supported: {0}'.format(extension.oid) + ) + + ext_struct = encode(self, extension.value) + nid = self._lib.OBJ_txt2nid( + extension.oid.dotted_string.encode("ascii") + ) + backend.openssl_assert(nid != self._lib.NID_undef) + x509_extension = self._lib.X509V3_EXT_i2d( + nid, 1 if extension.critical else 0, ext_struct + ) + if ( + x509_extension == self._ffi.NULL and + extension.oid == x509.OID_CERTIFICATE_ISSUER + ): + # This path exists to support OpenSSL 0.9.8, which does not + # know how to encode a CERTIFICATE_ISSUER for CRLs. Once we + # drop 0.9.8 support we can remove this. 
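+                # Encode the GENERAL_NAMES to DER ourselves via
+                # i2d_GENERAL_NAMES and wrap the raw bytes in an extension.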
+ self._consume_errors() + pp = backend._ffi.new("unsigned char **") + r = self._lib.i2d_GENERAL_NAMES(ext_struct, pp) + backend.openssl_assert(r > 0) + pp = backend._ffi.gc( + pp, + lambda pointer: backend._lib.OPENSSL_free(pointer[0]) + ) + obj = _txt2obj_gc(self, extension.oid.dotted_string) + return self._lib.X509_EXTENSION_create_by_OBJ( + self._ffi.NULL, + obj, + 1 if extension.critical else 0, + _encode_asn1_str_gc(self, pp[0], r) + ) + return x509_extension + + def create_x509_revoked_certificate(self, builder): + if not isinstance(builder, x509.RevokedCertificateBuilder): + raise TypeError('Builder type mismatch.') + + x509_revoked = self._lib.X509_REVOKED_new() + self.openssl_assert(x509_revoked != self._ffi.NULL) + x509_revoked = self._ffi.gc(x509_revoked, self._lib.X509_REVOKED_free) + serial_number = _encode_asn1_int_gc(self, builder._serial_number) + res = self._lib.X509_REVOKED_set_serialNumber( + x509_revoked, serial_number + ) + self.openssl_assert(res == 1) + rev_date = self._lib.ASN1_TIME_set( + self._ffi.NULL, + calendar.timegm(builder._revocation_date.timetuple()) + ) + self.openssl_assert(rev_date != self._ffi.NULL) + rev_date = self._ffi.gc(rev_date, self._lib.ASN1_TIME_free) + res = self._lib.X509_REVOKED_set_revocationDate(x509_revoked, rev_date) + self.openssl_assert(res == 1) + # add CRL entry extensions + self._create_x509_extensions( + extensions=builder._extensions, + handlers=_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS, + x509_obj=x509_revoked, + add_func=self._lib.X509_REVOKED_add_ext, + gc=True + ) + return _RevokedCertificate(self, None, x509_revoked) + + def load_pem_private_key(self, data, password): + return self._load_key( + self._lib.PEM_read_bio_PrivateKey, + self._evp_pkey_to_private_key, + data, + password, + ) + + def load_pem_public_key(self, data): + mem_bio = self._bytes_to_bio(data) + evp_pkey = self._lib.PEM_read_bio_PUBKEY( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if evp_pkey != self._ffi.NULL: + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return self._evp_pkey_to_public_key(evp_pkey) + else: + # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still + # need to check to see if it is a pure PKCS1 RSA public key (not + # embedded in a subjectPublicKeyInfo) + self._consume_errors() + res = self._lib.BIO_reset(mem_bio.bio) + self.openssl_assert(res == 1) + rsa_cdata = self._lib.PEM_read_bio_RSAPublicKey( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if rsa_cdata != self._ffi.NULL: + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + else: + self._handle_key_loading_error() + + def load_der_private_key(self, data, password): + # OpenSSL has a function called d2i_AutoPrivateKey that can simplify + # this. Unfortunately it doesn't properly support PKCS8 on OpenSSL + # 0.9.8 so we can't use it. Instead we sequentially try to load it 3 + # different ways. First we'll try to load it as a traditional key + bio_data = self._bytes_to_bio(data) + key = self._evp_pkey_from_der_traditional_key(bio_data, password) + if not key: + # Okay so it's not a traditional key. Let's try + # PKCS8 unencrypted. OpenSSL 0.9.8 can't load unencrypted + # PKCS8 keys using d2i_PKCS8PrivateKey_bio so we do this instead. + # Reset the memory BIO so we can read the data again. 
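+            # (BIO_reset() on a memory BIO returns 1 on success, which the
+            # assertion below relies on.)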
+ res = self._lib.BIO_reset(bio_data.bio) + self.openssl_assert(res == 1) + key = self._evp_pkey_from_der_unencrypted_pkcs8(bio_data, password) + + if key: + return self._evp_pkey_to_private_key(key) + else: + # Finally we try to load it with the method that handles encrypted + # PKCS8 properly. + return self._load_key( + self._lib.d2i_PKCS8PrivateKey_bio, + self._evp_pkey_to_private_key, + data, + password, + ) + + def _evp_pkey_from_der_traditional_key(self, bio_data, password): + key = self._lib.d2i_PrivateKey_bio(bio_data.bio, self._ffi.NULL) + if key != self._ffi.NULL: + key = self._ffi.gc(key, self._lib.EVP_PKEY_free) + if password is not None: + raise TypeError( + "Password was given but private key is not encrypted." + ) + + return key + else: + self._consume_errors() + return None + + def _evp_pkey_from_der_unencrypted_pkcs8(self, bio_data, password): + info = self._lib.d2i_PKCS8_PRIV_KEY_INFO_bio( + bio_data.bio, self._ffi.NULL + ) + info = self._ffi.gc(info, self._lib.PKCS8_PRIV_KEY_INFO_free) + if info != self._ffi.NULL: + key = self._lib.EVP_PKCS82PKEY(info) + self.openssl_assert(key != self._ffi.NULL) + key = self._ffi.gc(key, self._lib.EVP_PKEY_free) + if password is not None: + raise TypeError( + "Password was given but private key is not encrypted." + ) + return key + else: + self._consume_errors() + return None + + def load_der_public_key(self, data): + mem_bio = self._bytes_to_bio(data) + evp_pkey = self._lib.d2i_PUBKEY_bio(mem_bio.bio, self._ffi.NULL) + if evp_pkey != self._ffi.NULL: + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return self._evp_pkey_to_public_key(evp_pkey) + else: + # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still + # need to check to see if it is a pure PKCS1 RSA public key (not + # embedded in a subjectPublicKeyInfo) + self._consume_errors() + res = self._lib.BIO_reset(mem_bio.bio) + self.openssl_assert(res == 1) + rsa_cdata = self._lib.d2i_RSAPublicKey_bio( + mem_bio.bio, self._ffi.NULL + ) + if rsa_cdata != self._ffi.NULL: + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + else: + self._handle_key_loading_error() + + def load_pem_x509_certificate(self, data): + mem_bio = self._bytes_to_bio(data) + x509 = self._lib.PEM_read_bio_X509( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if x509 == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load certificate") + + x509 = self._ffi.gc(x509, self._lib.X509_free) + return _Certificate(self, x509) + + def load_der_x509_certificate(self, data): + mem_bio = self._bytes_to_bio(data) + x509 = self._lib.d2i_X509_bio(mem_bio.bio, self._ffi.NULL) + if x509 == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load certificate") + + x509 = self._ffi.gc(x509, self._lib.X509_free) + return _Certificate(self, x509) + + def load_pem_x509_crl(self, data): + mem_bio = self._bytes_to_bio(data) + x509_crl = self._lib.PEM_read_bio_X509_CRL( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if x509_crl == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load CRL") + + x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free) + return _CertificateRevocationList(self, x509_crl) + + def load_der_x509_crl(self, data): + mem_bio = self._bytes_to_bio(data) + x509_crl = self._lib.d2i_X509_CRL_bio(mem_bio.bio, self._ffi.NULL) + if x509_crl == self._ffi.NULL: + self._consume_errors() + 
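+            # (The queue was drained above so the failed parse cannot leak
+            # stale errors into later OpenSSL calls.)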
raise ValueError("Unable to load CRL") + + x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free) + return _CertificateRevocationList(self, x509_crl) + + def load_pem_x509_csr(self, data): + mem_bio = self._bytes_to_bio(data) + x509_req = self._lib.PEM_read_bio_X509_REQ( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if x509_req == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load request") + + x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free) + return _CertificateSigningRequest(self, x509_req) + + def load_der_x509_csr(self, data): + mem_bio = self._bytes_to_bio(data) + x509_req = self._lib.d2i_X509_REQ_bio(mem_bio.bio, self._ffi.NULL) + if x509_req == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load request") + + x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free) + return _CertificateSigningRequest(self, x509_req) + + def _load_key(self, openssl_read_func, convert_func, data, password): + mem_bio = self._bytes_to_bio(data) + + password_cb, userdata = self._pem_password_cb(password) + userdata_handle = self._ffi.new_handle(userdata) + + evp_pkey = openssl_read_func( + mem_bio.bio, + self._ffi.NULL, + password_cb, + userdata_handle, + ) + + if evp_pkey == self._ffi.NULL: + if userdata.exception is not None: + errors = self._consume_errors() + self.openssl_assert(errors) + raise userdata.exception + else: + self._handle_key_loading_error() + + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + if password is not None and userdata.called == 0: + raise TypeError( + "Password was given but private key is not encrypted.") + + assert ( + (password is not None and userdata.called == 1) or + password is None + ) + + return convert_func(evp_pkey) + + def _handle_key_loading_error(self): + errors = self._consume_errors() + + if not errors: + raise ValueError("Could not unserialize key data.") + + elif errors[0][1:] in ( + ( + self._lib.ERR_LIB_EVP, + self._lib.EVP_F_EVP_DECRYPTFINAL_EX, + self._lib.EVP_R_BAD_DECRYPT + ), + ( + self._lib.ERR_LIB_PKCS12, + self._lib.PKCS12_F_PKCS12_PBE_CRYPT, + self._lib.PKCS12_R_PKCS12_CIPHERFINAL_ERROR, + ) + ): + raise ValueError("Bad decrypt. 
Incorrect password?") + + elif errors[0][1:] in ( + ( + self._lib.ERR_LIB_PEM, + self._lib.PEM_F_PEM_GET_EVP_CIPHER_INFO, + self._lib.PEM_R_UNSUPPORTED_ENCRYPTION + ), + + ( + self._lib.ERR_LIB_EVP, + self._lib.EVP_F_EVP_PBE_CIPHERINIT, + self._lib.EVP_R_UNKNOWN_PBE_ALGORITHM + ) + ): + raise UnsupportedAlgorithm( + "PEM data is encrypted with an unsupported cipher", + _Reasons.UNSUPPORTED_CIPHER + ) + + elif any( + error[1:] == ( + self._lib.ERR_LIB_EVP, + self._lib.EVP_F_EVP_PKCS82PKEY, + self._lib.EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM + ) + for error in errors + ): + raise UnsupportedAlgorithm( + "Unsupported public key algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + else: + assert errors[0][1] in ( + self._lib.ERR_LIB_EVP, + self._lib.ERR_LIB_PEM, + self._lib.ERR_LIB_ASN1, + ) + raise ValueError("Could not unserialize key data.") + + def elliptic_curve_supported(self, curve): + if self._lib.Cryptography_HAS_EC != 1: + return False + + try: + curve_nid = self._elliptic_curve_to_nid(curve) + except UnsupportedAlgorithm: + curve_nid = self._lib.NID_undef + + ctx = self._lib.EC_GROUP_new_by_curve_name(curve_nid) + + if ctx == self._ffi.NULL: + errors = self._consume_errors() + self.openssl_assert( + curve_nid == self._lib.NID_undef or + errors[0][1:] == ( + self._lib.ERR_LIB_EC, + self._lib.EC_F_EC_GROUP_NEW_BY_CURVE_NAME, + self._lib.EC_R_UNKNOWN_GROUP + ) + ) + return False + else: + self.openssl_assert(curve_nid != self._lib.NID_undef) + self._lib.EC_GROUP_free(ctx) + return True + + def elliptic_curve_signature_algorithm_supported( + self, signature_algorithm, curve + ): + if self._lib.Cryptography_HAS_EC != 1: + return False + + # We only support ECDSA right now. + if not isinstance(signature_algorithm, ec.ECDSA): + return False + + # Before 0.9.8m OpenSSL can't cope with digests longer than the curve. + if ( + self._lib.OPENSSL_VERSION_NUMBER < 0x009080df and + curve.key_size < signature_algorithm.algorithm.digest_size * 8 + ): + return False + + return self.elliptic_curve_supported(curve) + + def generate_elliptic_curve_private_key(self, curve): + """ + Generate a new private key on the named curve. 
+ """ + + if self.elliptic_curve_supported(curve): + curve_nid = self._elliptic_curve_to_nid(curve) + + ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + + res = self._lib.EC_KEY_generate_key(ec_cdata) + self.openssl_assert(res == 1) + + res = self._lib.EC_KEY_check_key(ec_cdata) + self.openssl_assert(res == 1) + + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + else: + raise UnsupportedAlgorithm( + "Backend object does not support {0}.".format(curve.name), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def load_elliptic_curve_private_numbers(self, numbers): + public = numbers.public_numbers + + curve_nid = self._elliptic_curve_to_nid(public.curve) + + ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + + ec_cdata = self._ec_key_set_public_key_affine_coordinates( + ec_cdata, public.x, public.y) + + res = self._lib.EC_KEY_set_private_key( + ec_cdata, self._int_to_bn(numbers.private_value)) + self.openssl_assert(res == 1) + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + + def load_elliptic_curve_public_numbers(self, numbers): + curve_nid = self._elliptic_curve_to_nid(numbers.curve) + + ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + + ec_cdata = self._ec_key_set_public_key_affine_coordinates( + ec_cdata, numbers.x, numbers.y) + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey) + + def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve): + return ( + self.elliptic_curve_supported(curve) and + self._lib.Cryptography_HAS_ECDH == 1 and + isinstance(algorithm, ec.ECDH) + ) + + def _ec_cdata_to_evp_pkey(self, ec_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_EC_KEY(evp_pkey, ec_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def _elliptic_curve_to_nid(self, curve): + """ + Get the NID for a curve name. + """ + + curve_aliases = { + "secp192r1": "prime192v1", + "secp256r1": "prime256v1" + } + + curve_name = curve_aliases.get(curve.name, curve.name) + + curve_nid = self._lib.OBJ_sn2nid(curve_name.encode()) + if curve_nid == self._lib.NID_undef: + raise UnsupportedAlgorithm( + "{0} is not a supported elliptic curve".format(curve.name), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + return curve_nid + + @contextmanager + def _tmp_bn_ctx(self): + bn_ctx = self._lib.BN_CTX_new() + self.openssl_assert(bn_ctx != self._ffi.NULL) + bn_ctx = self._ffi.gc(bn_ctx, self._lib.BN_CTX_free) + self._lib.BN_CTX_start(bn_ctx) + try: + yield bn_ctx + finally: + self._lib.BN_CTX_end(bn_ctx) + + def _ec_key_determine_group_get_set_funcs(self, ctx): + """ + Given an EC_KEY determine the group and what methods are required to + get/set point coordinates. 
+ """ + self.openssl_assert(ctx != self._ffi.NULL) + + nid_two_field = self._lib.OBJ_sn2nid(b"characteristic-two-field") + self.openssl_assert(nid_two_field != self._lib.NID_undef) + + group = self._lib.EC_KEY_get0_group(ctx) + self.openssl_assert(group != self._ffi.NULL) + + method = self._lib.EC_GROUP_method_of(group) + self.openssl_assert(method != self._ffi.NULL) + + nid = self._lib.EC_METHOD_get_field_type(method) + self.openssl_assert(nid != self._lib.NID_undef) + + if nid == nid_two_field and self._lib.Cryptography_HAS_EC2M: + set_func = self._lib.EC_POINT_set_affine_coordinates_GF2m + get_func = self._lib.EC_POINT_get_affine_coordinates_GF2m + else: + set_func = self._lib.EC_POINT_set_affine_coordinates_GFp + get_func = self._lib.EC_POINT_get_affine_coordinates_GFp + + assert set_func and get_func + + return set_func, get_func, group + + def _ec_key_set_public_key_affine_coordinates(self, ctx, x, y): + """ + This is a port of EC_KEY_set_public_key_affine_coordinates that was + added in 1.0.1. + + Sets the public key point in the EC_KEY context to the affine x and y + values. + """ + + if x < 0 or y < 0: + raise ValueError( + "Invalid EC key. Both x and y must be non-negative." + ) + + set_func, get_func, group = ( + self._ec_key_determine_group_get_set_funcs(ctx) + ) + + point = self._lib.EC_POINT_new(group) + self.openssl_assert(point != self._ffi.NULL) + point = self._ffi.gc(point, self._lib.EC_POINT_free) + + bn_x = self._int_to_bn(x) + bn_y = self._int_to_bn(y) + + with self._tmp_bn_ctx() as bn_ctx: + check_x = self._lib.BN_CTX_get(bn_ctx) + check_y = self._lib.BN_CTX_get(bn_ctx) + + res = set_func(group, point, bn_x, bn_y, bn_ctx) + self.openssl_assert(res == 1) + + res = get_func(group, point, check_x, check_y, bn_ctx) + self.openssl_assert(res == 1) + + res = self._lib.BN_cmp(bn_x, check_x) + if res != 0: + self._consume_errors() + raise ValueError("Invalid EC Key X point.") + res = self._lib.BN_cmp(bn_y, check_y) + if res != 0: + self._consume_errors() + raise ValueError("Invalid EC Key Y point.") + + res = self._lib.EC_KEY_set_public_key(ctx, point) + self.openssl_assert(res == 1) + + res = self._lib.EC_KEY_check_key(ctx) + if res != 1: + self._consume_errors() + raise ValueError("Invalid EC key.") + + return ctx + + def _private_key_bytes(self, encoding, format, encryption_algorithm, + evp_pkey, cdata): + if not isinstance(format, serialization.PrivateFormat): + raise TypeError( + "format must be an item from the PrivateFormat enum" + ) + + if not isinstance(encryption_algorithm, + serialization.KeySerializationEncryption): + raise TypeError( + "Encryption algorithm must be a KeySerializationEncryption " + "instance" + ) + + if isinstance(encryption_algorithm, serialization.NoEncryption): + password = b"" + passlen = 0 + evp_cipher = self._ffi.NULL + elif isinstance(encryption_algorithm, + serialization.BestAvailableEncryption): + # This is a curated value that we will update over time. 
+ evp_cipher = self._lib.EVP_get_cipherbyname( + b"aes-256-cbc" + ) + password = encryption_algorithm.password + passlen = len(password) + if passlen > 1023: + raise ValueError( + "Passwords longer than 1023 bytes are not supported by " + "this backend" + ) + else: + raise ValueError("Unsupported encryption type") + + key_type = self._lib.Cryptography_EVP_PKEY_id(evp_pkey) + if encoding is serialization.Encoding.PEM: + if format is serialization.PrivateFormat.PKCS8: + write_bio = self._lib.PEM_write_bio_PKCS8PrivateKey + key = evp_pkey + else: + assert format is serialization.PrivateFormat.TraditionalOpenSSL + if key_type == self._lib.EVP_PKEY_RSA: + write_bio = self._lib.PEM_write_bio_RSAPrivateKey + elif key_type == self._lib.EVP_PKEY_DSA: + write_bio = self._lib.PEM_write_bio_DSAPrivateKey + else: + assert self._lib.Cryptography_HAS_EC == 1 + assert key_type == self._lib.EVP_PKEY_EC + write_bio = self._lib.PEM_write_bio_ECPrivateKey + + key = cdata + elif encoding is serialization.Encoding.DER: + if format is serialization.PrivateFormat.TraditionalOpenSSL: + if not isinstance( + encryption_algorithm, serialization.NoEncryption + ): + raise ValueError( + "Encryption is not supported for DER encoded " + "traditional OpenSSL keys" + ) + + return self._private_key_bytes_traditional_der(key_type, cdata) + else: + assert format is serialization.PrivateFormat.PKCS8 + write_bio = self._lib.i2d_PKCS8PrivateKey_bio + key = evp_pkey + else: + raise TypeError("encoding must be an item from the Encoding enum") + + bio = self._create_mem_bio_gc() + res = write_bio( + bio, + key, + evp_cipher, + password, + passlen, + self._ffi.NULL, + self._ffi.NULL + ) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def _private_key_bytes_traditional_der(self, key_type, cdata): + if key_type == self._lib.EVP_PKEY_RSA: + write_bio = self._lib.i2d_RSAPrivateKey_bio + elif (self._lib.Cryptography_HAS_EC == 1 and + key_type == self._lib.EVP_PKEY_EC): + write_bio = self._lib.i2d_ECPrivateKey_bio + else: + self.openssl_assert(key_type == self._lib.EVP_PKEY_DSA) + write_bio = self._lib.i2d_DSAPrivateKey_bio + + bio = self._create_mem_bio_gc() + res = write_bio(bio, cdata) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def _public_key_bytes(self, encoding, format, key, evp_pkey, cdata): + if not isinstance(encoding, serialization.Encoding): + raise TypeError("encoding must be an item from the Encoding enum") + + if ( + format is serialization.PublicFormat.OpenSSH or + encoding is serialization.Encoding.OpenSSH + ): + if ( + format is not serialization.PublicFormat.OpenSSH or + encoding is not serialization.Encoding.OpenSSH + ): + raise ValueError( + "OpenSSH format must be used with OpenSSH encoding" + ) + return self._openssh_public_key_bytes(key) + elif format is serialization.PublicFormat.SubjectPublicKeyInfo: + if encoding is serialization.Encoding.PEM: + write_bio = self._lib.PEM_write_bio_PUBKEY + else: + assert encoding is serialization.Encoding.DER + write_bio = self._lib.i2d_PUBKEY_bio + + key = evp_pkey + elif format is serialization.PublicFormat.PKCS1: + # Only RSA is supported here. 
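+            # PKCS1 (raw RSAPublicKey DER / "BEGIN RSA PUBLIC KEY" PEM) is
+            # defined only for RSA keys, so anything else is a caller bug.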
+ assert self._lib.Cryptography_EVP_PKEY_id( + evp_pkey + ) == self._lib.EVP_PKEY_RSA + if encoding is serialization.Encoding.PEM: + write_bio = self._lib.PEM_write_bio_RSAPublicKey + else: + assert encoding is serialization.Encoding.DER + write_bio = self._lib.i2d_RSAPublicKey_bio + + key = cdata + else: + raise TypeError( + "format must be an item from the PublicFormat enum" + ) + + bio = self._create_mem_bio_gc() + res = write_bio(bio, key) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def _openssh_public_key_bytes(self, key): + if isinstance(key, rsa.RSAPublicKey): + public_numbers = key.public_numbers() + return b"ssh-rsa " + base64.b64encode( + serialization._ssh_write_string(b"ssh-rsa") + + serialization._ssh_write_mpint(public_numbers.e) + + serialization._ssh_write_mpint(public_numbers.n) + ) + elif isinstance(key, dsa.DSAPublicKey): + public_numbers = key.public_numbers() + parameter_numbers = public_numbers.parameter_numbers + return b"ssh-dss " + base64.b64encode( + serialization._ssh_write_string(b"ssh-dss") + + serialization._ssh_write_mpint(parameter_numbers.p) + + serialization._ssh_write_mpint(parameter_numbers.q) + + serialization._ssh_write_mpint(parameter_numbers.g) + + serialization._ssh_write_mpint(public_numbers.y) + ) + else: + assert isinstance(key, ec.EllipticCurvePublicKey) + public_numbers = key.public_numbers() + try: + curve_name = { + ec.SECP256R1: b"nistp256", + ec.SECP384R1: b"nistp384", + ec.SECP521R1: b"nistp521", + }[type(public_numbers.curve)] + except KeyError: + raise ValueError( + "Only SECP256R1, SECP384R1, and SECP521R1 curves are " + "supported by the SSH public key format" + ) + return b"ecdsa-sha2-" + curve_name + b" " + base64.b64encode( + serialization._ssh_write_string(b"ecdsa-sha2-" + curve_name) + + serialization._ssh_write_string(curve_name) + + serialization._ssh_write_string(public_numbers.encode_point()) + ) + + +class GetCipherByName(object): + def __init__(self, fmt): + self._fmt = fmt + + def __call__(self, backend, cipher, mode): + cipher_name = self._fmt.format(cipher=cipher, mode=mode).lower() + return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii")) + + +backend = Backend() diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/ciphers.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/ciphers.py new file mode 100644 index 0000000..a80708a --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/ciphers.py @@ -0,0 +1,213 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import ciphers +from cryptography.hazmat.primitives.ciphers import modes + + +@utils.register_interface(ciphers.CipherContext) +@utils.register_interface(ciphers.AEADCipherContext) +@utils.register_interface(ciphers.AEADEncryptionContext) +class _CipherContext(object): + _ENCRYPT = 1 + _DECRYPT = 0 + + def __init__(self, backend, cipher, mode, operation): + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + self._tag = None + + if isinstance(self._cipher, ciphers.BlockCipherAlgorithm): + self._block_size = self._cipher.block_size + else: + self._block_size = 1 + + ctx = self._backend._lib.EVP_CIPHER_CTX_new() + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.EVP_CIPHER_CTX_free + ) + + registry = self._backend._cipher_registry + try: + adapter = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + evp_cipher = adapter(self._backend, cipher, mode) + if evp_cipher == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + if isinstance(mode, modes.ModeWithInitializationVector): + iv_nonce = mode.initialization_vector + elif isinstance(mode, modes.ModeWithNonce): + iv_nonce = mode.nonce + else: + iv_nonce = self._backend._ffi.NULL + # begin init with cipher and operation type + res = self._backend._lib.EVP_CipherInit_ex(ctx, evp_cipher, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + operation) + self._backend.openssl_assert(res != 0) + # set the key length to handle variable key ciphers + res = self._backend._lib.EVP_CIPHER_CTX_set_key_length( + ctx, len(cipher.key) + ) + self._backend.openssl_assert(res != 0) + if isinstance(mode, modes.GCM): + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, self._backend._lib.EVP_CTRL_GCM_SET_IVLEN, + len(iv_nonce), self._backend._ffi.NULL + ) + self._backend.openssl_assert(res != 0) + if operation == self._DECRYPT: + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, self._backend._lib.EVP_CTRL_GCM_SET_TAG, + len(mode.tag), mode.tag + ) + self._backend.openssl_assert(res != 0) + + # pass key/iv + res = self._backend._lib.EVP_CipherInit_ex( + ctx, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + cipher.key, + iv_nonce, + operation + ) + self._backend.openssl_assert(res != 0) + # We purposely disable padding here as it's handled higher up in the + # API. + self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0) + self._ctx = ctx + + def update(self, data): + # OpenSSL 0.9.8e has an assertion in its EVP code that causes it + # to SIGABRT if you call update with an empty byte string. This can be + # removed when we drop support for 0.9.8e (CentOS/RHEL 5). This branch + # should be taken only when length is zero and mode is not GCM because + # AES GCM can return improper tag values if you don't call update + # with empty plaintext when authenticating AAD for ...reasons. 
+ if len(data) == 0 and not isinstance(self._mode, modes.GCM): + return b"" + + buf = self._backend._ffi.new("unsigned char[]", + len(data) + self._block_size - 1) + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen, data, + len(data)) + self._backend.openssl_assert(res != 0) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def finalize(self): + # OpenSSL 1.0.1 on Ubuntu 12.04 (and possibly other distributions) + # appears to have a bug where you must make at least one call to update + # even if you are only using authenticate_additional_data or the + # GCM tag will be wrong. An (empty) call to update resolves this + # and is harmless for all other versions of OpenSSL. + if isinstance(self._mode, modes.GCM): + self.update(b"") + + buf = self._backend._ffi.new("unsigned char[]", self._block_size) + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen) + if res == 0: + errors = self._backend._consume_errors() + + if not errors and isinstance(self._mode, modes.GCM): + raise InvalidTag + + self._backend.openssl_assert( + errors[0][1:] == ( + self._backend._lib.ERR_LIB_EVP, + self._backend._lib.EVP_F_EVP_ENCRYPTFINAL_EX, + self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH + ) or errors[0][1:] == ( + self._backend._lib.ERR_LIB_EVP, + self._backend._lib.EVP_F_EVP_DECRYPTFINAL_EX, + self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH + ) + ) + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length." + ) + + if (isinstance(self._mode, modes.GCM) and + self._operation == self._ENCRYPT): + block_byte_size = self._block_size // 8 + tag_buf = self._backend._ffi.new( + "unsigned char[]", block_byte_size + ) + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + self._ctx, self._backend._lib.EVP_CTRL_GCM_GET_TAG, + block_byte_size, tag_buf + ) + self._backend.openssl_assert(res != 0) + self._tag = self._backend._ffi.buffer(tag_buf)[:] + + res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx) + self._backend.openssl_assert(res == 1) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def authenticate_additional_data(self, data): + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherUpdate( + self._ctx, self._backend._ffi.NULL, outlen, data, len(data) + ) + self._backend.openssl_assert(res != 0) + + tag = utils.read_only_property("_tag") + + +@utils.register_interface(ciphers.CipherContext) +class _AESCTRCipherContext(object): + """ + This is needed to provide support for AES CTR mode in OpenSSL 0.9.8. It can + be removed when we drop 0.9.8 support (RHEL5 extended life ends 2020). 
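+
+    AES_ctr128_encrypt() carries the keystream position across calls in the
+    _ecount scratch buffer and the _num offset, so chunked update() calls
+    produce the same output as one call over the whole plaintext.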
+ """ + def __init__(self, backend, cipher, mode): + self._backend = backend + + self._key = self._backend._ffi.new("AES_KEY *") + res = self._backend._lib.AES_set_encrypt_key( + cipher.key, len(cipher.key) * 8, self._key + ) + self._backend.openssl_assert(res == 0) + self._ecount = self._backend._ffi.new("char[]", 16) + self._nonce = self._backend._ffi.new("char[16]", mode.nonce) + self._num = self._backend._ffi.new("unsigned int *", 0) + + def update(self, data): + buf = self._backend._ffi.new("unsigned char[]", len(data)) + self._backend._lib.AES_ctr128_encrypt( + data, buf, len(data), self._key, self._nonce, + self._ecount, self._num + ) + return self._backend._ffi.buffer(buf)[:] + + def finalize(self): + self._key = None + self._ecount = None + self._nonce = None + self._num = None + return b"" diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/cmac.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/cmac.py new file mode 100644 index 0000000..eaefc27 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/cmac.py @@ -0,0 +1,80 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import constant_time, interfaces +from cryptography.hazmat.primitives.ciphers.modes import CBC + + +@utils.register_interface(interfaces.MACContext) +class _CMACContext(object): + def __init__(self, backend, algorithm, ctx=None): + if not backend.cmac_algorithm_supported(algorithm): + raise UnsupportedAlgorithm("This backend does not support CMAC.", + _Reasons.UNSUPPORTED_CIPHER) + + self._backend = backend + self._key = algorithm.key + self._algorithm = algorithm + self._output_length = algorithm.block_size // 8 + + if ctx is None: + registry = self._backend._cipher_registry + adapter = registry[type(algorithm), CBC] + + evp_cipher = adapter(self._backend, algorithm, CBC) + + ctx = self._backend._lib.CMAC_CTX_new() + + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free) + + self._backend._lib.CMAC_Init( + ctx, self._key, len(self._key), + evp_cipher, self._backend._ffi.NULL + ) + + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def update(self, data): + res = self._backend._lib.CMAC_Update(self._ctx, data, len(data)) + self._backend.openssl_assert(res == 1) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", self._output_length) + length = self._backend._ffi.new("size_t *", self._output_length) + res = self._backend._lib.CMAC_Final( + self._ctx, buf, length + ) + self._backend.openssl_assert(res == 1) + + self._ctx = None + + return self._backend._ffi.buffer(buf)[:] + + def copy(self): + copied_ctx = self._backend._lib.CMAC_CTX_new() + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.CMAC_CTX_free + ) + res = self._backend._lib.CMAC_CTX_copy( + copied_ctx, self._ctx + ) + self._backend.openssl_assert(res == 1) + return _CMACContext( + self._backend, self._algorithm, ctx=copied_ctx + ) + + def verify(self, signature): + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature 
did not match digest.") diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py new file mode 100644 index 0000000..26f56d1 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py @@ -0,0 +1,804 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import datetime +import ipaddress + +from email.utils import parseaddr + +import idna + +import six + +from six.moves import urllib_parse + +from cryptography import x509 +from cryptography.x509.oid import ( + CRLEntryExtensionOID, CertificatePoliciesOID, ExtensionOID +) + + +def _obj2txt(backend, obj): + # Set to 80 on the recommendation of + # https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values + buf_len = 80 + buf = backend._ffi.new("char[]", buf_len) + res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1) + backend.openssl_assert(res > 0) + return backend._ffi.buffer(buf, res)[:].decode() + + +def _decode_x509_name_entry(backend, x509_name_entry): + obj = backend._lib.X509_NAME_ENTRY_get_object(x509_name_entry) + backend.openssl_assert(obj != backend._ffi.NULL) + data = backend._lib.X509_NAME_ENTRY_get_data(x509_name_entry) + backend.openssl_assert(data != backend._ffi.NULL) + value = _asn1_string_to_utf8(backend, data) + oid = _obj2txt(backend, obj) + + return x509.NameAttribute(x509.ObjectIdentifier(oid), value) + + +def _decode_x509_name(backend, x509_name): + count = backend._lib.X509_NAME_entry_count(x509_name) + attributes = [] + for x in range(count): + entry = backend._lib.X509_NAME_get_entry(x509_name, x) + attributes.append(_decode_x509_name_entry(backend, entry)) + + return x509.Name(attributes) + + +def _decode_general_names(backend, gns): + num = backend._lib.sk_GENERAL_NAME_num(gns) + names = [] + for i in range(num): + gn = backend._lib.sk_GENERAL_NAME_value(gns, i) + backend.openssl_assert(gn != backend._ffi.NULL) + names.append(_decode_general_name(backend, gn)) + + return names + + +def _decode_general_name(backend, gn): + if gn.type == backend._lib.GEN_DNS: + data = _asn1_string_to_bytes(backend, gn.d.dNSName) + if not data: + decoded = u"" + elif data.startswith(b"*."): + # This is a wildcard name. We need to remove the leading wildcard, + # IDNA decode, then re-add the wildcard. Wildcard characters should + # always be left-most (RFC 2595 section 2.4). + decoded = u"*." + idna.decode(data[2:]) + else: + # Not a wildcard, decode away. If the string has a * in it anywhere + # invalid this will raise an InvalidCodePoint + decoded = idna.decode(data) + if data.startswith(b"."): + # idna strips leading periods. Name constraints can have that + # so we need to re-add it. Sigh. + decoded = u"." + decoded + + return x509.DNSName(decoded) + elif gn.type == backend._lib.GEN_URI: + data = _asn1_string_to_ascii(backend, gn.d.uniformResourceIdentifier) + parsed = urllib_parse.urlparse(data) + if parsed.hostname: + hostname = idna.decode(parsed.hostname) + else: + hostname = "" + if parsed.port: + netloc = hostname + u":" + six.text_type(parsed.port) + else: + netloc = hostname + + # Note that building a URL in this fashion means it should be + # semantically indistinguishable from the original but is not + # guaranteed to be exactly the same. 
+ uri = urllib_parse.urlunparse(( + parsed.scheme, + netloc, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment + )) + return x509.UniformResourceIdentifier(uri) + elif gn.type == backend._lib.GEN_RID: + oid = _obj2txt(backend, gn.d.registeredID) + return x509.RegisteredID(x509.ObjectIdentifier(oid)) + elif gn.type == backend._lib.GEN_IPADD: + data = _asn1_string_to_bytes(backend, gn.d.iPAddress) + data_len = len(data) + if data_len == 8 or data_len == 32: + # This is an IPv4 or IPv6 Network and not a single IP. This + # type of data appears in Name Constraints. Unfortunately, + # ipaddress doesn't support packed bytes + netmask. Additionally, + # IPv6Network can only handle CIDR rather than the full 16 byte + # netmask. To handle this we convert the netmask to integer, then + # find the first 0 bit, which will be the prefix. If another 1 + # bit is present after that the netmask is invalid. + base = ipaddress.ip_address(data[:data_len // 2]) + netmask = ipaddress.ip_address(data[data_len // 2:]) + bits = bin(int(netmask))[2:] + prefix = bits.find('0') + # If no 0 bits are found it is a /32 or /128 + if prefix == -1: + prefix = len(bits) + + if "1" in bits[prefix:]: + raise ValueError("Invalid netmask") + + ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix)) + else: + ip = ipaddress.ip_address(data) + + return x509.IPAddress(ip) + elif gn.type == backend._lib.GEN_DIRNAME: + return x509.DirectoryName( + _decode_x509_name(backend, gn.d.directoryName) + ) + elif gn.type == backend._lib.GEN_EMAIL: + data = _asn1_string_to_ascii(backend, gn.d.rfc822Name) + name, address = parseaddr(data) + parts = address.split(u"@") + if name or not address: + # parseaddr has found a name (e.g. Name ) or the entire + # value is an empty string. + raise ValueError("Invalid rfc822name value") + elif len(parts) == 1: + # Single label email name. This is valid for local delivery. No + # IDNA decoding can be done since there is no domain component. + return x509.RFC822Name(address) + else: + # A normal email of the form user@domain.com. Let's attempt to + # decode the domain component and return the entire address. 
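+            # e.g. b"user@xn--bcher-kva.example" becomes
+            # u"user@bücher.example".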
+ return x509.RFC822Name( + parts[0] + u"@" + idna.decode(parts[1]) + ) + elif gn.type == backend._lib.GEN_OTHERNAME: + type_id = _obj2txt(backend, gn.d.otherName.type_id) + value = _asn1_to_der(backend, gn.d.otherName.value) + return x509.OtherName(x509.ObjectIdentifier(type_id), value) + else: + # x400Address or ediPartyName + raise x509.UnsupportedGeneralNameType( + "{0} is not a supported type".format( + x509._GENERAL_NAMES.get(gn.type, gn.type) + ), + gn.type + ) + + +def _decode_ocsp_no_check(backend, ext): + return x509.OCSPNoCheck() + + +def _decode_crl_number(backend, ext): + asn1_int = backend._ffi.cast("ASN1_INTEGER *", ext) + asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free) + return x509.CRLNumber(_asn1_integer_to_int(backend, asn1_int)) + + +class _X509ExtensionParser(object): + def __init__(self, ext_count, get_ext, handlers, unsupported_exts=None): + self.ext_count = ext_count + self.get_ext = get_ext + self.handlers = handlers + self.unsupported_exts = unsupported_exts + + def parse(self, backend, x509_obj): + extensions = [] + seen_oids = set() + for i in range(self.ext_count(backend, x509_obj)): + ext = self.get_ext(backend, x509_obj, i) + backend.openssl_assert(ext != backend._ffi.NULL) + crit = backend._lib.X509_EXTENSION_get_critical(ext) + critical = crit == 1 + oid = x509.ObjectIdentifier( + _obj2txt(backend, backend._lib.X509_EXTENSION_get_object(ext)) + ) + if oid in seen_oids: + raise x509.DuplicateExtension( + "Duplicate {0} extension found".format(oid), oid + ) + try: + handler = self.handlers[oid] + except KeyError: + if critical: + raise x509.UnsupportedExtension( + "Critical extension {0} is not currently supported" + .format(oid), oid + ) + else: + # Dump the DER payload into an UnrecognizedExtension object + data = backend._lib.X509_EXTENSION_get_data(ext) + backend.openssl_assert(data != backend._ffi.NULL) + der = backend._ffi.buffer(data.data, data.length)[:] + unrecognized = x509.UnrecognizedExtension(oid, der) + extensions.append( + x509.Extension(oid, critical, unrecognized) + ) + else: + # For extensions which are not supported by OpenSSL we pass the + # extension object directly to the parsing routine so it can + # be decoded manually. 
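+                # All other supported extensions go through X509V3_EXT_d2i,
+                # which hands the handler an already-parsed OpenSSL struct.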
+ if self.unsupported_exts and oid in self.unsupported_exts: + ext_data = ext + else: + ext_data = backend._lib.X509V3_EXT_d2i(ext) + if ext_data == backend._ffi.NULL: + backend._consume_errors() + raise ValueError( + "The {0} extension is invalid and can't be " + "parsed".format(oid) + ) + + value = handler(backend, ext_data) + extensions.append(x509.Extension(oid, critical, value)) + + seen_oids.add(oid) + + return x509.Extensions(extensions) + + +def _decode_certificate_policies(backend, cp): + cp = backend._ffi.cast("Cryptography_STACK_OF_POLICYINFO *", cp) + cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free) + num = backend._lib.sk_POLICYINFO_num(cp) + certificate_policies = [] + for i in range(num): + qualifiers = None + pi = backend._lib.sk_POLICYINFO_value(cp, i) + oid = x509.ObjectIdentifier(_obj2txt(backend, pi.policyid)) + if pi.qualifiers != backend._ffi.NULL: + qnum = backend._lib.sk_POLICYQUALINFO_num(pi.qualifiers) + qualifiers = [] + for j in range(qnum): + pqi = backend._lib.sk_POLICYQUALINFO_value( + pi.qualifiers, j + ) + pqualid = x509.ObjectIdentifier( + _obj2txt(backend, pqi.pqualid) + ) + if pqualid == CertificatePoliciesOID.CPS_QUALIFIER: + cpsuri = backend._ffi.buffer( + pqi.d.cpsuri.data, pqi.d.cpsuri.length + )[:].decode('ascii') + qualifiers.append(cpsuri) + else: + assert pqualid == CertificatePoliciesOID.CPS_USER_NOTICE + user_notice = _decode_user_notice( + backend, pqi.d.usernotice + ) + qualifiers.append(user_notice) + + certificate_policies.append( + x509.PolicyInformation(oid, qualifiers) + ) + + return x509.CertificatePolicies(certificate_policies) + + +def _decode_user_notice(backend, un): + explicit_text = None + notice_reference = None + + if un.exptext != backend._ffi.NULL: + explicit_text = _asn1_string_to_utf8(backend, un.exptext) + + if un.noticeref != backend._ffi.NULL: + organization = _asn1_string_to_utf8( + backend, un.noticeref.organization + ) + + num = backend._lib.sk_ASN1_INTEGER_num( + un.noticeref.noticenos + ) + notice_numbers = [] + for i in range(num): + asn1_int = backend._lib.sk_ASN1_INTEGER_value( + un.noticeref.noticenos, i + ) + notice_num = _asn1_integer_to_int(backend, asn1_int) + notice_numbers.append(notice_num) + + notice_reference = x509.NoticeReference( + organization, notice_numbers + ) + + return x509.UserNotice(notice_reference, explicit_text) + + +def _decode_basic_constraints(backend, bc_st): + basic_constraints = backend._ffi.cast("BASIC_CONSTRAINTS *", bc_st) + basic_constraints = backend._ffi.gc( + basic_constraints, backend._lib.BASIC_CONSTRAINTS_free + ) + # The byte representation of an ASN.1 boolean true is \xff. OpenSSL + # chooses to just map this to its ordinal value, so true is 255 and + # false is 0. 
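+    # (In DER the encoding of BOOLEAN TRUE is the single content byte 0xff,
+    # e.g. 01 01 ff.)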
+ ca = basic_constraints.ca == 255 + path_length = _asn1_integer_to_int_or_none( + backend, basic_constraints.pathlen + ) + + return x509.BasicConstraints(ca, path_length) + + +def _decode_subject_key_identifier(backend, asn1_string): + asn1_string = backend._ffi.cast("ASN1_OCTET_STRING *", asn1_string) + asn1_string = backend._ffi.gc( + asn1_string, backend._lib.ASN1_OCTET_STRING_free + ) + return x509.SubjectKeyIdentifier( + backend._ffi.buffer(asn1_string.data, asn1_string.length)[:] + ) + + +def _decode_authority_key_identifier(backend, akid): + akid = backend._ffi.cast("AUTHORITY_KEYID *", akid) + akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free) + key_identifier = None + authority_cert_issuer = None + + if akid.keyid != backend._ffi.NULL: + key_identifier = backend._ffi.buffer( + akid.keyid.data, akid.keyid.length + )[:] + + if akid.issuer != backend._ffi.NULL: + authority_cert_issuer = _decode_general_names( + backend, akid.issuer + ) + + authority_cert_serial_number = _asn1_integer_to_int_or_none( + backend, akid.serial + ) + + return x509.AuthorityKeyIdentifier( + key_identifier, authority_cert_issuer, authority_cert_serial_number + ) + + +def _decode_authority_information_access(backend, aia): + aia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia) + aia = backend._ffi.gc(aia, backend._lib.sk_ACCESS_DESCRIPTION_free) + num = backend._lib.sk_ACCESS_DESCRIPTION_num(aia) + access_descriptions = [] + for i in range(num): + ad = backend._lib.sk_ACCESS_DESCRIPTION_value(aia, i) + backend.openssl_assert(ad.method != backend._ffi.NULL) + oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method)) + backend.openssl_assert(ad.location != backend._ffi.NULL) + gn = _decode_general_name(backend, ad.location) + access_descriptions.append(x509.AccessDescription(oid, gn)) + + return x509.AuthorityInformationAccess(access_descriptions) + + +def _decode_key_usage(backend, bit_string): + bit_string = backend._ffi.cast("ASN1_BIT_STRING *", bit_string) + bit_string = backend._ffi.gc(bit_string, backend._lib.ASN1_BIT_STRING_free) + get_bit = backend._lib.ASN1_BIT_STRING_get_bit + digital_signature = get_bit(bit_string, 0) == 1 + content_commitment = get_bit(bit_string, 1) == 1 + key_encipherment = get_bit(bit_string, 2) == 1 + data_encipherment = get_bit(bit_string, 3) == 1 + key_agreement = get_bit(bit_string, 4) == 1 + key_cert_sign = get_bit(bit_string, 5) == 1 + crl_sign = get_bit(bit_string, 6) == 1 + encipher_only = get_bit(bit_string, 7) == 1 + decipher_only = get_bit(bit_string, 8) == 1 + return x509.KeyUsage( + digital_signature, + content_commitment, + key_encipherment, + data_encipherment, + key_agreement, + key_cert_sign, + crl_sign, + encipher_only, + decipher_only + ) + + +def _decode_general_names_extension(backend, gns): + gns = backend._ffi.cast("GENERAL_NAMES *", gns) + gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free) + general_names = _decode_general_names(backend, gns) + return general_names + + +def _decode_subject_alt_name(backend, ext): + return x509.SubjectAlternativeName( + _decode_general_names_extension(backend, ext) + ) + + +def _decode_issuer_alt_name(backend, ext): + return x509.IssuerAlternativeName( + _decode_general_names_extension(backend, ext) + ) + + +def _decode_name_constraints(backend, nc): + nc = backend._ffi.cast("NAME_CONSTRAINTS *", nc) + nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free) + permitted = _decode_general_subtrees(backend, nc.permittedSubtrees) + excluded = _decode_general_subtrees(backend, 
nc.excludedSubtrees) + return x509.NameConstraints( + permitted_subtrees=permitted, excluded_subtrees=excluded + ) + + +def _decode_general_subtrees(backend, stack_subtrees): + if stack_subtrees == backend._ffi.NULL: + return None + + num = backend._lib.sk_GENERAL_SUBTREE_num(stack_subtrees) + subtrees = [] + + for i in range(num): + obj = backend._lib.sk_GENERAL_SUBTREE_value(stack_subtrees, i) + backend.openssl_assert(obj != backend._ffi.NULL) + name = _decode_general_name(backend, obj.base) + subtrees.append(name) + + return subtrees + + +def _decode_policy_constraints(backend, pc): + pc = backend._ffi.cast("POLICY_CONSTRAINTS *", pc) + pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free) + + require_explicit_policy = _asn1_integer_to_int_or_none( + backend, pc.requireExplicitPolicy + ) + inhibit_policy_mapping = _asn1_integer_to_int_or_none( + backend, pc.inhibitPolicyMapping + ) + + return x509.PolicyConstraints( + require_explicit_policy, inhibit_policy_mapping + ) + + +def _decode_extended_key_usage(backend, sk): + sk = backend._ffi.cast("Cryptography_STACK_OF_ASN1_OBJECT *", sk) + sk = backend._ffi.gc(sk, backend._lib.sk_ASN1_OBJECT_free) + num = backend._lib.sk_ASN1_OBJECT_num(sk) + ekus = [] + + for i in range(num): + obj = backend._lib.sk_ASN1_OBJECT_value(sk, i) + backend.openssl_assert(obj != backend._ffi.NULL) + oid = x509.ObjectIdentifier(_obj2txt(backend, obj)) + ekus.append(oid) + + return x509.ExtendedKeyUsage(ekus) + + +_DISTPOINT_TYPE_FULLNAME = 0 +_DISTPOINT_TYPE_RELATIVENAME = 1 + + +def _decode_crl_distribution_points(backend, cdps): + cdps = backend._ffi.cast("Cryptography_STACK_OF_DIST_POINT *", cdps) + cdps = backend._ffi.gc(cdps, backend._lib.sk_DIST_POINT_free) + num = backend._lib.sk_DIST_POINT_num(cdps) + + dist_points = [] + for i in range(num): + full_name = None + relative_name = None + crl_issuer = None + reasons = None + cdp = backend._lib.sk_DIST_POINT_value(cdps, i) + if cdp.reasons != backend._ffi.NULL: + # We will check each bit from RFC 5280 + # ReasonFlags ::= BIT STRING { + # unused (0), + # keyCompromise (1), + # cACompromise (2), + # affiliationChanged (3), + # superseded (4), + # cessationOfOperation (5), + # certificateHold (6), + # privilegeWithdrawn (7), + # aACompromise (8) } + reasons = [] + get_bit = backend._lib.ASN1_BIT_STRING_get_bit + if get_bit(cdp.reasons, 1): + reasons.append(x509.ReasonFlags.key_compromise) + + if get_bit(cdp.reasons, 2): + reasons.append(x509.ReasonFlags.ca_compromise) + + if get_bit(cdp.reasons, 3): + reasons.append(x509.ReasonFlags.affiliation_changed) + + if get_bit(cdp.reasons, 4): + reasons.append(x509.ReasonFlags.superseded) + + if get_bit(cdp.reasons, 5): + reasons.append(x509.ReasonFlags.cessation_of_operation) + + if get_bit(cdp.reasons, 6): + reasons.append(x509.ReasonFlags.certificate_hold) + + if get_bit(cdp.reasons, 7): + reasons.append(x509.ReasonFlags.privilege_withdrawn) + + if get_bit(cdp.reasons, 8): + reasons.append(x509.ReasonFlags.aa_compromise) + + reasons = frozenset(reasons) + + if cdp.CRLissuer != backend._ffi.NULL: + crl_issuer = _decode_general_names(backend, cdp.CRLissuer) + + # Certificates may have a crl_issuer/reasons and no distribution + # point so make sure it's not null. + if cdp.distpoint != backend._ffi.NULL: + # Type 0 is fullName, there is no #define for it in the code. 
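+            # RFC 5280: DistributionPointName ::= CHOICE {
+            #     fullName                [0] GeneralNames,
+            #     nameRelativeToCRLIssuer [1] RelativeDistinguishedName }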
+ if cdp.distpoint.type == _DISTPOINT_TYPE_FULLNAME: + full_name = _decode_general_names( + backend, cdp.distpoint.name.fullname + ) + # OpenSSL code doesn't test for a specific type for + # relativename, everything that isn't fullname is considered + # relativename. + else: + rns = cdp.distpoint.name.relativename + rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns) + attributes = [] + for i in range(rnum): + rn = backend._lib.sk_X509_NAME_ENTRY_value( + rns, i + ) + backend.openssl_assert(rn != backend._ffi.NULL) + attributes.append( + _decode_x509_name_entry(backend, rn) + ) + + relative_name = x509.Name(attributes) + + dist_points.append( + x509.DistributionPoint( + full_name, relative_name, reasons, crl_issuer + ) + ) + + return x509.CRLDistributionPoints(dist_points) + + +def _decode_inhibit_any_policy(backend, asn1_int): + asn1_int = backend._ffi.cast("ASN1_INTEGER *", asn1_int) + asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free) + skip_certs = _asn1_integer_to_int(backend, asn1_int) + return x509.InhibitAnyPolicy(skip_certs) + + +# CRLReason ::= ENUMERATED { +# unspecified (0), +# keyCompromise (1), +# cACompromise (2), +# affiliationChanged (3), +# superseded (4), +# cessationOfOperation (5), +# certificateHold (6), +# -- value 7 is not used +# removeFromCRL (8), +# privilegeWithdrawn (9), +# aACompromise (10) } +_CRL_ENTRY_REASON_CODE_TO_ENUM = { + 0: x509.ReasonFlags.unspecified, + 1: x509.ReasonFlags.key_compromise, + 2: x509.ReasonFlags.ca_compromise, + 3: x509.ReasonFlags.affiliation_changed, + 4: x509.ReasonFlags.superseded, + 5: x509.ReasonFlags.cessation_of_operation, + 6: x509.ReasonFlags.certificate_hold, + 8: x509.ReasonFlags.remove_from_crl, + 9: x509.ReasonFlags.privilege_withdrawn, + 10: x509.ReasonFlags.aa_compromise, +} + + +_CRL_ENTRY_REASON_ENUM_TO_CODE = { + x509.ReasonFlags.unspecified: 0, + x509.ReasonFlags.key_compromise: 1, + x509.ReasonFlags.ca_compromise: 2, + x509.ReasonFlags.affiliation_changed: 3, + x509.ReasonFlags.superseded: 4, + x509.ReasonFlags.cessation_of_operation: 5, + x509.ReasonFlags.certificate_hold: 6, + x509.ReasonFlags.remove_from_crl: 8, + x509.ReasonFlags.privilege_withdrawn: 9, + x509.ReasonFlags.aa_compromise: 10 +} + + +def _decode_crl_reason(backend, enum): + enum = backend._ffi.cast("ASN1_ENUMERATED *", enum) + enum = backend._ffi.gc(enum, backend._lib.ASN1_ENUMERATED_free) + code = backend._lib.ASN1_ENUMERATED_get(enum) + + try: + return x509.CRLReason(_CRL_ENTRY_REASON_CODE_TO_ENUM[code]) + except KeyError: + raise ValueError("Unsupported reason code: {0}".format(code)) + + +def _decode_invalidity_date(backend, inv_date): + generalized_time = backend._ffi.cast( + "ASN1_GENERALIZEDTIME *", inv_date + ) + generalized_time = backend._ffi.gc( + generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free + ) + return x509.InvalidityDate( + _parse_asn1_generalized_time(backend, generalized_time) + ) + + +def _decode_cert_issuer(backend, ext): + """ + This handler decodes the CertificateIssuer entry extension directly + from the X509_EXTENSION object. This is necessary because this entry + extension is not directly supported by OpenSSL 0.9.8. + """ + + data_ptr_ptr = backend._ffi.new("const unsigned char **") + value = backend._lib.X509_EXTENSION_get_data(ext) + data_ptr_ptr[0] = value.data + gns = backend._lib.d2i_GENERAL_NAMES( + backend._ffi.NULL, data_ptr_ptr, value.length + ) + + # Check the result of d2i_GENERAL_NAMES() is valid. 
Usually this is covered + # in _X509ExtensionParser but since we are responsible for decoding this + # entry extension ourselves, we have to do this here. + if gns == backend._ffi.NULL: + backend._consume_errors() + raise ValueError( + "The {0} extension is corrupted and can't be parsed".format( + CRLEntryExtensionOID.CERTIFICATE_ISSUER)) + + gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free) + return x509.CertificateIssuer(_decode_general_names(backend, gns)) + + +def _asn1_to_der(backend, asn1_type): + buf = backend._ffi.new("unsigned char **") + res = backend._lib.i2d_ASN1_TYPE(asn1_type, buf) + backend.openssl_assert(res >= 0) + backend.openssl_assert(buf[0] != backend._ffi.NULL) + buf = backend._ffi.gc( + buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0]) + ) + return backend._ffi.buffer(buf[0], res)[:] + + +def _asn1_integer_to_int(backend, asn1_int): + bn = backend._lib.ASN1_INTEGER_to_BN(asn1_int, backend._ffi.NULL) + backend.openssl_assert(bn != backend._ffi.NULL) + bn = backend._ffi.gc(bn, backend._lib.BN_free) + return backend._bn_to_int(bn) + + +def _asn1_integer_to_int_or_none(backend, asn1_int): + if asn1_int == backend._ffi.NULL: + return None + else: + return _asn1_integer_to_int(backend, asn1_int) + + +def _asn1_string_to_bytes(backend, asn1_string): + return backend._ffi.buffer(asn1_string.data, asn1_string.length)[:] + + +def _asn1_string_to_ascii(backend, asn1_string): + return _asn1_string_to_bytes(backend, asn1_string).decode("ascii") + + +def _asn1_string_to_utf8(backend, asn1_string): + buf = backend._ffi.new("unsigned char **") + res = backend._lib.ASN1_STRING_to_UTF8(buf, asn1_string) + if res == -1: + raise ValueError( + "Unsupported ASN1 string type. Type: {0}".format(asn1_string.type) + ) + + backend.openssl_assert(buf[0] != backend._ffi.NULL) + buf = backend._ffi.gc( + buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0]) + ) + return backend._ffi.buffer(buf[0], res)[:].decode('utf8') + + +def _parse_asn1_time(backend, asn1_time): + backend.openssl_assert(asn1_time != backend._ffi.NULL) + generalized_time = backend._lib.ASN1_TIME_to_generalizedtime( + asn1_time, backend._ffi.NULL + ) + backend.openssl_assert(generalized_time != backend._ffi.NULL) + generalized_time = backend._ffi.gc( + generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free + ) + return _parse_asn1_generalized_time(backend, generalized_time) + + +def _parse_asn1_generalized_time(backend, generalized_time): + time = _asn1_string_to_ascii( + backend, backend._ffi.cast("ASN1_STRING *", generalized_time) + ) + return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ") + + +_EXTENSION_HANDLERS = { + ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints, + ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier, + ExtensionOID.KEY_USAGE: _decode_key_usage, + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _decode_subject_alt_name, + ExtensionOID.EXTENDED_KEY_USAGE: _decode_extended_key_usage, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _decode_authority_information_access + ), + ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies, + ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points, + ExtensionOID.OCSP_NO_CHECK: _decode_ocsp_no_check, + ExtensionOID.INHIBIT_ANY_POLICY: _decode_inhibit_any_policy, + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name, + ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints, + ExtensionOID.POLICY_CONSTRAINTS:
_decode_policy_constraints, +} + +_REVOKED_EXTENSION_HANDLERS = { + CRLEntryExtensionOID.CRL_REASON: _decode_crl_reason, + CRLEntryExtensionOID.INVALIDITY_DATE: _decode_invalidity_date, + CRLEntryExtensionOID.CERTIFICATE_ISSUER: _decode_cert_issuer, +} + +_REVOKED_UNSUPPORTED_EXTENSIONS = set([ + CRLEntryExtensionOID.CERTIFICATE_ISSUER, +]) + +_CRL_EXTENSION_HANDLERS = { + ExtensionOID.CRL_NUMBER: _decode_crl_number, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier, + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _decode_authority_information_access + ), +} + +_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i), + handlers=_EXTENSION_HANDLERS +) + +_CSR_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.sk_X509_EXTENSION_num(x), + get_ext=lambda backend, x, i: backend._lib.sk_X509_EXTENSION_value(x, i), + handlers=_EXTENSION_HANDLERS +) + +_REVOKED_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.X509_REVOKED_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.X509_REVOKED_get_ext(x, i), + handlers=_REVOKED_EXTENSION_HANDLERS, + unsupported_exts=_REVOKED_UNSUPPORTED_EXTENSIONS +) + +_CRL_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.X509_CRL_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.X509_CRL_get_ext(x, i), + handlers=_CRL_EXTENSION_HANDLERS, +) diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/dsa.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/dsa.py new file mode 100644 index 0000000..1608df0 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/dsa.py @@ -0,0 +1,303 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends.openssl.utils import _truncate_digest +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ( + AsymmetricSignatureContext, AsymmetricVerificationContext, dsa +) + + +def _truncate_digest_for_dsa(dsa_cdata, digest, backend): + """ + This function truncates digests that are longer than a given DSA + key's length so they can be signed. OpenSSL does this for us in + 1.0.0c+ and it isn't needed in 0.9.8, but that leaves us with three + releases (1.0.0, 1.0.0a, and 1.0.0b) where this is a problem. This + truncation is not required in 0.9.8 because DSA is limited to SHA-1.
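+ For example (illustrative numbers): signing with SHA-256 under a key
+ whose subgroup order q is 160 bits long uses only the leftmost
+ 160 bits of the 256-bit digest, per FIPS 186-3.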
+ """ + + q = backend._ffi.new("BIGNUM **") + backend._lib.DSA_get0_pqg( + dsa_cdata, backend._ffi.NULL, q, backend._ffi.NULL + ) + backend.openssl_assert(q[0] != backend._ffi.NULL) + + order_bits = backend._lib.BN_num_bits(q[0]) + return _truncate_digest(digest, order_bits) + + +@utils.register_interface(AsymmetricVerificationContext) +class _DSAVerificationContext(object): + def __init__(self, backend, public_key, signature, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + self._algorithm = algorithm + + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def verify(self): + data_to_verify = self._hash_ctx.finalize() + + data_to_verify = _truncate_digest_for_dsa( + self._public_key._dsa_cdata, data_to_verify, self._backend + ) + + # The first parameter passed to DSA_verify is unused by OpenSSL but + # must be an integer. + res = self._backend._lib.DSA_verify( + 0, data_to_verify, len(data_to_verify), self._signature, + len(self._signature), self._public_key._dsa_cdata) + + if res != 1: + self._backend._consume_errors() + raise InvalidSignature + + +@utils.register_interface(AsymmetricSignatureContext) +class _DSASignatureContext(object): + def __init__(self, backend, private_key, algorithm): + self._backend = backend + self._private_key = private_key + self._algorithm = algorithm + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def finalize(self): + data_to_sign = self._hash_ctx.finalize() + data_to_sign = _truncate_digest_for_dsa( + self._private_key._dsa_cdata, data_to_sign, self._backend + ) + sig_buf_len = self._backend._lib.DSA_size(self._private_key._dsa_cdata) + sig_buf = self._backend._ffi.new("unsigned char[]", sig_buf_len) + buflen = self._backend._ffi.new("unsigned int *") + + # The first parameter passed to DSA_sign is unused by OpenSSL but + # must be an integer. 
+ res = self._backend._lib.DSA_sign( + 0, data_to_sign, len(data_to_sign), sig_buf, + buflen, self._private_key._dsa_cdata) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0]) + + return self._backend._ffi.buffer(sig_buf)[:buflen[0]] + + +@utils.register_interface(dsa.DSAParametersWithNumbers) +class _DSAParameters(object): + def __init__(self, backend, dsa_cdata): + self._backend = backend + self._dsa_cdata = dsa_cdata + + def parameter_numbers(self): + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + return dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(p[0]), + q=self._backend._bn_to_int(q[0]), + g=self._backend._bn_to_int(g[0]) + ) + + def generate_private_key(self): + return self._backend.generate_dsa_private_key(self) + + +@utils.register_interface(dsa.DSAPrivateKeyWithSerialization) +class _DSAPrivateKey(object): + def __init__(self, backend, dsa_cdata, evp_pkey): + self._backend = backend + self._dsa_cdata = dsa_cdata + self._evp_pkey = evp_pkey + + p = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg( + dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL + ) + self._backend.openssl_assert(p[0] != backend._ffi.NULL) + self._key_size = self._backend._lib.BN_num_bits(p[0]) + + key_size = utils.read_only_property("_key_size") + + def signer(self, signature_algorithm): + return _DSASignatureContext(self._backend, self, signature_algorithm) + + def private_numbers(self): + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + pub_key = self._backend._ffi.new("BIGNUM **") + priv_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + self._backend._lib.DSA_get0_key(self._dsa_cdata, pub_key, priv_key) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL) + return dsa.DSAPrivateNumbers( + public_numbers=dsa.DSAPublicNumbers( + parameter_numbers=dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(p[0]), + q=self._backend._bn_to_int(q[0]), + g=self._backend._bn_to_int(g[0]) + ), + y=self._backend._bn_to_int(pub_key[0]) + ), + x=self._backend._bn_to_int(priv_key[0]) + ) + + def public_key(self): + dsa_cdata = self._backend._lib.DSA_new() + self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL) + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free + ) + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + p_dup = self._backend._lib.BN_dup(p[0]) + q_dup = self._backend._lib.BN_dup(q[0]) + g_dup = self._backend._lib.BN_dup(g[0]) + res = 
self._backend._lib.DSA_set0_pqg(dsa_cdata, p_dup, q_dup, g_dup) + self._backend.openssl_assert(res == 1) + pub_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_key( + self._dsa_cdata, pub_key, self._backend._ffi.NULL + ) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + pub_key_dup = self._backend._lib.BN_dup(pub_key[0]) + res = self._backend._lib.DSA_set0_key( + dsa_cdata, pub_key_dup, self._backend._ffi.NULL + ) + self._backend.openssl_assert(res == 1) + evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata) + return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey) + + def parameters(self): + dsa_cdata = self._backend._lib.DSA_new() + self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL) + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free + ) + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + p_dup = self._backend._lib.BN_dup(p[0]) + q_dup = self._backend._lib.BN_dup(q[0]) + g_dup = self._backend._lib.BN_dup(g[0]) + res = self._backend._lib.DSA_set0_pqg(dsa_cdata, p_dup, q_dup, g_dup) + self._backend.openssl_assert(res == 1) + return _DSAParameters(self._backend, dsa_cdata) + + def private_bytes(self, encoding, format, encryption_algorithm): + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self._evp_pkey, + self._dsa_cdata + ) + + +@utils.register_interface(dsa.DSAPublicKeyWithSerialization) +class _DSAPublicKey(object): + def __init__(self, backend, dsa_cdata, evp_pkey): + self._backend = backend + self._dsa_cdata = dsa_cdata + self._evp_pkey = evp_pkey + p = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg( + dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL + ) + self._backend.openssl_assert(p[0] != backend._ffi.NULL) + self._key_size = self._backend._lib.BN_num_bits(p[0]) + + key_size = utils.read_only_property("_key_size") + + def verifier(self, signature, signature_algorithm): + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") + + return _DSAVerificationContext( + self._backend, self, signature, signature_algorithm + ) + + def public_numbers(self): + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + pub_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + self._backend._lib.DSA_get0_key( + self._dsa_cdata, pub_key, self._backend._ffi.NULL + ) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + return dsa.DSAPublicNumbers( + parameter_numbers=dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(p[0]), + q=self._backend._bn_to_int(q[0]), + g=self._backend._bn_to_int(g[0]) + ), + y=self._backend._bn_to_int(pub_key[0]) + ) + + def parameters(self): + dsa_cdata = self._backend._lib.DSA_new() + self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL) + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free 
+ ) + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + p_dup = self._backend._lib.BN_dup(p[0]) + q_dup = self._backend._lib.BN_dup(q[0]) + g_dup = self._backend._lib.BN_dup(g[0]) + res = self._backend._lib.DSA_set0_pqg(dsa_cdata, p_dup, q_dup, g_dup) + self._backend.openssl_assert(res == 1) + return _DSAParameters(self._backend, dsa_cdata) + + def public_bytes(self, encoding, format): + if format is serialization.PublicFormat.PKCS1: + raise ValueError( + "DSA public keys do not support PKCS1 serialization" + ) + + return self._backend._public_key_bytes( + encoding, + format, + self, + self._evp_pkey, + None + ) diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/ec.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/ec.py new file mode 100644 index 0000000..2f47603 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/ec.py @@ -0,0 +1,305 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.openssl.utils import _truncate_digest +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ( + AsymmetricSignatureContext, AsymmetricVerificationContext, ec +) + + +def _truncate_digest_for_ecdsa(ec_key_cdata, digest, backend): + """ + This function truncates digests that are longer than a given elliptic + curve key's length so they can be signed. Since elliptic curve keys are + much shorter than RSA keys many digests (e.g. SHA-512) may require + truncation. + """ + + _lib = backend._lib + _ffi = backend._ffi + + group = _lib.EC_KEY_get0_group(ec_key_cdata) + + with backend._tmp_bn_ctx() as bn_ctx: + order = _lib.BN_CTX_get(bn_ctx) + backend.openssl_assert(order != _ffi.NULL) + + res = _lib.EC_GROUP_get_order(group, order, bn_ctx) + backend.openssl_assert(res == 1) + + order_bits = _lib.BN_num_bits(order) + + return _truncate_digest(digest, order_bits) + + +def _ec_key_curve_sn(backend, ec_key): + group = backend._lib.EC_KEY_get0_group(ec_key) + backend.openssl_assert(group != backend._ffi.NULL) + + nid = backend._lib.EC_GROUP_get_curve_name(group) + # The following check is to find EC keys with unnamed curves and raise + # an error for now. + if nid == backend._lib.NID_undef: + raise NotImplementedError( + "ECDSA certificates with unnamed curves are unsupported " + "at this time" + ) + + curve_name = backend._lib.OBJ_nid2sn(nid) + backend.openssl_assert(curve_name != backend._ffi.NULL) + + sn = backend._ffi.string(curve_name).decode('ascii') + return sn + + +def _mark_asn1_named_ec_curve(backend, ec_cdata): + """ + Set the named curve flag on the EC_KEY. This causes OpenSSL to + serialize EC keys along with their curve OID which makes + deserialization easier. 
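+ Without the flag, OpenSSL falls back to writing out the full explicit
+ curve parameters rather than the curve OID, which yields larger
+ encodings that some consumers refuse to parse.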
+ """ + + backend._lib.EC_KEY_set_asn1_flag( + ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE + ) + + +def _sn_to_elliptic_curve(backend, sn): + try: + return ec._CURVE_TYPES[sn]() + except KeyError: + raise UnsupportedAlgorithm( + "{0} is not a supported elliptic curve".format(sn), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + +@utils.register_interface(AsymmetricSignatureContext) +class _ECDSASignatureContext(object): + def __init__(self, backend, private_key, algorithm): + self._backend = backend + self._private_key = private_key + self._digest = hashes.Hash(algorithm, backend) + + def update(self, data): + self._digest.update(data) + + def finalize(self): + ec_key = self._private_key._ec_key + + digest = self._digest.finalize() + + digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend) + + max_size = self._backend._lib.ECDSA_size(ec_key) + self._backend.openssl_assert(max_size > 0) + + sigbuf = self._backend._ffi.new("char[]", max_size) + siglen_ptr = self._backend._ffi.new("unsigned int[]", 1) + res = self._backend._lib.ECDSA_sign( + 0, + digest, + len(digest), + sigbuf, + siglen_ptr, + ec_key + ) + self._backend.openssl_assert(res == 1) + return self._backend._ffi.buffer(sigbuf)[:siglen_ptr[0]] + + +@utils.register_interface(AsymmetricVerificationContext) +class _ECDSAVerificationContext(object): + def __init__(self, backend, public_key, signature, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + self._digest = hashes.Hash(algorithm, backend) + + def update(self, data): + self._digest.update(data) + + def verify(self): + ec_key = self._public_key._ec_key + + digest = self._digest.finalize() + + digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend) + + res = self._backend._lib.ECDSA_verify( + 0, + digest, + len(digest), + self._signature, + len(self._signature), + ec_key + ) + if res != 1: + self._backend._consume_errors() + raise InvalidSignature + return True + + +@utils.register_interface(ec.EllipticCurvePrivateKeyWithSerialization) +class _EllipticCurvePrivateKey(object): + def __init__(self, backend, ec_key_cdata, evp_pkey): + self._backend = backend + _mark_asn1_named_ec_curve(backend, ec_key_cdata) + self._ec_key = ec_key_cdata + self._evp_pkey = evp_pkey + + sn = _ec_key_curve_sn(backend, ec_key_cdata) + self._curve = _sn_to_elliptic_curve(backend, sn) + + curve = utils.read_only_property("_curve") + + def signer(self, signature_algorithm): + if isinstance(signature_algorithm, ec.ECDSA): + return _ECDSASignatureContext( + self._backend, self, signature_algorithm.algorithm + ) + else: + raise UnsupportedAlgorithm( + "Unsupported elliptic curve signature algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def exchange(self, algorithm, peer_public_key): + if not ( + self._backend.elliptic_curve_exchange_algorithm_supported( + algorithm, self.curve + ) + ): + raise UnsupportedAlgorithm( + "This backend does not support the ECDH algorithm.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + + if peer_public_key.curve.name != self.curve.name: + raise ValueError( + "peer_public_key and self are not on the same curve" + ) + + group = self._backend._lib.EC_KEY_get0_group(self._ec_key) + z_len = (self._backend._lib.EC_GROUP_get_degree(group) + 7) // 8 + self._backend.openssl_assert(z_len > 0) + z_buf = self._backend._ffi.new("uint8_t[]", z_len) + peer_key = self._backend._lib.EC_KEY_get0_public_key( + peer_public_key._ec_key + ) + + r = self._backend._lib.ECDH_compute_key( + z_buf, z_len, peer_key, 
self._ec_key, self._backend._ffi.NULL + ) + self._backend.openssl_assert(r > 0) + return self._backend._ffi.buffer(z_buf)[:z_len] + + def public_key(self): + group = self._backend._lib.EC_KEY_get0_group(self._ec_key) + self._backend.openssl_assert(group != self._backend._ffi.NULL) + + curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group) + + public_ec_key = self._backend._lib.EC_KEY_new_by_curve_name(curve_nid) + self._backend.openssl_assert(public_ec_key != self._backend._ffi.NULL) + public_ec_key = self._backend._ffi.gc( + public_ec_key, self._backend._lib.EC_KEY_free + ) + + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + self._backend.openssl_assert(point != self._backend._ffi.NULL) + + res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point) + self._backend.openssl_assert(res == 1) + + evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key) + + return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey) + + def private_numbers(self): + bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key) + private_value = self._backend._bn_to_int(bn) + return ec.EllipticCurvePrivateNumbers( + private_value=private_value, + public_numbers=self.public_key().public_numbers() + ) + + def private_bytes(self, encoding, format, encryption_algorithm): + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self._evp_pkey, + self._ec_key + ) + + +@utils.register_interface(ec.EllipticCurvePublicKeyWithSerialization) +class _EllipticCurvePublicKey(object): + def __init__(self, backend, ec_key_cdata, evp_pkey): + self._backend = backend + _mark_asn1_named_ec_curve(backend, ec_key_cdata) + self._ec_key = ec_key_cdata + self._evp_pkey = evp_pkey + + sn = _ec_key_curve_sn(backend, ec_key_cdata) + self._curve = _sn_to_elliptic_curve(backend, sn) + + curve = utils.read_only_property("_curve") + + def verifier(self, signature, signature_algorithm): + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") + + if isinstance(signature_algorithm, ec.ECDSA): + return _ECDSAVerificationContext( + self._backend, self, signature, signature_algorithm.algorithm + ) + else: + raise UnsupportedAlgorithm( + "Unsupported elliptic curve signature algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def public_numbers(self): + set_func, get_func, group = ( + self._backend._ec_key_determine_group_get_set_funcs(self._ec_key) + ) + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + self._backend.openssl_assert(point != self._backend._ffi.NULL) + + with self._backend._tmp_bn_ctx() as bn_ctx: + bn_x = self._backend._lib.BN_CTX_get(bn_ctx) + bn_y = self._backend._lib.BN_CTX_get(bn_ctx) + + res = get_func(group, point, bn_x, bn_y, bn_ctx) + self._backend.openssl_assert(res == 1) + + x = self._backend._bn_to_int(bn_x) + y = self._backend._bn_to_int(bn_y) + + return ec.EllipticCurvePublicNumbers( + x=x, + y=y, + curve=self._curve + ) + + def public_bytes(self, encoding, format): + if format is serialization.PublicFormat.PKCS1: + raise ValueError( + "EC public keys do not support PKCS1 serialization" + ) + + return self._backend._public_key_bytes( + encoding, + format, + self, + self._evp_pkey, + None + ) diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/encode_asn1.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/encode_asn1.py new file mode 100644 index 0000000..b0e2e73 --- /dev/null +++ 
b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/encode_asn1.py @@ -0,0 +1,592 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import calendar + +import idna + +import six + +from cryptography import x509 +from cryptography.hazmat.backends.openssl.decode_asn1 import ( + _CRL_ENTRY_REASON_ENUM_TO_CODE, _DISTPOINT_TYPE_FULLNAME, + _DISTPOINT_TYPE_RELATIVENAME +) +from cryptography.x509.oid import CRLEntryExtensionOID, ExtensionOID, NameOID + + +def _encode_asn1_int(backend, x): + """ + Converts a Python integer to an ASN1_INTEGER. The returned ASN1_INTEGER + will not be garbage collected (to support adding them to structs that take + ownership of the object). Be sure to register it for GC if it will be + discarded after use. + + """ + # Convert Python integer to OpenSSL "bignum" in case value exceeds + # machine's native integer limits (note: `int_to_bn` doesn't automatically + # GC). + i = backend._int_to_bn(x) + i = backend._ffi.gc(i, backend._lib.BN_free) + + # Wrap in an ASN.1 integer. Don't GC -- as documented. + i = backend._lib.BN_to_ASN1_INTEGER(i, backend._ffi.NULL) + backend.openssl_assert(i != backend._ffi.NULL) + return i + + +def _encode_asn1_int_gc(backend, x): + i = _encode_asn1_int(backend, x) + i = backend._ffi.gc(i, backend._lib.ASN1_INTEGER_free) + return i + + +def _encode_asn1_str(backend, data, length): + """ + Create an ASN1_OCTET_STRING from a Python byte string. + """ + s = backend._lib.ASN1_OCTET_STRING_new() + res = backend._lib.ASN1_OCTET_STRING_set(s, data, length) + backend.openssl_assert(res == 1) + return s + + +def _encode_asn1_utf8_str(backend, string): + """ + Create an ASN1_UTF8STRING from a Python unicode string. + This object will be an ASN1_STRING with UTF8 type in OpenSSL and + can be decoded with ASN1_STRING_to_UTF8. + """ + s = backend._lib.ASN1_UTF8STRING_new() + res = backend._lib.ASN1_STRING_set( + s, string.encode("utf8"), len(string.encode("utf8")) + ) + backend.openssl_assert(res == 1) + return s + + +def _encode_asn1_str_gc(backend, data, length): + s = _encode_asn1_str(backend, data, length) + s = backend._ffi.gc(s, backend._lib.ASN1_OCTET_STRING_free) + return s + + +def _encode_inhibit_any_policy(backend, inhibit_any_policy): + return _encode_asn1_int_gc(backend, inhibit_any_policy.skip_certs) + + +def _encode_name(backend, attributes): + """ + The X509_NAME created will not be gc'd. Use _encode_name_gc if needed. + """ + subject = backend._lib.X509_NAME_new() + for attribute in attributes: + name_entry = _encode_name_entry(backend, attribute) + res = backend._lib.X509_NAME_add_entry(subject, name_entry, -1, 0) + backend.openssl_assert(res == 1) + return subject + + +def _encode_name_gc(backend, attributes): + subject = _encode_name(backend, attributes) + subject = backend._ffi.gc(subject, backend._lib.X509_NAME_free) + return subject + + +def _encode_sk_name_entry(backend, attributes): + """ + The sk_X509_NAME_ENTRY created will not be gc'd.
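+ It is used for the relativename form of DistributionPointName in
+ _encode_crl_distribution_points below.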
+ """ + stack = backend._lib.sk_X509_NAME_ENTRY_new_null() + for attribute in attributes: + name_entry = _encode_name_entry(backend, attribute) + res = backend._lib.sk_X509_NAME_ENTRY_push(stack, name_entry) + backend.openssl_assert(res == 1) + return stack + + +def _encode_name_entry(backend, attribute): + value = attribute.value.encode('utf8') + obj = _txt2obj_gc(backend, attribute.oid.dotted_string) + if attribute.oid == NameOID.COUNTRY_NAME: + # Per RFC5280 Appendix A.1 countryName should be encoded as + # PrintableString, not UTF8String + type = backend._lib.MBSTRING_ASC + else: + type = backend._lib.MBSTRING_UTF8 + name_entry = backend._lib.X509_NAME_ENTRY_create_by_OBJ( + backend._ffi.NULL, obj, type, value, -1 + ) + return name_entry + + +def _encode_crl_number(backend, crl_number): + return _encode_asn1_int_gc(backend, crl_number.crl_number) + + +def _encode_crl_reason(backend, crl_reason): + asn1enum = backend._lib.ASN1_ENUMERATED_new() + backend.openssl_assert(asn1enum != backend._ffi.NULL) + asn1enum = backend._ffi.gc(asn1enum, backend._lib.ASN1_ENUMERATED_free) + res = backend._lib.ASN1_ENUMERATED_set( + asn1enum, _CRL_ENTRY_REASON_ENUM_TO_CODE[crl_reason.reason] + ) + backend.openssl_assert(res == 1) + + return asn1enum + + +def _encode_invalidity_date(backend, invalidity_date): + time = backend._lib.ASN1_GENERALIZEDTIME_set( + backend._ffi.NULL, calendar.timegm( + invalidity_date.invalidity_date.timetuple() + ) + ) + backend.openssl_assert(time != backend._ffi.NULL) + time = backend._ffi.gc(time, backend._lib.ASN1_GENERALIZEDTIME_free) + + return time + + +def _encode_certificate_policies(backend, certificate_policies): + cp = backend._lib.sk_POLICYINFO_new_null() + backend.openssl_assert(cp != backend._ffi.NULL) + cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free) + for policy_info in certificate_policies: + pi = backend._lib.POLICYINFO_new() + backend.openssl_assert(pi != backend._ffi.NULL) + res = backend._lib.sk_POLICYINFO_push(cp, pi) + backend.openssl_assert(res >= 1) + oid = _txt2obj(backend, policy_info.policy_identifier.dotted_string) + pi.policyid = oid + if policy_info.policy_qualifiers: + pqis = backend._lib.sk_POLICYQUALINFO_new_null() + backend.openssl_assert(pqis != backend._ffi.NULL) + for qualifier in policy_info.policy_qualifiers: + pqi = backend._lib.POLICYQUALINFO_new() + backend.openssl_assert(pqi != backend._ffi.NULL) + res = backend._lib.sk_POLICYQUALINFO_push(pqis, pqi) + backend.openssl_assert(res >= 1) + if isinstance(qualifier, six.text_type): + pqi.pqualid = _txt2obj( + backend, x509.OID_CPS_QUALIFIER.dotted_string + ) + pqi.d.cpsuri = _encode_asn1_str( + backend, + qualifier.encode("ascii"), + len(qualifier.encode("ascii")) + ) + else: + assert isinstance(qualifier, x509.UserNotice) + pqi.pqualid = _txt2obj( + backend, x509.OID_CPS_USER_NOTICE.dotted_string + ) + un = backend._lib.USERNOTICE_new() + backend.openssl_assert(un != backend._ffi.NULL) + pqi.d.usernotice = un + if qualifier.explicit_text: + un.exptext = _encode_asn1_utf8_str( + backend, qualifier.explicit_text + ) + + un.noticeref = _encode_notice_reference( + backend, qualifier.notice_reference + ) + + pi.qualifiers = pqis + + return cp + + +def _encode_notice_reference(backend, notice): + if notice is None: + return backend._ffi.NULL + else: + nr = backend._lib.NOTICEREF_new() + backend.openssl_assert(nr != backend._ffi.NULL) + # organization is a required field + nr.organization = _encode_asn1_utf8_str(backend, notice.organization) + + notice_stack = 
backend._lib.sk_ASN1_INTEGER_new_null() + nr.noticenos = notice_stack + for number in notice.notice_numbers: + num = _encode_asn1_int(backend, number) + res = backend._lib.sk_ASN1_INTEGER_push(notice_stack, num) + backend.openssl_assert(res >= 1) + + return nr + + +def _txt2obj(backend, name): + """ + Converts a Python string with an ASN.1 object ID in dotted form to a + ASN1_OBJECT. + """ + name = name.encode('ascii') + obj = backend._lib.OBJ_txt2obj(name, 1) + backend.openssl_assert(obj != backend._ffi.NULL) + return obj + + +def _txt2obj_gc(backend, name): + obj = _txt2obj(backend, name) + obj = backend._ffi.gc(obj, backend._lib.ASN1_OBJECT_free) + return obj + + +def _encode_ocsp_nocheck(backend, ext): + """ + The OCSP No Check extension is defined as a null ASN.1 value embedded in + an ASN.1 string. + """ + return _encode_asn1_str_gc(backend, b"\x05\x00", 2) + + +def _encode_key_usage(backend, key_usage): + set_bit = backend._lib.ASN1_BIT_STRING_set_bit + ku = backend._lib.ASN1_BIT_STRING_new() + ku = backend._ffi.gc(ku, backend._lib.ASN1_BIT_STRING_free) + res = set_bit(ku, 0, key_usage.digital_signature) + backend.openssl_assert(res == 1) + res = set_bit(ku, 1, key_usage.content_commitment) + backend.openssl_assert(res == 1) + res = set_bit(ku, 2, key_usage.key_encipherment) + backend.openssl_assert(res == 1) + res = set_bit(ku, 3, key_usage.data_encipherment) + backend.openssl_assert(res == 1) + res = set_bit(ku, 4, key_usage.key_agreement) + backend.openssl_assert(res == 1) + res = set_bit(ku, 5, key_usage.key_cert_sign) + backend.openssl_assert(res == 1) + res = set_bit(ku, 6, key_usage.crl_sign) + backend.openssl_assert(res == 1) + if key_usage.key_agreement: + res = set_bit(ku, 7, key_usage.encipher_only) + backend.openssl_assert(res == 1) + res = set_bit(ku, 8, key_usage.decipher_only) + backend.openssl_assert(res == 1) + else: + res = set_bit(ku, 7, 0) + backend.openssl_assert(res == 1) + res = set_bit(ku, 8, 0) + backend.openssl_assert(res == 1) + + return ku + + +def _encode_authority_key_identifier(backend, authority_keyid): + akid = backend._lib.AUTHORITY_KEYID_new() + backend.openssl_assert(akid != backend._ffi.NULL) + akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free) + if authority_keyid.key_identifier is not None: + akid.keyid = _encode_asn1_str( + backend, + authority_keyid.key_identifier, + len(authority_keyid.key_identifier) + ) + + if authority_keyid.authority_cert_issuer is not None: + akid.issuer = _encode_general_names( + backend, authority_keyid.authority_cert_issuer + ) + + if authority_keyid.authority_cert_serial_number is not None: + akid.serial = _encode_asn1_int( + backend, authority_keyid.authority_cert_serial_number + ) + + return akid + + +def _encode_basic_constraints(backend, basic_constraints): + constraints = backend._lib.BASIC_CONSTRAINTS_new() + constraints = backend._ffi.gc( + constraints, backend._lib.BASIC_CONSTRAINTS_free + ) + constraints.ca = 255 if basic_constraints.ca else 0 + if basic_constraints.ca and basic_constraints.path_length is not None: + constraints.pathlen = _encode_asn1_int( + backend, basic_constraints.path_length + ) + + return constraints + + +def _encode_authority_information_access(backend, authority_info_access): + aia = backend._lib.sk_ACCESS_DESCRIPTION_new_null() + backend.openssl_assert(aia != backend._ffi.NULL) + aia = backend._ffi.gc( + aia, backend._lib.sk_ACCESS_DESCRIPTION_free + ) + for access_description in authority_info_access: + ad = backend._lib.ACCESS_DESCRIPTION_new() + method = _txt2obj( + 
backend, access_description.access_method.dotted_string + ) + gn = _encode_general_name(backend, access_description.access_location) + ad.method = method + ad.location = gn + res = backend._lib.sk_ACCESS_DESCRIPTION_push(aia, ad) + backend.openssl_assert(res >= 1) + + return aia + + +def _encode_general_names(backend, names): + general_names = backend._lib.GENERAL_NAMES_new() + backend.openssl_assert(general_names != backend._ffi.NULL) + for name in names: + gn = _encode_general_name(backend, name) + res = backend._lib.sk_GENERAL_NAME_push(general_names, gn) + backend.openssl_assert(res != 0) + + return general_names + + +def _encode_alt_name(backend, san): + general_names = _encode_general_names(backend, san) + general_names = backend._ffi.gc( + general_names, backend._lib.GENERAL_NAMES_free + ) + return general_names + + +def _encode_subject_key_identifier(backend, ski): + return _encode_asn1_str_gc(backend, ski.digest, len(ski.digest)) + + +def _encode_general_name(backend, name): + if isinstance(name, x509.DNSName): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + gn.type = backend._lib.GEN_DNS + + ia5 = backend._lib.ASN1_IA5STRING_new() + backend.openssl_assert(ia5 != backend._ffi.NULL) + + if name.value.startswith(u"*."): + value = b"*." + idna.encode(name.value[2:]) + else: + value = idna.encode(name.value) + + res = backend._lib.ASN1_STRING_set(ia5, value, len(value)) + backend.openssl_assert(res == 1) + gn.d.dNSName = ia5 + elif isinstance(name, x509.RegisteredID): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + gn.type = backend._lib.GEN_RID + obj = backend._lib.OBJ_txt2obj( + name.value.dotted_string.encode('ascii'), 1 + ) + backend.openssl_assert(obj != backend._ffi.NULL) + gn.d.registeredID = obj + elif isinstance(name, x509.DirectoryName): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + dir_name = _encode_name(backend, name.value) + gn.type = backend._lib.GEN_DIRNAME + gn.d.directoryName = dir_name + elif isinstance(name, x509.IPAddress): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + ipaddr = _encode_asn1_str( + backend, name.value.packed, len(name.value.packed) + ) + gn.type = backend._lib.GEN_IPADD + gn.d.iPAddress = ipaddr + elif isinstance(name, x509.OtherName): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + other_name = backend._lib.OTHERNAME_new() + backend.openssl_assert(other_name != backend._ffi.NULL) + + type_id = backend._lib.OBJ_txt2obj( + name.type_id.dotted_string.encode('ascii'), 1 + ) + backend.openssl_assert(type_id != backend._ffi.NULL) + data = backend._ffi.new("unsigned char[]", name.value) + data_ptr_ptr = backend._ffi.new("unsigned char **") + data_ptr_ptr[0] = data + value = backend._lib.d2i_ASN1_TYPE( + backend._ffi.NULL, data_ptr_ptr, len(name.value) + ) + if value == backend._ffi.NULL: + backend._consume_errors() + raise ValueError("Invalid ASN.1 data") + other_name.type_id = type_id + other_name.value = value + gn.type = backend._lib.GEN_OTHERNAME + gn.d.otherName = other_name + elif isinstance(name, x509.RFC822Name): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + asn1_str = _encode_asn1_str( + backend, name._encoded, len(name._encoded) + ) + gn.type = backend._lib.GEN_EMAIL + gn.d.rfc822Name = asn1_str + elif isinstance(name, x509.UniformResourceIdentifier): + gn = 
backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + asn1_str = _encode_asn1_str( + backend, name._encoded, len(name._encoded) + ) + gn.type = backend._lib.GEN_URI + gn.d.uniformResourceIdentifier = asn1_str + else: + raise ValueError( + "{0} is an unknown GeneralName type".format(name) + ) + + return gn + + +def _encode_extended_key_usage(backend, extended_key_usage): + eku = backend._lib.sk_ASN1_OBJECT_new_null() + eku = backend._ffi.gc(eku, backend._lib.sk_ASN1_OBJECT_free) + for oid in extended_key_usage: + obj = _txt2obj(backend, oid.dotted_string) + res = backend._lib.sk_ASN1_OBJECT_push(eku, obj) + backend.openssl_assert(res >= 1) + + return eku + + +_CRLREASONFLAGS = { + x509.ReasonFlags.key_compromise: 1, + x509.ReasonFlags.ca_compromise: 2, + x509.ReasonFlags.affiliation_changed: 3, + x509.ReasonFlags.superseded: 4, + x509.ReasonFlags.cessation_of_operation: 5, + x509.ReasonFlags.certificate_hold: 6, + x509.ReasonFlags.privilege_withdrawn: 7, + x509.ReasonFlags.aa_compromise: 8, +} + + +def _encode_crl_distribution_points(backend, crl_distribution_points): + cdp = backend._lib.sk_DIST_POINT_new_null() + cdp = backend._ffi.gc(cdp, backend._lib.sk_DIST_POINT_free) + for point in crl_distribution_points: + dp = backend._lib.DIST_POINT_new() + backend.openssl_assert(dp != backend._ffi.NULL) + + if point.reasons: + bitmask = backend._lib.ASN1_BIT_STRING_new() + backend.openssl_assert(bitmask != backend._ffi.NULL) + dp.reasons = bitmask + for reason in point.reasons: + res = backend._lib.ASN1_BIT_STRING_set_bit( + bitmask, _CRLREASONFLAGS[reason], 1 + ) + backend.openssl_assert(res == 1) + + if point.full_name: + dpn = backend._lib.DIST_POINT_NAME_new() + backend.openssl_assert(dpn != backend._ffi.NULL) + dpn.type = _DISTPOINT_TYPE_FULLNAME + dpn.name.fullname = _encode_general_names(backend, point.full_name) + dp.distpoint = dpn + + if point.relative_name: + dpn = backend._lib.DIST_POINT_NAME_new() + backend.openssl_assert(dpn != backend._ffi.NULL) + dpn.type = _DISTPOINT_TYPE_RELATIVENAME + relativename = _encode_sk_name_entry(backend, point.relative_name) + backend.openssl_assert(relativename != backend._ffi.NULL) + dpn.name.relativename = relativename + dp.distpoint = dpn + + if point.crl_issuer: + dp.CRLissuer = _encode_general_names(backend, point.crl_issuer) + + res = backend._lib.sk_DIST_POINT_push(cdp, dp) + backend.openssl_assert(res >= 1) + + return cdp + + +def _encode_name_constraints(backend, name_constraints): + nc = backend._lib.NAME_CONSTRAINTS_new() + backend.openssl_assert(nc != backend._ffi.NULL) + nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free) + permitted = _encode_general_subtree( + backend, name_constraints.permitted_subtrees + ) + nc.permittedSubtrees = permitted + excluded = _encode_general_subtree( + backend, name_constraints.excluded_subtrees + ) + nc.excludedSubtrees = excluded + + return nc + + +def _encode_policy_constraints(backend, policy_constraints): + pc = backend._lib.POLICY_CONSTRAINTS_new() + backend.openssl_assert(pc != backend._ffi.NULL) + pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free) + if policy_constraints.require_explicit_policy is not None: + pc.requireExplicitPolicy = _encode_asn1_int( + backend, policy_constraints.require_explicit_policy + ) + + if policy_constraints.inhibit_policy_mapping is not None: + pc.inhibitPolicyMapping = _encode_asn1_int( + backend, policy_constraints.inhibit_policy_mapping + ) + + return pc + + +def _encode_general_subtree(backend, subtrees): + if 
subtrees is None: + return backend._ffi.NULL + else: + general_subtrees = backend._lib.sk_GENERAL_SUBTREE_new_null() + for name in subtrees: + gs = backend._lib.GENERAL_SUBTREE_new() + gs.base = _encode_general_name(backend, name) + res = backend._lib.sk_GENERAL_SUBTREE_push(general_subtrees, gs) + assert res >= 1 + + return general_subtrees + + +_EXTENSION_ENCODE_HANDLERS = { + ExtensionOID.BASIC_CONSTRAINTS: _encode_basic_constraints, + ExtensionOID.SUBJECT_KEY_IDENTIFIER: _encode_subject_key_identifier, + ExtensionOID.KEY_USAGE: _encode_key_usage, + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _encode_alt_name, + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name, + ExtensionOID.EXTENDED_KEY_USAGE: _encode_extended_key_usage, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier, + ExtensionOID.CERTIFICATE_POLICIES: _encode_certificate_policies, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _encode_authority_information_access + ), + ExtensionOID.CRL_DISTRIBUTION_POINTS: _encode_crl_distribution_points, + ExtensionOID.INHIBIT_ANY_POLICY: _encode_inhibit_any_policy, + ExtensionOID.OCSP_NO_CHECK: _encode_ocsp_nocheck, + ExtensionOID.NAME_CONSTRAINTS: _encode_name_constraints, + ExtensionOID.POLICY_CONSTRAINTS: _encode_policy_constraints, +} + +_CRL_EXTENSION_ENCODE_HANDLERS = { + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _encode_authority_information_access + ), + ExtensionOID.CRL_NUMBER: _encode_crl_number, +} + +_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS = { + CRLEntryExtensionOID.CERTIFICATE_ISSUER: _encode_alt_name, + CRLEntryExtensionOID.CRL_REASON: _encode_crl_reason, + CRLEntryExtensionOID.INVALIDITY_DATE: _encode_invalidity_date, +} diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/hashes.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/hashes.py new file mode 100644 index 0000000..2c8fce1 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/hashes.py @@ -0,0 +1,61 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
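+# Usage sketch (illustrative only): this backend class is normally reached
+# through the public hashes.Hash API rather than constructed directly, e.g.
+#
+#     from cryptography.hazmat.backends import default_backend
+#     from cryptography.hazmat.primitives import hashes
+#
+#     digest = hashes.Hash(hashes.SHA256(), default_backend())
+#     digest.update(b"data")
+#     digest.finalize()  # 32 bytes for SHA-256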
+ +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import hashes + + +@utils.register_interface(hashes.HashContext) +class _HashContext(object): + def __init__(self, backend, algorithm, ctx=None): + self._algorithm = algorithm + + self._backend = backend + + if ctx is None: + ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + evp_md = self._backend._lib.EVP_get_digestbyname( + algorithm.name.encode("ascii")) + if evp_md == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md, + self._backend._ffi.NULL) + self._backend.openssl_assert(res != 0) + + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def copy(self): + copied_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new() + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx) + self._backend.openssl_assert(res != 0) + return _HashContext(self._backend, self.algorithm, ctx=copied_ctx) + + def update(self, data): + res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, len(data)) + self._backend.openssl_assert(res != 0) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", + self._backend._lib.EVP_MAX_MD_SIZE) + outlen = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen) + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size) + return self._backend._ffi.buffer(buf)[:outlen[0]] diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/hmac.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/hmac.py new file mode 100644 index 0000000..ab1ad46 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/hmac.py @@ -0,0 +1,80 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
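+# Usage sketch (illustrative only): reached through the public hmac.HMAC API,
+# which delegates to this class on the OpenSSL backend, e.g.
+#
+#     from cryptography.hazmat.backends import default_backend
+#     from cryptography.hazmat.primitives import hashes, hmac
+#
+#     h = hmac.HMAC(b"secret key", hashes.SHA256(), default_backend())
+#     h.update(b"message")
+#     tag = h.finalize()  # or h.verify(expected) for a constant-time check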
+ +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import constant_time, hashes, interfaces + + +@utils.register_interface(interfaces.MACContext) +@utils.register_interface(hashes.HashContext) +class _HMACContext(object): + def __init__(self, backend, key, algorithm, ctx=None): + self._algorithm = algorithm + self._backend = backend + + if ctx is None: + ctx = self._backend._lib.Cryptography_HMAC_CTX_new() + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.Cryptography_HMAC_CTX_free + ) + evp_md = self._backend._lib.EVP_get_digestbyname( + algorithm.name.encode('ascii')) + if evp_md == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + res = self._backend._lib.Cryptography_HMAC_Init_ex( + ctx, key, len(key), evp_md, self._backend._ffi.NULL + ) + self._backend.openssl_assert(res != 0) + + self._ctx = ctx + self._key = key + + algorithm = utils.read_only_property("_algorithm") + + def copy(self): + copied_ctx = self._backend._lib.Cryptography_HMAC_CTX_new() + self._backend.openssl_assert(copied_ctx != self._backend._ffi.NULL) + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.Cryptography_HMAC_CTX_free + ) + res = self._backend._lib.Cryptography_HMAC_CTX_copy( + copied_ctx, self._ctx + ) + self._backend.openssl_assert(res != 0) + return _HMACContext( + self._backend, self._key, self.algorithm, ctx=copied_ctx + ) + + def update(self, data): + res = self._backend._lib.Cryptography_HMAC_Update( + self._ctx, data, len(data) + ) + self._backend.openssl_assert(res != 0) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", + self._backend._lib.EVP_MAX_MD_SIZE) + outlen = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.Cryptography_HMAC_Final( + self._ctx, buf, outlen + ) + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def verify(self, signature): + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature did not match digest.") diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/rsa.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/rsa.py new file mode 100644 index 0000000..10c51fe --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/rsa.py @@ -0,0 +1,674 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
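+# Worked example (illustrative) of the PSS MAX_LENGTH salt computation done
+# by _get_rsa_pss_salt_length below: with a 2048-bit key and SHA-256,
+#
+#     emlen = ceil((2048 - 1) / 8.0) = 256
+#     salt_length = emlen - 32 - 2 = 222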
+ +from __future__ import absolute_import, division, print_function + +import math + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ( + AsymmetricSignatureContext, AsymmetricVerificationContext, rsa +) +from cryptography.hazmat.primitives.asymmetric.padding import ( + AsymmetricPadding, MGF1, OAEP, PKCS1v15, PSS +) +from cryptography.hazmat.primitives.asymmetric.rsa import ( + RSAPrivateKeyWithSerialization, RSAPublicKeyWithSerialization +) + + +def _get_rsa_pss_salt_length(pss, key_size, digest_size): + salt = pss._salt_length + + if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH: + # bit length - 1 per RFC 3447 + emlen = int(math.ceil((key_size - 1) / 8.0)) + salt_length = emlen - digest_size - 2 + assert salt_length >= 0 + return salt_length + else: + return salt + + +def _enc_dec_rsa(backend, key, data, padding): + if not isinstance(padding, AsymmetricPadding): + raise TypeError("Padding must be an instance of AsymmetricPadding.") + + if isinstance(padding, PKCS1v15): + padding_enum = backend._lib.RSA_PKCS1_PADDING + elif isinstance(padding, OAEP): + padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING + + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend.", + _Reasons.UNSUPPORTED_MGF + ) + + if not backend.rsa_padding_supported(padding): + raise UnsupportedAlgorithm( + "This combination of padding and hash algorithm is not " + "supported by this backend.", + _Reasons.UNSUPPORTED_PADDING + ) + + if padding._label is not None and padding._label != b"": + raise ValueError("This backend does not support OAEP labels.") + + else: + raise UnsupportedAlgorithm( + "{0} is not supported by this backend.".format( + padding.name + ), + _Reasons.UNSUPPORTED_PADDING + ) + + if backend._lib.Cryptography_HAS_PKEY_CTX: + return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding) + else: + return _enc_dec_rsa_098(backend, key, data, padding_enum) + + +def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding): + if isinstance(key, _RSAPublicKey): + init = backend._lib.EVP_PKEY_encrypt_init + crypt = backend._lib.Cryptography_EVP_PKEY_encrypt + else: + init = backend._lib.EVP_PKEY_decrypt_init + crypt = backend._lib.Cryptography_EVP_PKEY_decrypt + + pkey_ctx = backend._lib.EVP_PKEY_CTX_new( + key._evp_pkey, backend._ffi.NULL + ) + backend.openssl_assert(pkey_ctx != backend._ffi.NULL) + pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free) + res = init(pkey_ctx) + backend.openssl_assert(res == 1) + res = backend._lib.EVP_PKEY_CTX_set_rsa_padding( + pkey_ctx, padding_enum) + backend.openssl_assert(res > 0) + buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey) + backend.openssl_assert(buf_size > 0) + if ( + isinstance(padding, OAEP) and + backend._lib.Cryptography_HAS_RSA_OAEP_MD + ): + mgf1_md = backend._lib.EVP_get_digestbyname( + padding._mgf._algorithm.name.encode("ascii")) + backend.openssl_assert(mgf1_md != backend._ffi.NULL) + res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md) + backend.openssl_assert(res > 0) + oaep_md = backend._lib.EVP_get_digestbyname( + padding._algorithm.name.encode("ascii")) + backend.openssl_assert(oaep_md != backend._ffi.NULL) + res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md) + backend.openssl_assert(res > 0) + + outlen = 
backend._ffi.new("size_t *", buf_size) + buf = backend._ffi.new("char[]", buf_size) + res = crypt(pkey_ctx, buf, outlen, data, len(data)) + if res <= 0: + _handle_rsa_enc_dec_error(backend, key) + + return backend._ffi.buffer(buf)[:outlen[0]] + + +def _enc_dec_rsa_098(backend, key, data, padding_enum): + if isinstance(key, _RSAPublicKey): + crypt = backend._lib.RSA_public_encrypt + else: + crypt = backend._lib.RSA_private_decrypt + + key_size = backend._lib.RSA_size(key._rsa_cdata) + backend.openssl_assert(key_size > 0) + buf = backend._ffi.new("unsigned char[]", key_size) + res = crypt(len(data), data, buf, key._rsa_cdata, padding_enum) + if res < 0: + _handle_rsa_enc_dec_error(backend, key) + + return backend._ffi.buffer(buf)[:res] + + +def _handle_rsa_enc_dec_error(backend, key): + errors = backend._consume_errors() + assert errors + assert errors[0].lib == backend._lib.ERR_LIB_RSA + if isinstance(key, _RSAPublicKey): + assert (errors[0].reason == + backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE) + raise ValueError( + "Data too long for key size. Encrypt less data or use a " + "larger key size." + ) + else: + decoding_errors = [ + backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_01, + backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_02, + backend._lib.RSA_R_OAEP_DECODING_ERROR, + # Though this error looks similar to the + # RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE, this occurs on decrypts, + # rather than on encrypts + backend._lib.RSA_R_DATA_TOO_LARGE_FOR_MODULUS, + ] + if backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR: + decoding_errors.append(backend._lib.RSA_R_PKCS_DECODING_ERROR) + + assert errors[0].reason in decoding_errors + raise ValueError("Decryption failed.") + + +@utils.register_interface(AsymmetricSignatureContext) +class _RSASignatureContext(object): + def __init__(self, backend, private_key, padding, algorithm): + self._backend = backend + self._private_key = private_key + + if not isinstance(padding, AsymmetricPadding): + raise TypeError("Expected provider of AsymmetricPadding.") + + self._pkey_size = self._backend._lib.EVP_PKEY_size( + self._private_key._evp_pkey + ) + self._backend.openssl_assert(self._pkey_size > 0) + + if isinstance(padding, PKCS1v15): + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._finalize_method = self._finalize_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING + else: + self._finalize_method = self._finalize_pkcs1 + elif isinstance(padding, PSS): + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend.", + _Reasons.UNSUPPORTED_MGF + ) + + # Size of key in bytes - 2 is the maximum + # PSS signature length (salt length is checked later) + if self._pkey_size - algorithm.digest_size - 2 < 0: + raise ValueError("Digest too large for key size. 
Use a larger " + "key.") + + if not self._backend._pss_mgf1_hash_supported( + padding._mgf._algorithm + ): + raise UnsupportedAlgorithm( + "When OpenSSL is older than 1.0.1 then only SHA1 is " + "supported with MGF1.", + _Reasons.UNSUPPORTED_HASH + ) + + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._finalize_method = self._finalize_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING + else: + self._finalize_method = self._finalize_pss + else: + raise UnsupportedAlgorithm( + "{0} is not supported by this backend.".format(padding.name), + _Reasons.UNSUPPORTED_PADDING + ) + + self._padding = padding + self._algorithm = algorithm + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def finalize(self): + evp_md = self._backend._lib.EVP_get_digestbyname( + self._algorithm.name.encode("ascii")) + self._backend.openssl_assert(evp_md != self._backend._ffi.NULL) + + return self._finalize_method(evp_md) + + def _finalize_pkey_ctx(self, evp_md): + pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new( + self._private_key._evp_pkey, self._backend._ffi.NULL + ) + self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL) + pkey_ctx = self._backend._ffi.gc(pkey_ctx, + self._backend._lib.EVP_PKEY_CTX_free) + res = self._backend._lib.EVP_PKEY_sign_init(pkey_ctx) + self._backend.openssl_assert(res == 1) + res = self._backend._lib.EVP_PKEY_CTX_set_signature_md( + pkey_ctx, evp_md) + self._backend.openssl_assert(res > 0) + + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding( + pkey_ctx, self._padding_enum) + self._backend.openssl_assert(res > 0) + if isinstance(self._padding, PSS): + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( + pkey_ctx, + _get_rsa_pss_salt_length( + self._padding, + self._private_key.key_size, + self._hash_ctx.algorithm.digest_size + ) + ) + self._backend.openssl_assert(res > 0) + + if self._backend._lib.Cryptography_HAS_MGF1_MD: + # MGF1 MD is configurable in OpenSSL 1.0.1+ + mgf1_md = self._backend._lib.EVP_get_digestbyname( + self._padding._mgf._algorithm.name.encode("ascii")) + self._backend.openssl_assert( + mgf1_md != self._backend._ffi.NULL + ) + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md( + pkey_ctx, mgf1_md + ) + self._backend.openssl_assert(res > 0) + data_to_sign = self._hash_ctx.finalize() + buflen = self._backend._ffi.new("size_t *") + res = self._backend._lib.EVP_PKEY_sign( + pkey_ctx, + self._backend._ffi.NULL, + buflen, + data_to_sign, + len(data_to_sign) + ) + self._backend.openssl_assert(res == 1) + buf = self._backend._ffi.new("unsigned char[]", buflen[0]) + res = self._backend._lib.EVP_PKEY_sign( + pkey_ctx, buf, buflen, data_to_sign, len(data_to_sign)) + if res != 1: + errors = self._backend._consume_errors() + assert errors[0].lib == self._backend._lib.ERR_LIB_RSA + reason = None + if (errors[0].reason == + self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE): + reason = ("Salt length too long for key size. Try using " + "MAX_LENGTH instead.") + else: + assert (errors[0].reason == + self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY) + reason = "Digest too large for key size. Use a larger key." 
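# NOTE (editorial annotation, not part of the vendored file): the salt-length
# arithmetic behind these two error messages lives in
# _get_rsa_pss_salt_length() near the top of this module. A worked example,
# assuming a 2048-bit key and SHA-256 (32-byte digest):
#
#     emlen       = ceil((2048 - 1) / 8)    = 256
#     salt_length = emlen - digest_size - 2 = 256 - 32 - 2 = 222
#
# PSS.MAX_LENGTH therefore selects a 222-byte salt here; requesting a longer
# fixed salt is what surfaces as the "Salt length too long" ValueError above.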
+ assert reason is not None + raise ValueError(reason) + + return self._backend._ffi.buffer(buf)[:] + + def _finalize_pkcs1(self, evp_md): + if self._hash_ctx._ctx is None: + raise AlreadyFinalized("Context has already been finalized.") + + sig_buf = self._backend._ffi.new("char[]", self._pkey_size) + sig_len = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.EVP_SignFinal( + self._hash_ctx._ctx._ctx, + sig_buf, + sig_len, + self._private_key._evp_pkey + ) + self._hash_ctx.finalize() + if res == 0: + errors = self._backend._consume_errors() + assert errors[0].lib == self._backend._lib.ERR_LIB_RSA + assert (errors[0].reason == + self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY) + raise ValueError("Digest too large for key size. Use a larger " + "key.") + + return self._backend._ffi.buffer(sig_buf)[:sig_len[0]] + + def _finalize_pss(self, evp_md): + data_to_sign = self._hash_ctx.finalize() + padded = self._backend._ffi.new("unsigned char[]", self._pkey_size) + res = self._backend._lib.RSA_padding_add_PKCS1_PSS( + self._private_key._rsa_cdata, + padded, + data_to_sign, + evp_md, + _get_rsa_pss_salt_length( + self._padding, + self._private_key.key_size, + len(data_to_sign) + ) + ) + if res != 1: + errors = self._backend._consume_errors() + assert errors[0].lib == self._backend._lib.ERR_LIB_RSA + assert (errors[0].reason == + self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE) + raise ValueError("Salt length too long for key size. Try using " + "MAX_LENGTH instead.") + + sig_buf = self._backend._ffi.new("char[]", self._pkey_size) + sig_len = self._backend._lib.RSA_private_encrypt( + self._pkey_size, + padded, + sig_buf, + self._private_key._rsa_cdata, + self._backend._lib.RSA_NO_PADDING + ) + self._backend.openssl_assert(sig_len != -1) + return self._backend._ffi.buffer(sig_buf)[:sig_len] + + +@utils.register_interface(AsymmetricVerificationContext) +class _RSAVerificationContext(object): + def __init__(self, backend, public_key, signature, padding, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + + if not isinstance(padding, AsymmetricPadding): + raise TypeError("Expected provider of AsymmetricPadding.") + + self._pkey_size = self._backend._lib.EVP_PKEY_size( + self._public_key._evp_pkey + ) + self._backend.openssl_assert(self._pkey_size > 0) + + if isinstance(padding, PKCS1v15): + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._verify_method = self._verify_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING + else: + self._verify_method = self._verify_pkcs1 + elif isinstance(padding, PSS): + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend.", + _Reasons.UNSUPPORTED_MGF + ) + + # Size of key in bytes - 2 is the maximum + # PSS signature length (salt length is checked later) + if self._pkey_size - algorithm.digest_size - 2 < 0: + raise ValueError( + "Digest too large for key size. Check that you have the " + "correct key and digest algorithm." 
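# NOTE (editorial annotation, not part of the vendored file): a minimal
# sign/verify round trip through the public API that exercises the two
# context classes in this module; the key size and hash choices below are
# illustrative assumptions:
#
#   >>> from cryptography.hazmat.backends import default_backend
#   >>> from cryptography.hazmat.primitives import hashes
#   >>> from cryptography.hazmat.primitives.asymmetric import padding, rsa
#   >>> key = rsa.generate_private_key(65537, 2048, default_backend())
#   >>> pss = padding.PSS(mgf=padding.MGF1(hashes.SHA256()),
#   ...                   salt_length=padding.PSS.MAX_LENGTH)
#   >>> sig = key.sign(b"message", pss, hashes.SHA256())
#   >>> key.public_key().verify(sig, b"message", pss, hashes.SHA256())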
+ ) + + if not self._backend._pss_mgf1_hash_supported( + padding._mgf._algorithm + ): + raise UnsupportedAlgorithm( + "When OpenSSL is older than 1.0.1 then only SHA1 is " + "supported with MGF1.", + _Reasons.UNSUPPORTED_HASH + ) + + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._verify_method = self._verify_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING + else: + self._verify_method = self._verify_pss + else: + raise UnsupportedAlgorithm( + "{0} is not supported by this backend.".format(padding.name), + _Reasons.UNSUPPORTED_PADDING + ) + + self._padding = padding + self._algorithm = algorithm + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def verify(self): + evp_md = self._backend._lib.EVP_get_digestbyname( + self._algorithm.name.encode("ascii")) + self._backend.openssl_assert(evp_md != self._backend._ffi.NULL) + + self._verify_method(evp_md) + + def _verify_pkey_ctx(self, evp_md): + pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new( + self._public_key._evp_pkey, self._backend._ffi.NULL + ) + self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL) + pkey_ctx = self._backend._ffi.gc(pkey_ctx, + self._backend._lib.EVP_PKEY_CTX_free) + res = self._backend._lib.EVP_PKEY_verify_init(pkey_ctx) + self._backend.openssl_assert(res == 1) + res = self._backend._lib.EVP_PKEY_CTX_set_signature_md( + pkey_ctx, evp_md) + self._backend.openssl_assert(res > 0) + + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding( + pkey_ctx, self._padding_enum) + self._backend.openssl_assert(res > 0) + if isinstance(self._padding, PSS): + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( + pkey_ctx, + _get_rsa_pss_salt_length( + self._padding, + self._public_key.key_size, + self._hash_ctx.algorithm.digest_size + ) + ) + self._backend.openssl_assert(res > 0) + if self._backend._lib.Cryptography_HAS_MGF1_MD: + # MGF1 MD is configurable in OpenSSL 1.0.1+ + mgf1_md = self._backend._lib.EVP_get_digestbyname( + self._padding._mgf._algorithm.name.encode("ascii")) + self._backend.openssl_assert( + mgf1_md != self._backend._ffi.NULL + ) + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md( + pkey_ctx, mgf1_md + ) + self._backend.openssl_assert(res > 0) + + data_to_verify = self._hash_ctx.finalize() + res = self._backend._lib.EVP_PKEY_verify( + pkey_ctx, + self._signature, + len(self._signature), + data_to_verify, + len(data_to_verify) + ) + # The previous call can return negative numbers in the event of an + # error. This is not a signature failure but we need to fail if it + # occurs. + self._backend.openssl_assert(res >= 0) + if res == 0: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + def _verify_pkcs1(self, evp_md): + if self._hash_ctx._ctx is None: + raise AlreadyFinalized("Context has already been finalized.") + + res = self._backend._lib.EVP_VerifyFinal( + self._hash_ctx._ctx._ctx, + self._signature, + len(self._signature), + self._public_key._evp_pkey + ) + self._hash_ctx.finalize() + # The previous call can return negative numbers in the event of an + # error. This is not a signature failure but we need to fail if it + # occurs. 
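# NOTE (editorial annotation, not part of the vendored file): a zero return
# here becomes InvalidSignature rather than an error return value, so callers
# are expected to catch it (continuing the sketch from the annotation earlier
# in this module):
#
#   >>> from cryptography.exceptions import InvalidSignature
#   >>> try:
#   ...     key.public_key().verify(sig, b"tampered", pss, hashes.SHA256())
#   ... except InvalidSignature:
#   ...     print("signature did not match")
#   signature did not match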
+ self._backend.openssl_assert(res >= 0) + if res == 0: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + def _verify_pss(self, evp_md): + buf = self._backend._ffi.new("unsigned char[]", self._pkey_size) + res = self._backend._lib.RSA_public_decrypt( + len(self._signature), + self._signature, + buf, + self._public_key._rsa_cdata, + self._backend._lib.RSA_NO_PADDING + ) + if res != self._pkey_size: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + data_to_verify = self._hash_ctx.finalize() + res = self._backend._lib.RSA_verify_PKCS1_PSS( + self._public_key._rsa_cdata, + data_to_verify, + evp_md, + buf, + _get_rsa_pss_salt_length( + self._padding, + self._public_key.key_size, + len(data_to_verify) + ) + ) + if res != 1: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + +@utils.register_interface(RSAPrivateKeyWithSerialization) +class _RSAPrivateKey(object): + def __init__(self, backend, rsa_cdata, evp_pkey): + self._backend = backend + self._rsa_cdata = rsa_cdata + self._evp_pkey = evp_pkey + + n = self._backend._ffi.new("BIGNUM **") + self._backend._lib.RSA_get0_key( + self._rsa_cdata, n, self._backend._ffi.NULL, + self._backend._ffi.NULL + ) + self._backend.openssl_assert(n[0] != self._backend._ffi.NULL) + self._key_size = self._backend._lib.BN_num_bits(n[0]) + + key_size = utils.read_only_property("_key_size") + + def signer(self, padding, algorithm): + return _RSASignatureContext(self._backend, self, padding, algorithm) + + def decrypt(self, ciphertext, padding): + key_size_bytes = int(math.ceil(self.key_size / 8.0)) + if key_size_bytes != len(ciphertext): + raise ValueError("Ciphertext length must be equal to key size.") + + return _enc_dec_rsa(self._backend, self, ciphertext, padding) + + def public_key(self): + ctx = self._backend._lib.RSAPublicKey_dup(self._rsa_cdata) + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free) + res = self._backend._lib.RSA_blinding_on(ctx, self._backend._ffi.NULL) + self._backend.openssl_assert(res == 1) + evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx) + return _RSAPublicKey(self._backend, ctx, evp_pkey) + + def private_numbers(self): + n = self._backend._ffi.new("BIGNUM **") + e = self._backend._ffi.new("BIGNUM **") + d = self._backend._ffi.new("BIGNUM **") + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + dmp1 = self._backend._ffi.new("BIGNUM **") + dmq1 = self._backend._ffi.new("BIGNUM **") + iqmp = self._backend._ffi.new("BIGNUM **") + self._backend._lib.RSA_get0_key(self._rsa_cdata, n, e, d) + self._backend.openssl_assert(n[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(e[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(d[0] != self._backend._ffi.NULL) + self._backend._lib.RSA_get0_factors(self._rsa_cdata, p, q) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend._lib.RSA_get0_crt_params( + self._rsa_cdata, dmp1, dmq1, iqmp + ) + self._backend.openssl_assert(dmp1[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(dmq1[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(iqmp[0] != self._backend._ffi.NULL) + return rsa.RSAPrivateNumbers( + p=self._backend._bn_to_int(p[0]), + q=self._backend._bn_to_int(q[0]), + d=self._backend._bn_to_int(d[0]), + dmp1=self._backend._bn_to_int(dmp1[0]), + 
dmq1=self._backend._bn_to_int(dmq1[0]), + iqmp=self._backend._bn_to_int(iqmp[0]), + public_numbers=rsa.RSAPublicNumbers( + e=self._backend._bn_to_int(e[0]), + n=self._backend._bn_to_int(n[0]), + ) + ) + + def private_bytes(self, encoding, format, encryption_algorithm): + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self._evp_pkey, + self._rsa_cdata + ) + + def sign(self, data, padding, algorithm): + signer = self.signer(padding, algorithm) + signer.update(data) + signature = signer.finalize() + return signature + + +@utils.register_interface(RSAPublicKeyWithSerialization) +class _RSAPublicKey(object): + def __init__(self, backend, rsa_cdata, evp_pkey): + self._backend = backend + self._rsa_cdata = rsa_cdata + self._evp_pkey = evp_pkey + + n = self._backend._ffi.new("BIGNUM **") + self._backend._lib.RSA_get0_key( + self._rsa_cdata, n, self._backend._ffi.NULL, + self._backend._ffi.NULL + ) + self._backend.openssl_assert(n[0] != self._backend._ffi.NULL) + self._key_size = self._backend._lib.BN_num_bits(n[0]) + + key_size = utils.read_only_property("_key_size") + + def verifier(self, signature, padding, algorithm): + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") + + return _RSAVerificationContext( + self._backend, self, signature, padding, algorithm + ) + + def encrypt(self, plaintext, padding): + return _enc_dec_rsa(self._backend, self, plaintext, padding) + + def public_numbers(self): + n = self._backend._ffi.new("BIGNUM **") + e = self._backend._ffi.new("BIGNUM **") + self._backend._lib.RSA_get0_key( + self._rsa_cdata, n, e, self._backend._ffi.NULL + ) + self._backend.openssl_assert(n[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(e[0] != self._backend._ffi.NULL) + return rsa.RSAPublicNumbers( + e=self._backend._bn_to_int(e[0]), + n=self._backend._bn_to_int(n[0]), + ) + + def public_bytes(self, encoding, format): + return self._backend._public_key_bytes( + encoding, + format, + self, + self._evp_pkey, + self._rsa_cdata + ) + + def verify(self, signature, data, padding, algorithm): + verifier = self.verifier(signature, padding, algorithm) + verifier.update(data) + verifier.verify() diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/utils.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/utils.py new file mode 100644 index 0000000..001121f --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/utils.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
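# NOTE (editorial annotation on the rsa.py backend that ends above, not part
# of the vendored utils.py starting here): two more illustrative round trips,
# reusing `key`, `hashes`, `padding`, and `default_backend` from the earlier
# sketch; the OAEP parameter choices are assumptions:
#
#   >>> oaep = padding.OAEP(mgf=padding.MGF1(algorithm=hashes.SHA1()),
#   ...                     algorithm=hashes.SHA1(), label=None)
#   >>> ct = key.public_key().encrypt(b"secret", oaep)
#   >>> key.decrypt(ct, oaep)
#   b'secret'
#   >>> key.private_numbers().private_key(default_backend()).key_size
#   2048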
+
+from __future__ import absolute_import, division, print_function
+
+import six
+
+
+def _truncate_digest(digest, order_bits):
+    digest_len = len(digest)
+
+    if 8 * digest_len > order_bits:
+        digest_len = (order_bits + 7) // 8
+        digest = digest[:digest_len]
+
+    if 8 * digest_len > order_bits:
+        rshift = 8 - (order_bits & 0x7)
+        assert 0 < rshift < 8
+
+        mask = 0xFF >> rshift << rshift
+
+        # Set the bottom rshift bits to 0
+        digest = digest[:-1] + six.int2byte(six.indexbytes(digest, -1) & mask)
+
+    return digest
diff --git a/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/x509.py b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/x509.py
new file mode 100644
index 0000000..4851dfc
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/x509.py
@@ -0,0 +1,420 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import operator
+import warnings
+
+from cryptography import utils, x509
+from cryptography.exceptions import UnsupportedAlgorithm
+from cryptography.hazmat.backends.openssl.decode_asn1 import (
+    _CERTIFICATE_EXTENSION_PARSER, _CRL_EXTENSION_PARSER,
+    _CSR_EXTENSION_PARSER, _REVOKED_CERTIFICATE_EXTENSION_PARSER,
+    _asn1_integer_to_int, _asn1_string_to_bytes, _decode_x509_name, _obj2txt,
+    _parse_asn1_time
+)
+from cryptography.hazmat.primitives import hashes, serialization
+
+
+@utils.register_interface(x509.Certificate)
+class _Certificate(object):
+    def __init__(self, backend, x509):
+        self._backend = backend
+        self._x509 = x509
+
+    def __repr__(self):
+        return "<Certificate(subject={0}, ...)>".format(self.subject)
+
+    def __eq__(self, other):
+        if not isinstance(other, x509.Certificate):
+            return NotImplemented
+
+        res = self._backend._lib.X509_cmp(self._x509, other._x509)
+        return res == 0
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __hash__(self):
+        return hash(self.public_bytes(serialization.Encoding.DER))
+
+    def fingerprint(self, algorithm):
+        h = hashes.Hash(algorithm, self._backend)
+        h.update(self.public_bytes(serialization.Encoding.DER))
+        return h.finalize()
+
+    @property
+    def version(self):
+        version = self._backend._lib.X509_get_version(self._x509)
+        if version == 0:
+            return x509.Version.v1
+        elif version == 2:
+            return x509.Version.v3
+        else:
+            raise x509.InvalidVersion(
+                "{0} is not a valid X509 version".format(version), version
+            )
+
+    @property
+    def serial(self):
+        warnings.warn(
+            "Certificate serial is deprecated, use serial_number instead.",
+            utils.DeprecatedIn14,
+            stacklevel=2
+        )
+        return self.serial_number
+
+    @property
+    def serial_number(self):
+        asn1_int = self._backend._lib.X509_get_serialNumber(self._x509)
+        self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
+        return _asn1_integer_to_int(self._backend, asn1_int)
+
+    def public_key(self):
+        pkey = self._backend._lib.X509_get_pubkey(self._x509)
+        if pkey == self._backend._ffi.NULL:
+            # Remove errors from the stack.
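# NOTE (editorial annotation, not part of the vendored file): certificates
# reach this class via the backend load functions; `pem_data` below is an
# assumed byte string containing one PEM certificate:
#
#   >>> from cryptography import x509
#   >>> from cryptography.hazmat.backends import default_backend
#   >>> from cryptography.hazmat.primitives import hashes
#   >>> cert = x509.load_pem_x509_certificate(pem_data, default_backend())
#   >>> pub = cert.public_key()             # the method implemented here
#   >>> fp = cert.fingerprint(hashes.SHA256())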
+ self._backend._consume_errors() + raise ValueError("Certificate public key is of an unknown type") + + pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free) + + return self._backend._evp_pkey_to_public_key(pkey) + + @property + def not_valid_before(self): + asn1_time = self._backend._lib.X509_get_notBefore(self._x509) + return _parse_asn1_time(self._backend, asn1_time) + + @property + def not_valid_after(self): + asn1_time = self._backend._lib.X509_get_notAfter(self._x509) + return _parse_asn1_time(self._backend, asn1_time) + + @property + def issuer(self): + issuer = self._backend._lib.X509_get_issuer_name(self._x509) + self._backend.openssl_assert(issuer != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, issuer) + + @property + def subject(self): + subject = self._backend._lib.X509_get_subject_name(self._x509) + self._backend.openssl_assert(subject != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, subject) + + @property + def signature_hash_algorithm(self): + alg = self._backend._ffi.new("X509_ALGOR **") + self._backend._lib.X509_get0_signature( + self._backend._ffi.NULL, alg, self._x509 + ) + self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL) + oid = _obj2txt(self._backend, alg[0].algorithm) + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{0} not recognized".format(oid) + ) + + @property + def extensions(self): + return _CERTIFICATE_EXTENSION_PARSER.parse(self._backend, self._x509) + + @property + def signature(self): + sig = self._backend._ffi.new("ASN1_BIT_STRING **") + self._backend._lib.X509_get0_signature( + sig, self._backend._ffi.NULL, self._x509 + ) + self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL) + return _asn1_string_to_bytes(self._backend, sig[0]) + + @property + def tbs_certificate_bytes(self): + pp = self._backend._ffi.new("unsigned char **") + res = self._backend._lib.i2d_re_X509_tbs(self._x509, pp) + self._backend.openssl_assert(res > 0) + pp = self._backend._ffi.gc( + pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0]) + ) + return self._backend._ffi.buffer(pp[0], res)[:] + + def public_bytes(self, encoding): + bio = self._backend._create_mem_bio_gc() + if encoding is serialization.Encoding.PEM: + res = self._backend._lib.PEM_write_bio_X509(bio, self._x509) + elif encoding is serialization.Encoding.DER: + res = self._backend._lib.i2d_X509_bio(bio, self._x509) + else: + raise TypeError("encoding must be an item from the Encoding enum") + + self._backend.openssl_assert(res == 1) + return self._backend._read_mem_bio(bio) + + +@utils.register_interface(x509.RevokedCertificate) +class _RevokedCertificate(object): + def __init__(self, backend, crl, x509_revoked): + self._backend = backend + # The X509_REVOKED_value is a X509_REVOKED * that has + # no reference counting. This means when X509_CRL_free is + # called then the CRL and all X509_REVOKED * are freed. Since + # you can retain a reference to a single revoked certificate + # and let the CRL fall out of scope we need to retain a + # private reference to the CRL inside the RevokedCertificate + # object to prevent the gc from being called inappropriately. 
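# NOTE (editorial annotation, not part of the vendored file): concretely, the
# retained reference means a revoked entry outlives the CRL object it came
# from; `crl_pem` below is an assumed byte string holding a PEM CRL:
#
#   >>> from cryptography import x509
#   >>> from cryptography.hazmat.backends import default_backend
#   >>> from cryptography.hazmat.primitives import serialization
#   >>> crl = x509.load_pem_x509_crl(crl_pem, default_backend())
#   >>> pem = crl.public_bytes(serialization.Encoding.PEM)  # defined below
#   >>> entry = crl[0]
#   >>> del crl               # entry._crl still keeps the OpenSSL CRL alive
#   >>> serial = entry.serial_number   # safe: no dangling X509_REVOKED use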
+ self._crl = crl + self._x509_revoked = x509_revoked + + @property + def serial_number(self): + asn1_int = self._backend._lib.X509_REVOKED_get0_serialNumber( + self._x509_revoked + ) + self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL) + return _asn1_integer_to_int(self._backend, asn1_int) + + @property + def revocation_date(self): + return _parse_asn1_time( + self._backend, + self._backend._lib.X509_REVOKED_get0_revocationDate( + self._x509_revoked + ) + ) + + @property + def extensions(self): + return _REVOKED_CERTIFICATE_EXTENSION_PARSER.parse( + self._backend, self._x509_revoked + ) + + +@utils.register_interface(x509.CertificateRevocationList) +class _CertificateRevocationList(object): + def __init__(self, backend, x509_crl): + self._backend = backend + self._x509_crl = x509_crl + + def __eq__(self, other): + if not isinstance(other, x509.CertificateRevocationList): + return NotImplemented + + res = self._backend._lib.X509_CRL_cmp(self._x509_crl, other._x509_crl) + return res == 0 + + def __ne__(self, other): + return not self == other + + def fingerprint(self, algorithm): + h = hashes.Hash(algorithm, self._backend) + bio = self._backend._create_mem_bio_gc() + res = self._backend._lib.i2d_X509_CRL_bio( + bio, self._x509_crl + ) + self._backend.openssl_assert(res == 1) + der = self._backend._read_mem_bio(bio) + h.update(der) + return h.finalize() + + @property + def signature_hash_algorithm(self): + alg = self._backend._ffi.new("X509_ALGOR **") + self._backend._lib.X509_CRL_get0_signature( + self._backend._ffi.NULL, alg, self._x509_crl + ) + self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL) + oid = _obj2txt(self._backend, alg[0].algorithm) + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{0} not recognized".format(oid) + ) + + @property + def issuer(self): + issuer = self._backend._lib.X509_CRL_get_issuer(self._x509_crl) + self._backend.openssl_assert(issuer != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, issuer) + + @property + def next_update(self): + nu = self._backend._lib.X509_CRL_get_nextUpdate(self._x509_crl) + self._backend.openssl_assert(nu != self._backend._ffi.NULL) + return _parse_asn1_time(self._backend, nu) + + @property + def last_update(self): + lu = self._backend._lib.X509_CRL_get_lastUpdate(self._x509_crl) + self._backend.openssl_assert(lu != self._backend._ffi.NULL) + return _parse_asn1_time(self._backend, lu) + + @property + def signature(self): + sig = self._backend._ffi.new("ASN1_BIT_STRING **") + self._backend._lib.X509_CRL_get0_signature( + sig, self._backend._ffi.NULL, self._x509_crl + ) + self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL) + return _asn1_string_to_bytes(self._backend, sig[0]) + + @property + def tbs_certlist_bytes(self): + pp = self._backend._ffi.new("unsigned char **") + res = self._backend._lib.i2d_re_X509_CRL_tbs(self._x509_crl, pp) + self._backend.openssl_assert(res > 0) + pp = self._backend._ffi.gc( + pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0]) + ) + return self._backend._ffi.buffer(pp[0], res)[:] + + def public_bytes(self, encoding): + bio = self._backend._create_mem_bio_gc() + if encoding is serialization.Encoding.PEM: + res = self._backend._lib.PEM_write_bio_X509_CRL( + bio, self._x509_crl + ) + elif encoding is serialization.Encoding.DER: + res = self._backend._lib.i2d_X509_CRL_bio(bio, self._x509_crl) + else: + raise TypeError("encoding must be an item from the Encoding enum") + + 
self._backend.openssl_assert(res == 1) + return self._backend._read_mem_bio(bio) + + def _revoked_cert(self, idx): + revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl) + r = self._backend._lib.sk_X509_REVOKED_value(revoked, idx) + self._backend.openssl_assert(r != self._backend._ffi.NULL) + return _RevokedCertificate(self._backend, self, r) + + def __iter__(self): + for i in range(len(self)): + yield self._revoked_cert(i) + + def __getitem__(self, idx): + if isinstance(idx, slice): + start, stop, step = idx.indices(len(self)) + return [self._revoked_cert(i) for i in range(start, stop, step)] + else: + idx = operator.index(idx) + if idx < 0: + idx += len(self) + if not 0 <= idx < len(self): + raise IndexError + return self._revoked_cert(idx) + + def __len__(self): + revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl) + if revoked == self._backend._ffi.NULL: + return 0 + else: + return self._backend._lib.sk_X509_REVOKED_num(revoked) + + @property + def extensions(self): + return _CRL_EXTENSION_PARSER.parse(self._backend, self._x509_crl) + + +@utils.register_interface(x509.CertificateSigningRequest) +class _CertificateSigningRequest(object): + def __init__(self, backend, x509_req): + self._backend = backend + self._x509_req = x509_req + + def __eq__(self, other): + if not isinstance(other, _CertificateSigningRequest): + return NotImplemented + + self_bytes = self.public_bytes(serialization.Encoding.DER) + other_bytes = other.public_bytes(serialization.Encoding.DER) + return self_bytes == other_bytes + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.public_bytes(serialization.Encoding.DER)) + + def public_key(self): + pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req) + self._backend.openssl_assert(pkey != self._backend._ffi.NULL) + pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free) + return self._backend._evp_pkey_to_public_key(pkey) + + @property + def subject(self): + subject = self._backend._lib.X509_REQ_get_subject_name(self._x509_req) + self._backend.openssl_assert(subject != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, subject) + + @property + def signature_hash_algorithm(self): + alg = self._backend._ffi.new("X509_ALGOR **") + self._backend._lib.X509_REQ_get0_signature( + self._backend._ffi.NULL, alg, self._x509_req + ) + self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL) + oid = _obj2txt(self._backend, alg[0].algorithm) + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{0} not recognized".format(oid) + ) + + @property + def extensions(self): + x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req) + return _CSR_EXTENSION_PARSER.parse(self._backend, x509_exts) + + def public_bytes(self, encoding): + bio = self._backend._create_mem_bio_gc() + if encoding is serialization.Encoding.PEM: + res = self._backend._lib.PEM_write_bio_X509_REQ( + bio, self._x509_req + ) + elif encoding is serialization.Encoding.DER: + res = self._backend._lib.i2d_X509_REQ_bio(bio, self._x509_req) + else: + raise TypeError("encoding must be an item from the Encoding enum") + + self._backend.openssl_assert(res == 1) + return self._backend._read_mem_bio(bio) + + @property + def tbs_certrequest_bytes(self): + pp = self._backend._ffi.new("unsigned char **") + res = self._backend._lib.i2d_re_X509_REQ_tbs(self._x509_req, pp) + self._backend.openssl_assert(res > 0) + pp = self._backend._ffi.gc( + 
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0]) + ) + return self._backend._ffi.buffer(pp[0], res)[:] + + @property + def signature(self): + sig = self._backend._ffi.new("ASN1_BIT_STRING **") + self._backend._lib.X509_REQ_get0_signature( + sig, self._backend._ffi.NULL, self._x509_req + ) + self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL) + return _asn1_string_to_bytes(self._backend, sig[0]) + + @property + def is_signature_valid(self): + pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req) + self._backend.openssl_assert(pkey != self._backend._ffi.NULL) + pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free) + res = self._backend._lib.X509_REQ_verify(self._x509_req, pkey) + + if res != 1: + self._backend._consume_errors() + return False + + return True diff --git a/lib/python3.4/site-packages/cryptography/hazmat/bindings/__init__.py b/lib/python3.4/site-packages/cryptography/hazmat/bindings/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/bindings/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/lib/python3.4/site-packages/cryptography/hazmat/bindings/_constant_time.cpython-34m.so b/lib/python3.4/site-packages/cryptography/hazmat/bindings/_constant_time.cpython-34m.so new file mode 100755 index 0000000..8d7a03d Binary files /dev/null and b/lib/python3.4/site-packages/cryptography/hazmat/bindings/_constant_time.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/cryptography/hazmat/bindings/_openssl.cpython-34m.so b/lib/python3.4/site-packages/cryptography/hazmat/bindings/_openssl.cpython-34m.so new file mode 100755 index 0000000..7f16baa Binary files /dev/null and b/lib/python3.4/site-packages/cryptography/hazmat/bindings/_openssl.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/cryptography/hazmat/bindings/_padding.cpython-34m.so b/lib/python3.4/site-packages/cryptography/hazmat/bindings/_padding.cpython-34m.so new file mode 100755 index 0000000..f4cf3f5 Binary files /dev/null and b/lib/python3.4/site-packages/cryptography/hazmat/bindings/_padding.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py b/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py b/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py new file mode 100644 index 0000000..dfe046b --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py @@ -0,0 +1,15 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
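# NOTE (editorial annotation on the x509.py backend that ends above, not part
# of the vendored CommonCrypto binding starting here): an illustrative CSR
# check using the property implemented there; `csr_pem` is an assumed byte
# string holding a PEM certificate signing request:
#
#   >>> from cryptography import x509
#   >>> from cryptography.hazmat.backends import default_backend
#   >>> csr = x509.load_pem_x509_csr(csr_pem, default_backend())
#   >>> csr.is_signature_valid
#   True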
+ +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.bindings._commoncrypto import ffi, lib + + +class Binding(object): + """ + CommonCrypto API wrapper. + """ + lib = lib + ffi = ffi diff --git a/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py new file mode 100644 index 0000000..18bb54b --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py @@ -0,0 +1,431 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +# This is a temporary copy of all the CONDITIONAL_NAMES from _cffi_src so +# we can loop over them and delete them at runtime. It will be removed when +# cffi supports #if in cdef + +CONDITIONAL_NAMES = { + "Cryptography_HAS_AES_WRAP": [ + "AES_wrap_key", + "AES_unwrap_key", + ], + "Cryptography_HAS_CMAC": [ + "CMAC_CTX_new", + "CMAC_Init", + "CMAC_Update", + "CMAC_Final", + "CMAC_CTX_copy", + "CMAC_CTX_free", + ], + "Cryptography_HAS_CMS": [ + "BIO_new_CMS", + "i2d_CMS_bio_stream", + "PEM_write_bio_CMS_stream", + "CMS_final", + "CMS_sign", + "CMS_verify", + "CMS_encrypt", + "CMS_decrypt", + "CMS_add1_signer", + "CMS_TEXT", + "CMS_NOCERTS", + "CMS_NO_CONTENT_VERIFY", + "CMS_NO_ATTR_VERIFY", + "CMS_NOSIGS", + "CMS_NOINTERN", + "CMS_NO_SIGNER_CERT_VERIFY", + "CMS_NOVERIFY", + "CMS_DETACHED", + "CMS_BINARY", + "CMS_NOATTR", + "CMS_NOSMIMECAP", + "CMS_NOOLDMIMETYPE", + "CMS_CRLFEOL", + "CMS_STREAM", + "CMS_NOCRL", + "CMS_PARTIAL", + "CMS_REUSE_DIGEST", + "CMS_USE_KEYID", + "CMS_DEBUG_DECRYPT", + ], + "Cryptography_HAS_CMS_BIO_FUNCTIONS": [ + "BIO_new_CMS", + "i2d_CMS_bio_stream", + "PEM_write_bio_CMS_stream", + ], + "Cryptography_HAS_EC": [ + "OPENSSL_EC_NAMED_CURVE", + "EC_GROUP_new", + "EC_GROUP_free", + "EC_GROUP_clear_free", + "EC_GROUP_new_curve_GFp", + "EC_GROUP_new_by_curve_name", + "EC_GROUP_set_curve_GFp", + "EC_GROUP_get_curve_GFp", + "EC_GROUP_method_of", + "EC_GROUP_get0_generator", + "EC_GROUP_get_curve_name", + "EC_GROUP_get_degree", + "EC_GROUP_set_asn1_flag", + "EC_GROUP_set_point_conversion_form", + "EC_KEY_new", + "EC_KEY_free", + "EC_get_builtin_curves", + "EC_KEY_new_by_curve_name", + "EC_KEY_copy", + "EC_KEY_dup", + "EC_KEY_up_ref", + "EC_KEY_set_group", + "EC_KEY_get0_private_key", + "EC_KEY_set_private_key", + "EC_KEY_set_public_key", + "EC_KEY_get_enc_flags", + "EC_KEY_set_enc_flags", + "EC_KEY_set_conv_form", + "EC_KEY_set_asn1_flag", + "EC_KEY_precompute_mult", + "EC_KEY_generate_key", + "EC_KEY_check_key", + "EC_POINT_new", + "EC_POINT_free", + "EC_POINT_clear_free", + "EC_POINT_copy", + "EC_POINT_dup", + "EC_POINT_method_of", + "EC_POINT_set_to_infinity", + 
"EC_POINT_set_Jprojective_coordinates_GFp", + "EC_POINT_get_Jprojective_coordinates_GFp", + "EC_POINT_set_affine_coordinates_GFp", + "EC_POINT_get_affine_coordinates_GFp", + "EC_POINT_set_compressed_coordinates_GFp", + "EC_POINT_point2oct", + "EC_POINT_oct2point", + "EC_POINT_point2bn", + "EC_POINT_bn2point", + "EC_POINT_point2hex", + "EC_POINT_hex2point", + "EC_POINT_add", + "EC_POINT_dbl", + "EC_POINT_invert", + "EC_POINT_is_at_infinity", + "EC_POINT_is_on_curve", + "EC_POINT_cmp", + "EC_POINT_make_affine", + "EC_POINTs_make_affine", + "EC_POINTs_mul", + "EC_POINT_mul", + "EC_GROUP_precompute_mult", + "EC_GROUP_have_precompute_mult", + "EC_GFp_simple_method", + "EC_GFp_mont_method", + "EC_GFp_nist_method", + "EC_METHOD_get_field_type", + "EVP_PKEY_assign_EC_KEY", + "EVP_PKEY_get1_EC_KEY", + "EVP_PKEY_set1_EC_KEY", + "PEM_write_bio_ECPrivateKey", + "i2d_EC_PUBKEY", + "d2i_EC_PUBKEY", + "d2i_EC_PUBKEY_bio", + "i2d_EC_PUBKEY_bio", + "d2i_ECPrivateKey", + "d2i_ECPrivateKey_bio", + "i2d_ECPrivateKey", + "i2d_ECPrivateKey_bio", + "i2o_ECPublicKey", + "o2i_ECPublicKey", + "SSL_CTX_set_tmp_ecdh", + "POINT_CONVERSION_COMPRESSED", + "POINT_CONVERSION_UNCOMPRESSED", + "POINT_CONVERSION_HYBRID", + ], + + "Cryptography_HAS_EC_1_0_1": [ + "EC_KEY_get_flags", + "EC_KEY_set_flags", + "EC_KEY_clear_flags", + "EC_KEY_set_public_key_affine_coordinates", + ], + + "Cryptography_HAS_EC2M": [ + "EC_GF2m_simple_method", + "EC_POINT_set_affine_coordinates_GF2m", + "EC_POINT_get_affine_coordinates_GF2m", + "EC_POINT_set_compressed_coordinates_GF2m", + "EC_GROUP_set_curve_GF2m", + "EC_GROUP_get_curve_GF2m", + "EC_GROUP_new_curve_GF2m", + ], + + "Cryptography_HAS_EC_1_0_2": [ + "EC_curve_nid2nist", + ], + "Cryptography_HAS_ECDH": [ + "ECDH_compute_key", + ], + "Cryptography_HAS_ECDSA": [ + "ECDSA_SIG_new", + "ECDSA_SIG_free", + "i2d_ECDSA_SIG", + "d2i_ECDSA_SIG", + "ECDSA_do_sign", + "ECDSA_do_sign_ex", + "ECDSA_do_verify", + "ECDSA_sign_setup", + "ECDSA_sign", + "ECDSA_sign_ex", + "ECDSA_verify", + "ECDSA_size", + ], + "Cryptography_HAS_ENGINE_CRYPTODEV": [ + "ENGINE_load_cryptodev" + ], + "Cryptography_HAS_098H_ERROR_CODES": [ + "ASN1_F_B64_READ_ASN1", + "ASN1_F_B64_WRITE_ASN1", + "ASN1_F_SMIME_READ_ASN1", + "ASN1_F_SMIME_TEXT", + "ASN1_R_NO_CONTENT_TYPE", + "ASN1_R_NO_MULTIPART_BODY_FAILURE", + "ASN1_R_NO_MULTIPART_BOUNDARY", + ], + "Cryptography_HAS_098C_CAMELLIA_CODES": [ + "EVP_F_CAMELLIA_INIT_KEY", + "EVP_R_CAMELLIA_KEY_SETUP_FAILED" + ], + "Cryptography_HAS_EC_CODES": [ + "EC_R_UNKNOWN_GROUP", + "EC_F_EC_GROUP_NEW_BY_CURVE_NAME" + ], + "Cryptography_HAS_TLSEXT_ERROR_CODES": [ + "SSL_TLSEXT_ERR_OK", + "SSL_TLSEXT_ERR_ALERT_WARNING", + "SSL_TLSEXT_ERR_ALERT_FATAL", + "SSL_TLSEXT_ERR_NOACK", + ], + "Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR": [ + "RSA_R_PKCS_DECODING_ERROR" + ], + "Cryptography_HAS_GCM": [ + "EVP_CTRL_GCM_GET_TAG", + "EVP_CTRL_GCM_SET_TAG", + "EVP_CTRL_GCM_SET_IVLEN", + ], + "Cryptography_HAS_PBKDF2_HMAC": [ + "PKCS5_PBKDF2_HMAC" + ], + "Cryptography_HAS_PKEY_CTX": [ + "EVP_PKEY_CTX_new", + "EVP_PKEY_CTX_new_id", + "EVP_PKEY_CTX_dup", + "EVP_PKEY_CTX_free", + "EVP_PKEY_sign", + "EVP_PKEY_sign_init", + "EVP_PKEY_verify", + "EVP_PKEY_verify_init", + "Cryptography_EVP_PKEY_encrypt", + "EVP_PKEY_encrypt_init", + "Cryptography_EVP_PKEY_decrypt", + "EVP_PKEY_decrypt_init", + "EVP_PKEY_CTX_set_signature_md", + "EVP_PKEY_id", + "EVP_PKEY_CTX_set_rsa_padding", + "EVP_PKEY_CTX_set_rsa_pss_saltlen", + ], + "Cryptography_HAS_ECDSA_SHA2_NIDS": [ + "NID_ecdsa_with_SHA224", + "NID_ecdsa_with_SHA256", + 
"NID_ecdsa_with_SHA384", + "NID_ecdsa_with_SHA512", + ], + "Cryptography_HAS_EGD": [ + "RAND_egd", + "RAND_egd_bytes", + "RAND_query_egd_bytes", + ], + "Cryptography_HAS_PSS_PADDING": [ + "RSA_PKCS1_PSS_PADDING", + ], + "Cryptography_HAS_MGF1_MD": [ + "EVP_PKEY_CTX_set_rsa_mgf1_md", + ], + "Cryptography_HAS_RSA_OAEP_MD": [ + "EVP_PKEY_CTX_set_rsa_oaep_md", + ], + "Cryptography_HAS_TLSv1_1": [ + "SSL_OP_NO_TLSv1_1", + "TLSv1_1_method", + "TLSv1_1_server_method", + "TLSv1_1_client_method", + ], + + "Cryptography_HAS_TLSv1_2": [ + "SSL_OP_NO_TLSv1_2", + "TLSv1_2_method", + "TLSv1_2_server_method", + "TLSv1_2_client_method", + ], + + "Cryptography_HAS_SSL3_METHOD": [ + "SSLv3_method", + "SSLv3_client_method", + "SSLv3_server_method", + ], + + "Cryptography_HAS_TLSEXT_HOSTNAME": [ + "SSL_set_tlsext_host_name", + "SSL_get_servername", + "SSL_CTX_set_tlsext_servername_callback", + ], + + "Cryptography_HAS_TLSEXT_STATUS_REQ_CB": [ + "SSL_CTX_set_tlsext_status_cb", + "SSL_CTX_set_tlsext_status_arg" + ], + + "Cryptography_HAS_STATUS_REQ_OCSP_RESP": [ + "SSL_set_tlsext_status_ocsp_resp", + "SSL_get_tlsext_status_ocsp_resp", + ], + + "Cryptography_HAS_TLSEXT_STATUS_REQ_TYPE": [ + "SSL_set_tlsext_status_type", + ], + + "Cryptography_HAS_RELEASE_BUFFERS": [ + "SSL_MODE_RELEASE_BUFFERS", + ], + + "Cryptography_HAS_OP_NO_COMPRESSION": [ + "SSL_OP_NO_COMPRESSION", + ], + + "Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING": [ + "SSL_OP_MSIE_SSLV2_RSA_PADDING", + ], + + "Cryptography_HAS_SSL_OP_NO_TICKET": [ + "SSL_OP_NO_TICKET", + ], + + "Cryptography_HAS_SSL_SET_SSL_CTX": [ + "SSL_set_SSL_CTX", + "TLSEXT_NAMETYPE_host_name", + ], + + "Cryptography_HAS_NETBSD_D1_METH": [ + "DTLSv1_method", + ], + + "Cryptography_HAS_NEXTPROTONEG": [ + "SSL_CTX_set_next_protos_advertised_cb", + "SSL_CTX_set_next_proto_select_cb", + "SSL_select_next_proto", + "SSL_get0_next_proto_negotiated", + ], + + "Cryptography_HAS_SECURE_RENEGOTIATION": [ + "SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION", + "SSL_OP_LEGACY_SERVER_CONNECT", + "SSL_get_secure_renegotiation_support", + ], + + "Cryptography_HAS_ALPN": [ + "SSL_CTX_set_alpn_protos", + "SSL_set_alpn_protos", + "SSL_CTX_set_alpn_select_cb", + "SSL_get0_alpn_selected", + ], + + "Cryptography_HAS_COMPRESSION": [ + "SSL_get_current_compression", + "SSL_get_current_expansion", + "SSL_COMP_get_name", + ], + + "Cryptography_HAS_GET_SERVER_TMP_KEY": [ + "SSL_get_server_tmp_key", + ], + + "Cryptography_HAS_SSL_CTX_SET_CLIENT_CERT_ENGINE": [ + "SSL_CTX_set_client_cert_engine", + ], + "Cryptography_HAS_SSL_CTX_CLEAR_OPTIONS": [ + "SSL_CTX_clear_options", + ], + "Cryptography_HAS_102_VERIFICATION_ERROR_CODES": [ + 'X509_V_ERR_SUITE_B_INVALID_VERSION', + 'X509_V_ERR_SUITE_B_INVALID_ALGORITHM', + 'X509_V_ERR_SUITE_B_INVALID_CURVE', + 'X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM', + 'X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED', + 'X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256', + 'X509_V_ERR_HOSTNAME_MISMATCH', + 'X509_V_ERR_EMAIL_MISMATCH', + 'X509_V_ERR_IP_ADDRESS_MISMATCH' + ], + "Cryptography_HAS_102_VERIFICATION_PARAMS": [ + "X509_V_FLAG_SUITEB_128_LOS_ONLY", + "X509_V_FLAG_SUITEB_192_LOS", + "X509_V_FLAG_SUITEB_128_LOS", + "X509_VERIFY_PARAM_set1_host", + "X509_VERIFY_PARAM_set1_email", + "X509_VERIFY_PARAM_set1_ip", + "X509_VERIFY_PARAM_set1_ip_asc", + "X509_VERIFY_PARAM_set_hostflags", + ], + "Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST": [ + "X509_V_FLAG_TRUSTED_FIRST", + ], + "Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN": [ + "X509_V_FLAG_PARTIAL_CHAIN", + ], + 
"Cryptography_HAS_100_VERIFICATION_ERROR_CODES": [ + 'X509_V_ERR_DIFFERENT_CRL_SCOPE', + 'X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE', + 'X509_V_ERR_UNNESTED_RESOURCE', + 'X509_V_ERR_PERMITTED_VIOLATION', + 'X509_V_ERR_EXCLUDED_VIOLATION', + 'X509_V_ERR_SUBTREE_MINMAX', + 'X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE', + 'X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX', + 'X509_V_ERR_UNSUPPORTED_NAME_SYNTAX', + 'X509_V_ERR_CRL_PATH_VALIDATION_ERROR', + ], + "Cryptography_HAS_100_VERIFICATION_PARAMS": [ + "Cryptography_HAS_100_VERIFICATION_PARAMS", + "X509_V_FLAG_EXTENDED_CRL_SUPPORT", + "X509_V_FLAG_USE_DELTAS", + ], + "Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE": [ + "X509_V_FLAG_CHECK_SS_SIGNATURE", + ], + "Cryptography_HAS_SET_CERT_CB": [ + "SSL_CTX_set_cert_cb", + "SSL_set_cert_cb", + ], + "Cryptography_HAS_AES_CTR128_ENCRYPT": [ + "AES_ctr128_encrypt", + ], + "Cryptography_HAS_SSL_ST": [ + "SSL_ST_BEFORE", + "SSL_ST_OK", + "SSL_ST_INIT", + "SSL_ST_RENEGOTIATE", + ], + "Cryptography_HAS_TLS_ST": [ + "TLS_ST_BEFORE", + "TLS_ST_OK", + ], + "Cryptography_HAS_LOCKING_CALLBACKS": [ + "CRYPTO_LOCK", + "CRYPTO_UNLOCK", + "CRYPTO_READ", + "CRYPTO_LOCK_SSL", + "CRYPTO_lock", + ] +} diff --git a/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/binding.py new file mode 100644 index 0000000..7727ad8 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/binding.py @@ -0,0 +1,250 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import collections +import os +import threading +import types +import warnings + +from cryptography.exceptions import InternalError +from cryptography.hazmat.bindings._openssl import ffi, lib +from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES + +_OpenSSLError = collections.namedtuple("_OpenSSLError", + ["code", "lib", "func", "reason"]) +_OpenSSLErrorWithText = collections.namedtuple( + "_OpenSSLErrorWithText", ["code", "lib", "func", "reason", "reason_text"] +) + + +def _consume_errors(lib): + errors = [] + while True: + code = lib.ERR_get_error() + if code == 0: + break + + err_lib = lib.ERR_GET_LIB(code) + err_func = lib.ERR_GET_FUNC(code) + err_reason = lib.ERR_GET_REASON(code) + + errors.append(_OpenSSLError(code, err_lib, err_func, err_reason)) + + return errors + + +def _openssl_assert(lib, ok): + if not ok: + errors = _consume_errors(lib) + errors_with_text = [] + for err in errors: + err_text_reason = ffi.string( + lib.ERR_error_string(err.code, ffi.NULL) + ) + errors_with_text.append( + _OpenSSLErrorWithText( + err.code, err.lib, err.func, err.reason, err_text_reason + ) + ) + + raise InternalError( + "Unknown OpenSSL error. This error is commonly encountered when " + "another library is not cleaning up the OpenSSL error stack. If " + "you are using cryptography with another library that uses " + "OpenSSL try disabling it before reporting a bug. Otherwise " + "please file an issue at https://github.com/pyca/cryptography/" + "issues with information on how to reproduce " + "this. ({0!r})".format(errors_with_text), + errors_with_text + ) + + +def ffi_callback(signature, name, **kwargs): + """Callback dispatcher + + The ffi_callback() dispatcher keeps callbacks compatible between dynamic + and static callbacks. 
+ """ + def wrapper(func): + if lib.Cryptography_STATIC_CALLBACKS: + # def_extern() returns a decorator that sets the internal + # function pointer and returns the original function unmodified. + ffi.def_extern(name=name, **kwargs)(func) + callback = getattr(lib, name) + else: + # callback() wraps the function in a cdata function. + callback = ffi.callback(signature, **kwargs)(func) + return callback + return wrapper + + +@ffi_callback("int (*)(unsigned char *, int)", + name="Cryptography_rand_bytes", + error=-1) +def _osrandom_rand_bytes(buf, size): + signed = ffi.cast("char *", buf) + result = os.urandom(size) + signed[0:size] = result + return 1 + + +@ffi_callback("int (*)(void)", name="Cryptography_rand_status") +def _osrandom_rand_status(): + return 1 + + +def build_conditional_library(lib, conditional_names): + conditional_lib = types.ModuleType("lib") + excluded_names = set() + for condition, names in conditional_names.items(): + if not getattr(lib, condition): + excluded_names |= set(names) + + for attr in dir(lib): + if attr not in excluded_names: + setattr(conditional_lib, attr, getattr(lib, attr)) + + return conditional_lib + + +class Binding(object): + """ + OpenSSL API wrapper. + """ + lib = None + ffi = ffi + _lib_loaded = False + _locks = None + _lock_cb_handle = None + _init_lock = threading.Lock() + _lock_init_lock = threading.Lock() + + _osrandom_engine_id = ffi.new("const char[]", b"osrandom") + _osrandom_engine_name = ffi.new("const char[]", b"osrandom_engine") + _osrandom_method = ffi.new( + "RAND_METHOD *", + dict(bytes=_osrandom_rand_bytes, + pseudorand=_osrandom_rand_bytes, + status=_osrandom_rand_status) + ) + + def __init__(self): + self._ensure_ffi_initialized() + + @classmethod + def _register_osrandom_engine(cls): + _openssl_assert(cls.lib, cls.lib.ERR_peek_error() == 0) + + engine = cls.lib.ENGINE_new() + _openssl_assert(cls.lib, engine != cls.ffi.NULL) + try: + result = cls.lib.ENGINE_set_id(engine, cls._osrandom_engine_id) + _openssl_assert(cls.lib, result == 1) + result = cls.lib.ENGINE_set_name(engine, cls._osrandom_engine_name) + _openssl_assert(cls.lib, result == 1) + result = cls.lib.ENGINE_set_RAND(engine, cls._osrandom_method) + _openssl_assert(cls.lib, result == 1) + result = cls.lib.ENGINE_add(engine) + if result != 1: + errors = _consume_errors(cls.lib) + _openssl_assert( + cls.lib, + errors[0].reason == cls.lib.ENGINE_R_CONFLICTING_ENGINE_ID + ) + + finally: + result = cls.lib.ENGINE_free(engine) + _openssl_assert(cls.lib, result == 1) + + @classmethod + def _ensure_ffi_initialized(cls): + with cls._init_lock: + if not cls._lib_loaded: + cls.lib = build_conditional_library(lib, CONDITIONAL_NAMES) + cls._lib_loaded = True + # initialize the SSL library + cls.lib.SSL_library_init() + # adds all ciphers/digests for EVP + cls.lib.OpenSSL_add_all_algorithms() + # loads error strings for libcrypto and libssl functions + cls.lib.SSL_load_error_strings() + cls._register_osrandom_engine() + + @classmethod + def init_static_locks(cls): + with cls._lock_init_lock: + cls._ensure_ffi_initialized() + + if not cls._lock_cb_handle: + wrapper = ffi_callback( + "void(int, int, const char *, int)", + name="Cryptography_locking_cb", + ) + cls._lock_cb_handle = wrapper(cls._lock_cb) + + # Use Python's implementation if available, importing _ssl triggers + # the setup for this. 
+ __import__("_ssl") + + if cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL: + return + + # If nothing else has setup a locking callback already, we set up + # our own + num_locks = cls.lib.CRYPTO_num_locks() + cls._locks = [threading.Lock() for n in range(num_locks)] + + cls.lib.CRYPTO_set_locking_callback(cls._lock_cb_handle) + + @classmethod + def _lock_cb(cls, mode, n, file, line): + lock = cls._locks[n] + + if mode & cls.lib.CRYPTO_LOCK: + lock.acquire() + elif mode & cls.lib.CRYPTO_UNLOCK: + lock.release() + else: + raise RuntimeError( + "Unknown lock mode {0}: lock={1}, file={2}, line={3}.".format( + mode, n, file, line + ) + ) + + +def _verify_openssl_version(version): + if version < 0x10000000: + if os.environ.get("CRYPTOGRAPHY_ALLOW_OPENSSL_098"): + warnings.warn( + "OpenSSL version 0.9.8 is no longer supported by the OpenSSL " + "project, please upgrade. The next version of cryptography " + "will completely remove support for it.", + DeprecationWarning + ) + else: + raise RuntimeError( + "You are linking against OpenSSL 0.9.8, which is no longer " + "support by the OpenSSL project. You need to upgrade to a " + "newer version of OpenSSL." + ) + elif version < 0x10001000: + warnings.warn( + "OpenSSL versions less than 1.0.1 are no longer supported by the " + "OpenSSL project, please upgrade. A future version of " + "cryptography will drop support for these versions of OpenSSL.", + DeprecationWarning + ) + + +# OpenSSL is not thread safe until the locks are initialized. We call this +# method in module scope so that it executes with the import lock. On +# Pythons < 3.4 this import lock is a global lock, which can prevent a race +# condition registering the OpenSSL locks. On Python 3.4+ the import lock +# is per module so this approach will not work. +Binding.init_static_locks() + +_verify_openssl_version(Binding.lib.SSLeay()) diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/__init__.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py new file mode 100644 index 0000000..494a7a1 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py @@ -0,0 +1,40 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricSignatureContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes and returns nothing. + """ + + @abc.abstractmethod + def finalize(self): + """ + Returns the signature as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricVerificationContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes and returns nothing. 
+ """ + + @abc.abstractmethod + def verify(self): + """ + Raises an exception if the bytes provided to update do not match the + signature or the signature does not match the public key. + """ diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py new file mode 100644 index 0000000..12d53ee --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py @@ -0,0 +1,166 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +class DHPrivateNumbers(object): + def __init__(self, x, public_numbers): + if not isinstance(x, six.integer_types): + raise TypeError("x must be an integer.") + + if not isinstance(public_numbers, DHPublicNumbers): + raise TypeError("public_numbers must be an instance of " + "DHPublicNumbers.") + + self._x = x + self._public_numbers = public_numbers + + def __eq__(self, other): + if not isinstance(other, DHPrivateNumbers): + return NotImplemented + + return ( + self._x == other._x and + self._public_numbers == other._public_numbers + ) + + def __ne__(self, other): + return not self == other + + public_numbers = utils.read_only_property("_public_numbers") + x = utils.read_only_property("_x") + + +class DHPublicNumbers(object): + def __init__(self, y, parameter_numbers): + if not isinstance(y, six.integer_types): + raise TypeError("y must be an integer.") + + if not isinstance(parameter_numbers, DHParameterNumbers): + raise TypeError( + "parameters must be an instance of DHParameterNumbers.") + + self._y = y + self._parameter_numbers = parameter_numbers + + def __eq__(self, other): + if not isinstance(other, DHPublicNumbers): + return NotImplemented + + return ( + self._y == other._y and + self._parameter_numbers == other._parameter_numbers + ) + + def __ne__(self, other): + return not self == other + + y = utils.read_only_property("_y") + parameter_numbers = utils.read_only_property("_parameter_numbers") + + +class DHParameterNumbers(object): + def __init__(self, p, g): + if ( + not isinstance(p, six.integer_types) or + not isinstance(g, six.integer_types) + ): + raise TypeError("p and g must be integers") + + self._p = p + self._g = g + + def __eq__(self, other): + if not isinstance(other, DHParameterNumbers): + return NotImplemented + + return ( + self._p == other._p and + self._g == other._g + ) + + def __ne__(self, other): + return not self == other + + p = utils.read_only_property("_p") + g = utils.read_only_property("_g") + + +@six.add_metaclass(abc.ABCMeta) +class DHParameters(object): + @abc.abstractmethod + def generate_private_key(self): + """ + Generates and returns a DHPrivateKey. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHParametersWithSerialization(DHParameters): + @abc.abstractmethod + def parameter_numbers(self): + """ + Returns a DHParameterNumbers. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPrivateKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The DHPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DHParameters object associated with this private key. 
+ """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPrivateKeyWithSerialization(DHPrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns a DHPrivateNumbers. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPublicKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DHParameters object associated with this public key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPublicKeyWithSerialization(DHPublicKey): + @abc.abstractmethod + def public_numbers(self): + """ + Returns a DHPublicNumbers. + """ diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py new file mode 100644 index 0000000..511d346 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py @@ -0,0 +1,242 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +@six.add_metaclass(abc.ABCMeta) +class DSAParameters(object): + @abc.abstractmethod + def generate_private_key(self): + """ + Generates and returns a DSAPrivateKey. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAParametersWithNumbers(DSAParameters): + @abc.abstractmethod + def parameter_numbers(self): + """ + Returns a DSAParameterNumbers. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPrivateKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The DSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DSAParameters object associated with this private key. + """ + + @abc.abstractmethod + def signer(self, signature_algorithm): + """ + Returns an AsymmetricSignatureContext used for signing data. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPrivateKeyWithSerialization(DSAPrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns a DSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + Returns the key serialized as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPublicKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DSAParameters object associated with this public key. + """ + + @abc.abstractmethod + def verifier(self, signature, signature_algorithm): + """ + Returns an AsymmetricVerificationContext used for signing data. + """ + + @abc.abstractmethod + def public_numbers(self): + """ + Returns a DSAPublicNumbers. + """ + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + Returns the key serialized as bytes. 
+ """ + + +DSAPublicKeyWithSerialization = DSAPublicKey + + +def generate_parameters(key_size, backend): + return backend.generate_dsa_parameters(key_size) + + +def generate_private_key(key_size, backend): + return backend.generate_dsa_private_key_and_parameters(key_size) + + +def _check_dsa_parameters(parameters): + if utils.bit_length(parameters.p) not in [1024, 2048, 3072]: + raise ValueError("p must be exactly 1024, 2048, or 3072 bits long") + if utils.bit_length(parameters.q) not in [160, 256]: + raise ValueError("q must be exactly 160 or 256 bits long") + + if not (1 < parameters.g < parameters.p): + raise ValueError("g, p don't satisfy 1 < g < p.") + + +def _check_dsa_private_numbers(numbers): + parameters = numbers.public_numbers.parameter_numbers + _check_dsa_parameters(parameters) + if numbers.x <= 0 or numbers.x >= parameters.q: + raise ValueError("x must be > 0 and < q.") + + if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p): + raise ValueError("y must be equal to (g ** x % p).") + + +class DSAParameterNumbers(object): + def __init__(self, p, q, g): + if ( + not isinstance(p, six.integer_types) or + not isinstance(q, six.integer_types) or + not isinstance(g, six.integer_types) + ): + raise TypeError( + "DSAParameterNumbers p, q, and g arguments must be integers." + ) + + self._p = p + self._q = q + self._g = g + + p = utils.read_only_property("_p") + q = utils.read_only_property("_q") + g = utils.read_only_property("_g") + + def parameters(self, backend): + return backend.load_dsa_parameter_numbers(self) + + def __eq__(self, other): + if not isinstance(other, DSAParameterNumbers): + return NotImplemented + + return self.p == other.p and self.q == other.q and self.g == other.g + + def __ne__(self, other): + return not self == other + + def __repr__(self): + return ( + "".format( + self=self + ) + ) + + +class DSAPublicNumbers(object): + def __init__(self, y, parameter_numbers): + if not isinstance(y, six.integer_types): + raise TypeError("DSAPublicNumbers y argument must be an integer.") + + if not isinstance(parameter_numbers, DSAParameterNumbers): + raise TypeError( + "parameter_numbers must be a DSAParameterNumbers instance." + ) + + self._y = y + self._parameter_numbers = parameter_numbers + + y = utils.read_only_property("_y") + parameter_numbers = utils.read_only_property("_parameter_numbers") + + def public_key(self, backend): + return backend.load_dsa_public_numbers(self) + + def __eq__(self, other): + if not isinstance(other, DSAPublicNumbers): + return NotImplemented + + return ( + self.y == other.y and + self.parameter_numbers == other.parameter_numbers + ) + + def __ne__(self, other): + return not self == other + + def __repr__(self): + return ( + "".format(self=self) + ) + + +class DSAPrivateNumbers(object): + def __init__(self, x, public_numbers): + if not isinstance(x, six.integer_types): + raise TypeError("DSAPrivateNumbers x argument must be an integer.") + + if not isinstance(public_numbers, DSAPublicNumbers): + raise TypeError( + "public_numbers must be a DSAPublicNumbers instance." 
diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py
new file mode 100644
index 0000000..907a635
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py
@@ -0,0 +1,352 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography import utils
+
+
+@six.add_metaclass(abc.ABCMeta)
+class EllipticCurve(object):
+    @abc.abstractproperty
+    def name(self):
+        """
+        The name of the curve. e.g. secp256r1.
+        """
+
+    @abc.abstractproperty
+    def key_size(self):
+        """
+        The bit length of the base point of the curve.
+        """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class EllipticCurveSignatureAlgorithm(object):
+    @abc.abstractproperty
+    def algorithm(self):
+        """
+        The digest algorithm used with this signature.
+        """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class EllipticCurvePrivateKey(object):
+    @abc.abstractmethod
+    def signer(self, signature_algorithm):
+        """
+        Returns an AsymmetricSignatureContext used for signing data.
+        """
+
+    @abc.abstractmethod
+    def exchange(self, algorithm, peer_public_key):
+        """
+        Performs a key exchange operation using the provided algorithm with
+        the provided peer's public key.
+        """
+
+    @abc.abstractmethod
+    def public_key(self):
+        """
+        The EllipticCurvePublicKey for this private key.
+        """
+
+    @abc.abstractproperty
+    def curve(self):
+        """
+        The EllipticCurve that this key is on.
+        """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class EllipticCurvePrivateKeyWithSerialization(EllipticCurvePrivateKey):
+    @abc.abstractmethod
+    def private_numbers(self):
+        """
+        Returns an EllipticCurvePrivateNumbers.
+        """
+
+    @abc.abstractmethod
+    def private_bytes(self, encoding, format, encryption_algorithm):
+        """
+        Returns the key serialized as bytes.
+        """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class EllipticCurvePublicKey(object):
+    @abc.abstractmethod
+    def verifier(self, signature, signature_algorithm):
+        """
+        Returns an AsymmetricVerificationContext used for verifying data.
+        """
+
+    @abc.abstractproperty
+    def curve(self):
+        """
+        The EllipticCurve that this key is on.
+        """
+
+    @abc.abstractmethod
+    def public_numbers(self):
+        """
+        Returns an EllipticCurvePublicNumbers.
+        """
+
+    @abc.abstractmethod
+    def public_bytes(self, encoding, format):
+        """
+        Returns the key serialized as bytes.
+ """ + + +EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey + + +@utils.register_interface(EllipticCurve) +class SECT571R1(object): + name = "sect571r1" + key_size = 571 + + +@utils.register_interface(EllipticCurve) +class SECT409R1(object): + name = "sect409r1" + key_size = 409 + + +@utils.register_interface(EllipticCurve) +class SECT283R1(object): + name = "sect283r1" + key_size = 283 + + +@utils.register_interface(EllipticCurve) +class SECT233R1(object): + name = "sect233r1" + key_size = 233 + + +@utils.register_interface(EllipticCurve) +class SECT163R2(object): + name = "sect163r2" + key_size = 163 + + +@utils.register_interface(EllipticCurve) +class SECT571K1(object): + name = "sect571k1" + key_size = 571 + + +@utils.register_interface(EllipticCurve) +class SECT409K1(object): + name = "sect409k1" + key_size = 409 + + +@utils.register_interface(EllipticCurve) +class SECT283K1(object): + name = "sect283k1" + key_size = 283 + + +@utils.register_interface(EllipticCurve) +class SECT233K1(object): + name = "sect233k1" + key_size = 233 + + +@utils.register_interface(EllipticCurve) +class SECT163K1(object): + name = "sect163k1" + key_size = 163 + + +@utils.register_interface(EllipticCurve) +class SECP521R1(object): + name = "secp521r1" + key_size = 521 + + +@utils.register_interface(EllipticCurve) +class SECP384R1(object): + name = "secp384r1" + key_size = 384 + + +@utils.register_interface(EllipticCurve) +class SECP256R1(object): + name = "secp256r1" + key_size = 256 + + +@utils.register_interface(EllipticCurve) +class SECP256K1(object): + name = "secp256k1" + key_size = 256 + + +@utils.register_interface(EllipticCurve) +class SECP224R1(object): + name = "secp224r1" + key_size = 224 + + +@utils.register_interface(EllipticCurve) +class SECP192R1(object): + name = "secp192r1" + key_size = 192 + + +_CURVE_TYPES = { + "prime192v1": SECP192R1, + "prime256v1": SECP256R1, + + "secp192r1": SECP192R1, + "secp224r1": SECP224R1, + "secp256r1": SECP256R1, + "secp384r1": SECP384R1, + "secp521r1": SECP521R1, + "secp256k1": SECP256K1, + + "sect163k1": SECT163K1, + "sect233k1": SECT233K1, + "sect283k1": SECT283K1, + "sect409k1": SECT409K1, + "sect571k1": SECT571K1, + + "sect163r2": SECT163R2, + "sect233r1": SECT233R1, + "sect283r1": SECT283R1, + "sect409r1": SECT409R1, + "sect571r1": SECT571R1, +} + + +@utils.register_interface(EllipticCurveSignatureAlgorithm) +class ECDSA(object): + def __init__(self, algorithm): + self._algorithm = algorithm + + algorithm = utils.read_only_property("_algorithm") + + +def generate_private_key(curve, backend): + return backend.generate_elliptic_curve_private_key(curve) + + +class EllipticCurvePublicNumbers(object): + def __init__(self, x, y, curve): + if ( + not isinstance(x, six.integer_types) or + not isinstance(y, six.integer_types) + ): + raise TypeError("x and y must be integers.") + + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must provide the EllipticCurve interface.") + + self._y = y + self._x = x + self._curve = curve + + def public_key(self, backend): + return backend.load_elliptic_curve_public_numbers(self) + + def encode_point(self): + # key_size is in bits. 
+        # Convert to bytes and round up
+        byte_length = (self.curve.key_size + 7) // 8
+        return (
+            b'\x04' + utils.int_to_bytes(self.x, byte_length) +
+            utils.int_to_bytes(self.y, byte_length)
+        )
+
+    @classmethod
+    def from_encoded_point(cls, curve, data):
+        if not isinstance(curve, EllipticCurve):
+            raise TypeError("curve must be an EllipticCurve instance")
+
+        if data.startswith(b'\x04'):
+            # key_size is in bits. Convert to bytes and round up
+            byte_length = (curve.key_size + 7) // 8
+            if len(data) == 2 * byte_length + 1:
+                x = utils.int_from_bytes(data[1:byte_length + 1], 'big')
+                y = utils.int_from_bytes(data[byte_length + 1:], 'big')
+                return cls(x, y, curve)
+            else:
+                raise ValueError('Invalid elliptic curve point data length')
+        else:
+            raise ValueError('Unsupported elliptic curve point type')
+
+    curve = utils.read_only_property("_curve")
+    x = utils.read_only_property("_x")
+    y = utils.read_only_property("_y")
+
+    def __eq__(self, other):
+        if not isinstance(other, EllipticCurvePublicNumbers):
+            return NotImplemented
+
+        return (
+            self.x == other.x and
+            self.y == other.y and
+            self.curve.name == other.curve.name and
+            self.curve.key_size == other.curve.key_size
+        )
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __hash__(self):
+        return hash((self.x, self.y, self.curve.name, self.curve.key_size))
+
+    def __repr__(self):
+        return (
+            "<EllipticCurvePublicNumbers(curve={0.curve.name}, x={0.x}, "
+            "y={0.y})>".format(self)
+        )
+
+
+class EllipticCurvePrivateNumbers(object):
+    def __init__(self, private_value, public_numbers):
+        if not isinstance(private_value, six.integer_types):
+            raise TypeError("private_value must be an integer.")
+
+        if not isinstance(public_numbers, EllipticCurvePublicNumbers):
+            raise TypeError(
+                "public_numbers must be an EllipticCurvePublicNumbers "
+                "instance."
+            )
+
+        self._private_value = private_value
+        self._public_numbers = public_numbers
+
+    def private_key(self, backend):
+        return backend.load_elliptic_curve_private_numbers(self)
+
+    private_value = utils.read_only_property("_private_value")
+    public_numbers = utils.read_only_property("_public_numbers")
+
+    def __eq__(self, other):
+        if not isinstance(other, EllipticCurvePrivateNumbers):
+            return NotImplemented
+
+        return (
+            self.private_value == other.private_value and
+            self.public_numbers == other.public_numbers
+        )
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __hash__(self):
+        return hash((self.private_value, self.public_numbers))
+
+
+class ECDH(object):
+    pass
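A round-trip sketch for the uncompressed point encoding handled above. The coordinates are arbitrary toy values; nothing in these methods checks curve membership:

numbers = EllipticCurvePublicNumbers(x=123, y=456, curve=SECP256R1())
data = numbers.encode_point()
# 0x04 prefix plus two fixed-width 32-byte big-endian coordinates.
assert data[0:1] == b'\x04' and len(data) == 2 * 32 + 1
decoded = EllipticCurvePublicNumbers.from_encoded_point(SECP256R1(), data)
assert (decoded.x, decoded.y) == (123, 456)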
+ """ + + +@utils.register_interface(AsymmetricPadding) +class PKCS1v15(object): + name = "EMSA-PKCS1-v1_5" + + +@utils.register_interface(AsymmetricPadding) +class PSS(object): + MAX_LENGTH = object() + name = "EMSA-PSS" + + def __init__(self, mgf, salt_length): + self._mgf = mgf + + if (not isinstance(salt_length, six.integer_types) and + salt_length is not self.MAX_LENGTH): + raise TypeError("salt_length must be an integer.") + + if salt_length is not self.MAX_LENGTH and salt_length < 0: + raise ValueError("salt_length must be zero or greater.") + + self._salt_length = salt_length + + +@utils.register_interface(AsymmetricPadding) +class OAEP(object): + name = "EME-OAEP" + + def __init__(self, mgf, algorithm, label): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._mgf = mgf + self._algorithm = algorithm + self._label = label + + +class MGF1(object): + MAX_LENGTH = object() + + def __init__(self, algorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._algorithm = algorithm diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py new file mode 100644 index 0000000..2cb8951 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py @@ -0,0 +1,364 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +from fractions import gcd + +import six + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.backends.interfaces import RSABackend + + +@six.add_metaclass(abc.ABCMeta) +class RSAPrivateKey(object): + @abc.abstractmethod + def signer(self, padding, algorithm): + """ + Returns an AsymmetricSignatureContext used for signing data. + """ + + @abc.abstractmethod + def decrypt(self, ciphertext, padding): + """ + Decrypts the provided ciphertext. + """ + + @abc.abstractproperty + def key_size(self): + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The RSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def sign(self, data, padding, algorithm): + """ + Signs the data. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RSAPrivateKeyWithSerialization(RSAPrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns an RSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + Returns the key serialized as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RSAPublicKey(object): + @abc.abstractmethod + def verifier(self, signature, padding, algorithm): + """ + Returns an AsymmetricVerificationContext used for verifying signatures. + """ + + @abc.abstractmethod + def encrypt(self, plaintext, padding): + """ + Encrypts the given plaintext. + """ + + @abc.abstractproperty + def key_size(self): + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_numbers(self): + """ + Returns an RSAPublicNumbers + """ + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + Returns the key serialized as bytes. 
+ """ + + @abc.abstractmethod + def verify(self, signature, data, padding, algorithm): + """ + Verifies the signature of the data. + """ + + +RSAPublicKeyWithSerialization = RSAPublicKey + + +def generate_private_key(public_exponent, key_size, backend): + if not isinstance(backend, RSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement RSABackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + _verify_rsa_parameters(public_exponent, key_size) + return backend.generate_rsa_private_key(public_exponent, key_size) + + +def _verify_rsa_parameters(public_exponent, key_size): + if public_exponent < 3: + raise ValueError("public_exponent must be >= 3.") + + if public_exponent & 1 == 0: + raise ValueError("public_exponent must be odd.") + + if key_size < 512: + raise ValueError("key_size must be at least 512-bits.") + + +def _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp, + public_exponent, modulus): + if modulus < 3: + raise ValueError("modulus must be >= 3.") + + if p >= modulus: + raise ValueError("p must be < modulus.") + + if q >= modulus: + raise ValueError("q must be < modulus.") + + if dmp1 >= modulus: + raise ValueError("dmp1 must be < modulus.") + + if dmq1 >= modulus: + raise ValueError("dmq1 must be < modulus.") + + if iqmp >= modulus: + raise ValueError("iqmp must be < modulus.") + + if private_exponent >= modulus: + raise ValueError("private_exponent must be < modulus.") + + if public_exponent < 3 or public_exponent >= modulus: + raise ValueError("public_exponent must be >= 3 and < modulus.") + + if public_exponent & 1 == 0: + raise ValueError("public_exponent must be odd.") + + if dmp1 & 1 == 0: + raise ValueError("dmp1 must be odd.") + + if dmq1 & 1 == 0: + raise ValueError("dmq1 must be odd.") + + if p * q != modulus: + raise ValueError("p*q must equal modulus.") + + +def _check_public_key_components(e, n): + if n < 3: + raise ValueError("n must be >= 3.") + + if e < 3 or e >= n: + raise ValueError("e must be >= 3 and < n.") + + if e & 1 == 0: + raise ValueError("e must be odd.") + + +def _modinv(e, m): + """ + Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1 + """ + x1, y1, x2, y2 = 1, 0, 0, 1 + a, b = e, m + while b > 0: + q, r = divmod(a, b) + xn, yn = x1 - q * x2, y1 - q * y2 + a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn + return x1 % m + + +def rsa_crt_iqmp(p, q): + """ + Compute the CRT (q ** -1) % p value from RSA primes p and q. + """ + return _modinv(q, p) + + +def rsa_crt_dmp1(private_exponent, p): + """ + Compute the CRT private_exponent % (p - 1) value from the RSA + private_exponent and p. + """ + return private_exponent % (p - 1) + + +def rsa_crt_dmq1(private_exponent, q): + """ + Compute the CRT private_exponent % (q - 1) value from the RSA + private_exponent and q. + """ + return private_exponent % (q - 1) + + +# Controls the number of iterations rsa_recover_prime_factors will perform +# to obtain the prime factors. Each iteration increments by 2 so the actual +# maximum attempts is half this number. +_MAX_RECOVERY_ATTEMPTS = 1000 + + +def rsa_recover_prime_factors(n, e, d): + """ + Compute factors p and q from the private exponent d. We assume that n has + no more than two factors. This function is adapted from code in PyCrypto. + """ + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d * e - 1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t % 2 == 0: + t = t // 2 + # Cycle through all multiplicative inverses in Zn. 
+    # The algorithm is non-deterministic, but there is a 50% chance
+    # any candidate a leads to successful factoring.
+    # See "Digitalized Signatures and Public Key Functions as Intractable
+    # as Factorization", M. Rabin, 1979
+    spotted = False
+    a = 2
+    while not spotted and a < _MAX_RECOVERY_ATTEMPTS:
+        k = t
+        # Cycle through all values a^{t*2^i}=a^k
+        while k < ktot:
+            cand = pow(a, k, n)
+            # Check if a^k is a non-trivial root of unity (mod n)
+            if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1:
+                # We have found a number such that (cand-1)(cand+1)=0 (mod n).
+                # Either of the terms divides n.
+                p = gcd(cand + 1, n)
+                spotted = True
+                break
+            k *= 2
+        # This value was not any good... let's try another!
+        a += 2
+    if not spotted:
+        raise ValueError("Unable to compute factors p and q from exponent d.")
+    # Found!
+    q, r = divmod(n, p)
+    assert r == 0
+
+    return (p, q)
+
+
+class RSAPrivateNumbers(object):
+    def __init__(self, p, q, d, dmp1, dmq1, iqmp,
+                 public_numbers):
+        if (
+            not isinstance(p, six.integer_types) or
+            not isinstance(q, six.integer_types) or
+            not isinstance(d, six.integer_types) or
+            not isinstance(dmp1, six.integer_types) or
+            not isinstance(dmq1, six.integer_types) or
+            not isinstance(iqmp, six.integer_types)
+        ):
+            raise TypeError(
+                "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must"
+                " all be integers."
+            )
+
+        if not isinstance(public_numbers, RSAPublicNumbers):
+            raise TypeError(
+                "RSAPrivateNumbers public_numbers must be an RSAPublicNumbers"
+                " instance."
+            )
+
+        self._p = p
+        self._q = q
+        self._d = d
+        self._dmp1 = dmp1
+        self._dmq1 = dmq1
+        self._iqmp = iqmp
+        self._public_numbers = public_numbers
+
+    p = utils.read_only_property("_p")
+    q = utils.read_only_property("_q")
+    d = utils.read_only_property("_d")
+    dmp1 = utils.read_only_property("_dmp1")
+    dmq1 = utils.read_only_property("_dmq1")
+    iqmp = utils.read_only_property("_iqmp")
+    public_numbers = utils.read_only_property("_public_numbers")
+
+    def private_key(self, backend):
+        return backend.load_rsa_private_numbers(self)
+
+    def __eq__(self, other):
+        if not isinstance(other, RSAPrivateNumbers):
+            return NotImplemented
+
+        return (
+            self.p == other.p and
+            self.q == other.q and
+            self.d == other.d and
+            self.dmp1 == other.dmp1 and
+            self.dmq1 == other.dmq1 and
+            self.iqmp == other.iqmp and
+            self.public_numbers == other.public_numbers
+        )
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __hash__(self):
+        return hash((
+            self.p,
+            self.q,
+            self.d,
+            self.dmp1,
+            self.dmq1,
+            self.iqmp,
+            self.public_numbers,
+        ))
+
+
+class RSAPublicNumbers(object):
+    def __init__(self, e, n):
+        if (
+            not isinstance(e, six.integer_types) or
+            not isinstance(n, six.integer_types)
+        ):
+            raise TypeError("RSAPublicNumbers arguments must be integers.")
+
+        self._e = e
+        self._n = n
+
+    e = utils.read_only_property("_e")
+    n = utils.read_only_property("_n")
+
+    def public_key(self, backend):
+        return backend.load_rsa_public_numbers(self)
+
+    def __repr__(self):
+        return "<RSAPublicNumbers(e={0.e}, n={0.n})>".format(self)
+
+    def __eq__(self, other):
+        if not isinstance(other, RSAPublicNumbers):
+            return NotImplemented
+
+        return self.e == other.e and self.n == other.n
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __hash__(self):
+        return hash((self.e, self.n))
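A worked example for the CRT helpers and factor recovery above, using the classic textbook key (p=61, q=53, e=17). It runs without any backend; values this small are for illustration only:

# Textbook parameters: n = 3233, phi(n) = 3120, d = 17^-1 mod 3120 = 2753.
p, q, e = 61, 53, 17
n = p * q
d = _modinv(e, (p - 1) * (q - 1))  # 2753

assert rsa_crt_dmp1(d, p) == d % (p - 1)
assert rsa_crt_dmq1(d, q) == d % (q - 1)
assert (rsa_crt_iqmp(p, q) * q) % p == 1
# Recovery may return the factors in either order.
assert rsa_recover_prime_factors(n, e, d) in ((61, 53), (53, 61))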
diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py
new file mode 100644
index 0000000..5b27654
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py
@@ -0,0 +1,71 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import warnings
+
+from pyasn1.codec.der import decoder, encoder
+from pyasn1.error import PyAsn1Error
+from pyasn1.type import namedtype, univ
+
+import six
+
+from cryptography import utils
+
+
+class _DSSSigValue(univ.Sequence):
+    componentType = namedtype.NamedTypes(
+        namedtype.NamedType('r', univ.Integer()),
+        namedtype.NamedType('s', univ.Integer())
+    )
+
+
+def decode_rfc6979_signature(signature):
+    warnings.warn(
+        "decode_rfc6979_signature is deprecated and will "
+        "be removed in a future version, use decode_dss_signature instead.",
+        utils.DeprecatedIn10,
+        stacklevel=2
+    )
+    return decode_dss_signature(signature)
+
+
+def decode_dss_signature(signature):
+    try:
+        data, remaining = decoder.decode(signature, asn1Spec=_DSSSigValue())
+    except PyAsn1Error:
+        raise ValueError("Invalid signature data. Unable to decode ASN.1")
+
+    if remaining:
+        raise ValueError(
+            "The signature contains bytes after the end of the ASN.1 sequence."
+        )
+
+    r = int(data.getComponentByName('r'))
+    s = int(data.getComponentByName('s'))
+    return (r, s)
+
+
+def encode_rfc6979_signature(r, s):
+    warnings.warn(
+        "encode_rfc6979_signature is deprecated and will "
+        "be removed in a future version, use encode_dss_signature instead.",
+        utils.DeprecatedIn10,
+        stacklevel=2
+    )
+    return encode_dss_signature(r, s)
+
+
+def encode_dss_signature(r, s):
+    if (
+        not isinstance(r, six.integer_types) or
+        not isinstance(s, six.integer_types)
+    ):
+        raise ValueError("Both r and s must be integers")
+
+    sig = _DSSSigValue()
+    sig.setComponentByName('r', r)
+    sig.setComponentByName('s', s)
+    return encoder.encode(sig)
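A round-trip sketch for the encode_dss_signature/decode_dss_signature helpers above; it needs only pyasn1, no backend, and the r and s values are arbitrary:

r, s = 65537, 12345
sig = encode_dss_signature(r, s)  # DER: SEQUENCE of two INTEGERs
assert decode_dss_signature(sig) == (65537, 12345)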
diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py
new file mode 100644
index 0000000..b5dd0ed
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py
@@ -0,0 +1,20 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography.hazmat.primitives.ciphers.base import (
+    AEADCipherContext, AEADEncryptionContext, BlockCipherAlgorithm, Cipher,
+    CipherAlgorithm, CipherContext
+)
+
+
+__all__ = [
+    "Cipher",
+    "CipherAlgorithm",
+    "BlockCipherAlgorithm",
+    "CipherContext",
+    "AEADCipherContext",
+    "AEADEncryptionContext",
+]
diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py
new file mode 100644
index 0000000..c193f79
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py
@@ -0,0 +1,140 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.hazmat.primitives.ciphers import (
+    BlockCipherAlgorithm, CipherAlgorithm
+)
+
+
+def _verify_key_size(algorithm, key):
+    # Verify that the key size matches the expected key size
+    if len(key) * 8 not in algorithm.key_sizes:
+        raise ValueError("Invalid key size ({0}) for {1}.".format(
+            len(key) * 8, algorithm.name
+        ))
+    return key
+
+
+@utils.register_interface(BlockCipherAlgorithm)
+@utils.register_interface(CipherAlgorithm)
+class AES(object):
+    name = "AES"
+    block_size = 128
+    key_sizes = frozenset([128, 192, 256])
+
+    def __init__(self, key):
+        self.key = _verify_key_size(self, key)
+
+    @property
+    def key_size(self):
+        return len(self.key) * 8
+
+
+@utils.register_interface(BlockCipherAlgorithm)
+@utils.register_interface(CipherAlgorithm)
+class Camellia(object):
+    name = "camellia"
+    block_size = 128
+    key_sizes = frozenset([128, 192, 256])
+
+    def __init__(self, key):
+        self.key = _verify_key_size(self, key)
+
+    @property
+    def key_size(self):
+        return len(self.key) * 8
+
+
+@utils.register_interface(BlockCipherAlgorithm)
+@utils.register_interface(CipherAlgorithm)
+class TripleDES(object):
+    name = "3DES"
+    block_size = 64
+    key_sizes = frozenset([64, 128, 192])
+
+    def __init__(self, key):
+        if len(key) == 8:
+            key += key + key
+        elif len(key) == 16:
+            key += key[:8]
+        self.key = _verify_key_size(self, key)
+
+    @property
+    def key_size(self):
+        return len(self.key) * 8
+
+
+@utils.register_interface(BlockCipherAlgorithm)
+@utils.register_interface(CipherAlgorithm)
+class Blowfish(object):
+    name = "Blowfish"
+    block_size = 64
+    key_sizes = frozenset(range(32, 449, 8))
+
+    def __init__(self, key):
+        self.key = _verify_key_size(self, key)
+
+    @property
+    def key_size(self):
+        return len(self.key) * 8
+
+
+@utils.register_interface(BlockCipherAlgorithm)
+@utils.register_interface(CipherAlgorithm)
+class CAST5(object):
+    name = "CAST5"
+    block_size = 64
+    key_sizes = frozenset(range(40, 129, 8))
+
+    def __init__(self, key):
+        self.key = _verify_key_size(self, key)
+
+    @property
+    def key_size(self):
+        return len(self.key) * 8
+
+
+@utils.register_interface(CipherAlgorithm)
+class ARC4(object):
+    name = "RC4"
+    key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256])
+
+    def __init__(self, key):
+        self.key = _verify_key_size(self, key)
+
+    @property
+    def key_size(self):
+        return len(self.key) * 8
+
+
+@utils.register_interface(CipherAlgorithm)
+class IDEA(object):
+    name = "IDEA"
+    block_size = 64
+    key_sizes = frozenset([128])
+
+    def __init__(self, key):
+        self.key = _verify_key_size(self, key)
+
+    @property
+    def key_size(self):
+        return len(self.key) * 8
+
+
+@utils.register_interface(BlockCipherAlgorithm)
+@utils.register_interface(CipherAlgorithm)
+class SEED(object):
+    name = "SEED"
+    block_size = 128
+    key_sizes = frozenset([128])
+
+    def __init__(self, key):
+        self.key = _verify_key_size(self, key)
+
+    @property
+    def key_size(self):
+        return len(self.key) * 8
diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/base.py
new file mode 100644
index 0000000..496975a
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/base.py
@@ -0,0 +1,203 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography import utils
+from cryptography.exceptions import (
+    AlreadyFinalized, AlreadyUpdated, NotYetFinalized, UnsupportedAlgorithm,
+    _Reasons
+)
+from cryptography.hazmat.backends.interfaces import CipherBackend
+from cryptography.hazmat.primitives.ciphers import modes
+
+
+@six.add_metaclass(abc.ABCMeta)
+class CipherAlgorithm(object):
+    @abc.abstractproperty
+    def name(self):
+        """
+        A string naming this algorithm (e.g. "AES", "Camellia").
+        """
+
+    @abc.abstractproperty
+    def key_size(self):
+        """
+        The size of the key being used as an integer in bits (e.g. 128, 256).
+        """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class BlockCipherAlgorithm(object):
+    @abc.abstractproperty
+    def block_size(self):
+        """
+        The size of a block as an integer in bits (e.g. 64, 128).
+        """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class CipherContext(object):
+    @abc.abstractmethod
+    def update(self, data):
+        """
+        Processes the provided bytes through the cipher and returns the
+        results as bytes.
+        """
+
+    @abc.abstractmethod
+    def finalize(self):
+        """
+        Returns the results of processing the final block as bytes.
+        """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class AEADCipherContext(object):
+    @abc.abstractmethod
+    def authenticate_additional_data(self, data):
+        """
+        Authenticates the provided bytes.
+        """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class AEADEncryptionContext(object):
+    @abc.abstractproperty
+    def tag(self):
+        """
+        Returns tag bytes. This is only available after encryption is
+        finalized.
+        """
+
+
+class Cipher(object):
+    def __init__(self, algorithm, mode, backend):
+        if not isinstance(backend, CipherBackend):
+            raise UnsupportedAlgorithm(
+                "Backend object does not implement CipherBackend.",
+                _Reasons.BACKEND_MISSING_INTERFACE
+            )
+
+        if not isinstance(algorithm, CipherAlgorithm):
+            raise TypeError("Expected interface of CipherAlgorithm.")
+
+        if mode is not None:
+            mode.validate_for_algorithm(algorithm)
+
+        self.algorithm = algorithm
+        self.mode = mode
+        self._backend = backend
+
+    def encryptor(self):
+        if isinstance(self.mode, modes.ModeWithAuthenticationTag):
+            if self.mode.tag is not None:
+                raise ValueError(
+                    "Authentication tag must be None when encrypting."
+                )
+        ctx = self._backend.create_symmetric_encryption_ctx(
+            self.algorithm, self.mode
+        )
+        return self._wrap_ctx(ctx, encrypt=True)
+
+    def decryptor(self):
+        if isinstance(self.mode, modes.ModeWithAuthenticationTag):
+            if self.mode.tag is None:
+                raise ValueError(
+                    "Authentication tag must be provided when decrypting."
+ ) + ctx = self._backend.create_symmetric_decryption_ctx( + self.algorithm, self.mode + ) + return self._wrap_ctx(ctx, encrypt=False) + + def _wrap_ctx(self, ctx, encrypt): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): + if encrypt: + return _AEADEncryptionContext(ctx) + else: + return _AEADCipherContext(ctx) + else: + return _CipherContext(ctx) + + +@utils.register_interface(CipherContext) +class _CipherContext(object): + def __init__(self, ctx): + self._ctx = ctx + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return self._ctx.update(data) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + data = self._ctx.finalize() + self._ctx = None + return data + + +@utils.register_interface(AEADCipherContext) +@utils.register_interface(CipherContext) +class _AEADCipherContext(object): + def __init__(self, ctx): + self._ctx = ctx + self._bytes_processed = 0 + self._aad_bytes_processed = 0 + self._tag = None + self._updated = False + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + self._updated = True + self._bytes_processed += len(data) + if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES: + raise ValueError( + "{0} has a maximum encrypted byte limit of {1}".format( + self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES + ) + ) + + return self._ctx.update(data) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + data = self._ctx.finalize() + self._tag = self._ctx.tag + self._ctx = None + return data + + def authenticate_additional_data(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + if self._updated: + raise AlreadyUpdated("Update has been called on this context.") + + self._aad_bytes_processed += len(data) + if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES: + raise ValueError( + "{0} has a maximum AAD byte limit of {1}".format( + self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES + ) + ) + + self._ctx.authenticate_additional_data(data) + + +@utils.register_interface(AEADEncryptionContext) +class _AEADEncryptionContext(_AEADCipherContext): + @property + def tag(self): + if self._ctx is not None: + raise NotYetFinalized("You must finalize encryption before " + "getting the tag.") + return self._tag diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/modes.py new file mode 100644 index 0000000..802e544 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/modes.py @@ -0,0 +1,185 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +@six.add_metaclass(abc.ABCMeta) +class Mode(object): + @abc.abstractproperty + def name(self): + """ + A string naming this mode (e.g. "ECB", "CBC"). + """ + + @abc.abstractmethod + def validate_for_algorithm(self, algorithm): + """ + Checks that all the necessary invariants of this (mode, algorithm) + combination are met. 
+ """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithInitializationVector(object): + @abc.abstractproperty + def initialization_vector(self): + """ + The value of the initialization vector for this mode as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithNonce(object): + @abc.abstractproperty + def nonce(self): + """ + The value of the nonce for this mode as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithAuthenticationTag(object): + @abc.abstractproperty + def tag(self): + """ + The value of the tag supplied to the constructor of this mode. + """ + + +def _check_iv_length(self, algorithm): + if len(self.initialization_vector) * 8 != algorithm.block_size: + raise ValueError("Invalid IV size ({0}) for {1}.".format( + len(self.initialization_vector), self.name + )) + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class CBC(object): + name = "CBC" + + def __init__(self, initialization_vector): + if not isinstance(initialization_vector, bytes): + raise TypeError("initialization_vector must be bytes") + + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_length + + +@utils.register_interface(Mode) +class ECB(object): + name = "ECB" + + def validate_for_algorithm(self, algorithm): + pass + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class OFB(object): + name = "OFB" + + def __init__(self, initialization_vector): + if not isinstance(initialization_vector, bytes): + raise TypeError("initialization_vector must be bytes") + + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_length + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class CFB(object): + name = "CFB" + + def __init__(self, initialization_vector): + if not isinstance(initialization_vector, bytes): + raise TypeError("initialization_vector must be bytes") + + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_length + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class CFB8(object): + name = "CFB8" + + def __init__(self, initialization_vector): + if not isinstance(initialization_vector, bytes): + raise TypeError("initialization_vector must be bytes") + + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_length + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithNonce) +class CTR(object): + name = "CTR" + + def __init__(self, nonce): + if not isinstance(nonce, bytes): + raise TypeError("nonce must be bytes") + + self._nonce = nonce + + nonce = utils.read_only_property("_nonce") + + def validate_for_algorithm(self, algorithm): + if len(self.nonce) * 8 != algorithm.block_size: + raise ValueError("Invalid nonce size ({0}) for {1}.".format( + len(self.nonce), self.name + )) + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +@utils.register_interface(ModeWithAuthenticationTag) +class GCM(object): + name = "GCM" + _MAX_ENCRYPTED_BYTES = (2 ** 39 - 256) // 8 + _MAX_AAD_BYTES = (2 ** 64) // 8 + + def 
__init__(self, initialization_vector, tag=None, min_tag_length=16):
+        # len(initialization_vector) must be in [1, 2 ** 64), but it's
+        # impossible to actually construct a bytes object that large, so we
+        # don't check for it
+        if min_tag_length < 4:
+            raise ValueError("min_tag_length must be >= 4")
+        if tag is not None and len(tag) < min_tag_length:
+            raise ValueError(
+                "Authentication tag must be {0} bytes or longer.".format(
+                    min_tag_length)
+            )
+
+        if not isinstance(initialization_vector, bytes):
+            raise TypeError("initialization_vector must be bytes")
+
+        if tag is not None and not isinstance(tag, bytes):
+            raise TypeError("tag must be bytes or None")
+
+        self._initialization_vector = initialization_vector
+        self._tag = tag
+
+    tag = utils.read_only_property("_tag")
+    initialization_vector = utils.read_only_property("_initialization_vector")
+
+    def validate_for_algorithm(self, algorithm):
+        pass
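An AEAD usage sketch tying the GCM mode above to the Cipher machinery from base.py. It assumes the default_backend entry point from elsewhere in this tree; the key, nonce and data are illustrative:

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from cryptography.hazmat.primitives.ciphers.modes import GCM

key = os.urandom(32)
iv = os.urandom(12)  # a 96-bit nonce is the conventional GCM choice

encryptor = Cipher(AES(key), GCM(iv), default_backend()).encryptor()
encryptor.authenticate_additional_data(b"header")
ct = encryptor.update(b"secret") + encryptor.finalize()

# Decryption requires the tag up front, per the check in decryptor() above.
decryptor = Cipher(
    AES(key), GCM(iv, tag=encryptor.tag), default_backend()
).decryptor()
decryptor.authenticate_additional_data(b"header")
assert decryptor.update(ct) + decryptor.finalize() == b"secret"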
diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/cmac.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/cmac.py
new file mode 100644
index 0000000..c2038a3
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/cmac.py
@@ -0,0 +1,66 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.exceptions import (
+    AlreadyFinalized, UnsupportedAlgorithm, _Reasons
+)
+from cryptography.hazmat.backends.interfaces import CMACBackend
+from cryptography.hazmat.primitives import ciphers, interfaces
+
+
+@utils.register_interface(interfaces.MACContext)
+class CMAC(object):
+    def __init__(self, algorithm, backend, ctx=None):
+        if not isinstance(backend, CMACBackend):
+            raise UnsupportedAlgorithm(
+                "Backend object does not implement CMACBackend.",
+                _Reasons.BACKEND_MISSING_INTERFACE
+            )
+
+        if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
+            raise TypeError(
+                "Expected instance of BlockCipherAlgorithm."
+            )
+        self._algorithm = algorithm
+
+        self._backend = backend
+        if ctx is None:
+            self._ctx = self._backend.create_cmac_ctx(self._algorithm)
+        else:
+            self._ctx = ctx
+
+    def update(self, data):
+        if self._ctx is None:
+            raise AlreadyFinalized("Context was already finalized.")
+        if not isinstance(data, bytes):
+            raise TypeError("data must be bytes.")
+        self._ctx.update(data)
+
+    def finalize(self):
+        if self._ctx is None:
+            raise AlreadyFinalized("Context was already finalized.")
+        digest = self._ctx.finalize()
+        self._ctx = None
+        return digest
+
+    def verify(self, signature):
+        if not isinstance(signature, bytes):
+            raise TypeError("signature must be bytes.")
+        if self._ctx is None:
+            raise AlreadyFinalized("Context was already finalized.")
+
+        ctx, self._ctx = self._ctx, None
+        ctx.verify(signature)
+
+    def copy(self):
+        if self._ctx is None:
+            raise AlreadyFinalized("Context was already finalized.")
+        return CMAC(
+            self._algorithm,
+            backend=self._backend,
+            ctx=self._ctx.copy()
+        )
diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/constant_time.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/constant_time.py
new file mode 100644
index 0000000..5a682ca
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/constant_time.py
@@ -0,0 +1,26 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import hmac
+
+from cryptography.hazmat.bindings._constant_time import lib
+
+
+if hasattr(hmac, "compare_digest"):
+    def bytes_eq(a, b):
+        if not isinstance(a, bytes) or not isinstance(b, bytes):
+            raise TypeError("a and b must be bytes.")
+
+        return hmac.compare_digest(a, b)
+
+else:
+    def bytes_eq(a, b):
+        if not isinstance(a, bytes) or not isinstance(b, bytes):
+            raise TypeError("a and b must be bytes.")
+
+        return lib.Cryptography_constant_time_bytes_eq(
+            a, len(a), b, len(b)
+        ) == 1
diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/hashes.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/hashes.py
new file mode 100644
index 0000000..6bc8500
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/hashes.py
@@ -0,0 +1,163 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography import utils
+from cryptography.exceptions import (
+    AlreadyFinalized, UnsupportedAlgorithm, _Reasons
+)
+from cryptography.hazmat.backends.interfaces import HashBackend
+
+
+@six.add_metaclass(abc.ABCMeta)
+class HashAlgorithm(object):
+    @abc.abstractproperty
+    def name(self):
+        """
+        A string naming this algorithm (e.g. "sha256", "md5").
+        """
+
+    @abc.abstractproperty
+    def digest_size(self):
+        """
+        The size of the resulting digest in bytes.
+        """
+
+    @abc.abstractproperty
+    def block_size(self):
+        """
+        The internal block size of the hash algorithm in bytes.
+        """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class HashContext(object):
+    @abc.abstractproperty
+    def algorithm(self):
+        """
+        A HashAlgorithm that will be used by this context.
+        """
+
+    @abc.abstractmethod
+    def update(self, data):
+        """
+        Processes the provided bytes through the hash.
+ """ + + @abc.abstractmethod + def finalize(self): + """ + Finalizes the hash context and returns the hash digest as bytes. + """ + + @abc.abstractmethod + def copy(self): + """ + Return a HashContext that is a copy of the current context. + """ + + +@utils.register_interface(HashContext) +class Hash(object): + def __init__(self, algorithm, backend, ctx=None): + if not isinstance(backend, HashBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HashBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not isinstance(algorithm, HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + self._algorithm = algorithm + + self._backend = backend + + if ctx is None: + self._ctx = self._backend.create_hash_ctx(self.algorithm) + else: + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") + self._ctx.update(data) + + def copy(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return Hash( + self.algorithm, backend=self._backend, ctx=self._ctx.copy() + ) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + digest = self._ctx.finalize() + self._ctx = None + return digest + + +@utils.register_interface(HashAlgorithm) +class SHA1(object): + name = "sha1" + digest_size = 20 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class SHA224(object): + name = "sha224" + digest_size = 28 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class SHA256(object): + name = "sha256" + digest_size = 32 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class SHA384(object): + name = "sha384" + digest_size = 48 + block_size = 128 + + +@utils.register_interface(HashAlgorithm) +class SHA512(object): + name = "sha512" + digest_size = 64 + block_size = 128 + + +@utils.register_interface(HashAlgorithm) +class RIPEMD160(object): + name = "ripemd160" + digest_size = 20 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class Whirlpool(object): + name = "whirlpool" + digest_size = 64 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class MD5(object): + name = "md5" + digest_size = 16 + block_size = 64 diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/hmac.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/hmac.py new file mode 100644 index 0000000..15b9ee6 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/hmac.py @@ -0,0 +1,69 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import hashes, interfaces + + +@utils.register_interface(interfaces.MACContext) +@utils.register_interface(hashes.HashContext) +class HMAC(object): + def __init__(self, key, algorithm, backend, ctx=None): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + self._algorithm = algorithm + + self._backend = backend + self._key = key + if ctx is None: + self._ctx = self._backend.create_hmac_ctx(key, self.algorithm) + else: + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") + self._ctx.update(data) + + def copy(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return HMAC( + self._key, + self.algorithm, + backend=self._backend, + ctx=self._ctx.copy() + ) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + digest = self._ctx.finalize() + self._ctx = None + return digest + + def verify(self, signature): + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + ctx, self._ctx = self._ctx, None + ctx.verify(signature) diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/interfaces/__init__.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/interfaces/__init__.py new file mode 100644 index 0000000..4c95190 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/interfaces/__init__.py @@ -0,0 +1,37 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class MACContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes. + """ + + @abc.abstractmethod + def finalize(self): + """ + Returns the message authentication code as bytes. + """ + + @abc.abstractmethod + def copy(self): + """ + Return a MACContext that is a copy of the current context. + """ + + @abc.abstractmethod + def verify(self, signature): + """ + Checks if the generated message authentication code matches the + signature. + """ diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/__init__.py new file mode 100644 index 0000000..2d0724e --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/__init__.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
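A short usage sketch for the HMAC context defined above. The all-zero key is a placeholder (real keys should be random secrets), and default_backend is assumed from elsewhere in this tree:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, hmac

h = hmac.HMAC(b"\x00" * 32, hashes.SHA256(), backend=default_backend())
h.update(b"message to authenticate")
tag = h.finalize()

check = hmac.HMAC(b"\x00" * 32, hashes.SHA256(), backend=default_backend())
check.update(b"message to authenticate")
check.verify(tag)  # raises InvalidSignature on mismatch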
+ +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class KeyDerivationFunction(object): + @abc.abstractmethod + def derive(self, key_material): + """ + Deterministically generates and returns a new key based on the existing + key material. + """ + + @abc.abstractmethod + def verify(self, key_material, expected_key): + """ + Checks whether the key generated by the key material matches the + expected derived key. Raises an exception if they do not match. + """ diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py new file mode 100644 index 0000000..c6399e4 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py @@ -0,0 +1,125 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import struct + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.backends.interfaces import HashBackend +from cryptography.hazmat.primitives import constant_time, hashes, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n): + return struct.pack('>I', n) + + +def _common_args_checks(algorithm, length, otherinfo): + max_length = algorithm.digest_size * (2 ** 32 - 1) + if length > max_length: + raise ValueError( + "Can not derive keys larger than {0} bits.".format( + max_length + )) + if not (otherinfo is None or isinstance(otherinfo, bytes)): + raise TypeError("otherinfo must be bytes.") + + +def _concatkdf_derive(key_material, length, auxfn, otherinfo): + if not isinstance(key_material, bytes): + raise TypeError("key_material must be bytes.") + + output = [b""] + outlen = 0 + counter = 1 + + while (length > outlen): + h = auxfn() + h.update(_int_to_u32be(counter)) + h.update(key_material) + h.update(otherinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[:length] + + +@utils.register_interface(KeyDerivationFunction) +class ConcatKDFHash(object): + def __init__(self, algorithm, length, otherinfo, backend): + + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo = otherinfo + if self._otherinfo is None: + self._otherinfo = b"" + + if not isinstance(backend, HashBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HashBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + self._backend = backend + self._used = False + + def _hash(self): + return hashes.Hash(self._algorithm, self._backend) + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive(key_material, self._length, + self._hash, self._otherinfo) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +@utils.register_interface(KeyDerivationFunction) +class ConcatKDFHMAC(object): + def __init__(self, algorithm, length, salt, otherinfo, backend): + + _common_args_checks(algorithm, length, otherinfo) + 
self._algorithm = algorithm
+        self._length = length
+        self._otherinfo = otherinfo
+        if self._otherinfo is None:
+            self._otherinfo = b""
+
+        if not (salt is None or isinstance(salt, bytes)):
+            raise TypeError("salt must be bytes.")
+        if salt is None:
+            salt = b"\x00" * algorithm.block_size
+        self._salt = salt
+
+        if not isinstance(backend, HMACBackend):
+            raise UnsupportedAlgorithm(
+                "Backend object does not implement HMACBackend.",
+                _Reasons.BACKEND_MISSING_INTERFACE
+            )
+        self._backend = backend
+        self._used = False
+
+    def _hmac(self):
+        return hmac.HMAC(self._salt, self._algorithm, self._backend)
+
+    def derive(self, key_material):
+        if self._used:
+            raise AlreadyFinalized
+        self._used = True
+        return _concatkdf_derive(key_material, self._length,
+                                 self._hmac, self._otherinfo)
+
+    def verify(self, key_material, expected_key):
+        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+            raise InvalidKey
diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py
new file mode 100644
index 0000000..f738bbd
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py
@@ -0,0 +1,116 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import six
+
+from cryptography import utils
+from cryptography.exceptions import (
+    AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
+)
+from cryptography.hazmat.backends.interfaces import HMACBackend
+from cryptography.hazmat.primitives import constant_time, hmac
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+@utils.register_interface(KeyDerivationFunction)
+class HKDF(object):
+    def __init__(self, algorithm, length, salt, info, backend):
+        if not isinstance(backend, HMACBackend):
+            raise UnsupportedAlgorithm(
+                "Backend object does not implement HMACBackend.",
+                _Reasons.BACKEND_MISSING_INTERFACE
+            )
+
+        self._algorithm = algorithm
+
+        if not (salt is None or isinstance(salt, bytes)):
+            raise TypeError("salt must be bytes.")
+
+        if salt is None:
+            salt = b"\x00" * self._algorithm.digest_size
+
+        self._salt = salt
+
+        self._backend = backend
+
+        self._hkdf_expand = HKDFExpand(self._algorithm, length, info, backend)
+
+    def _extract(self, key_material):
+        h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend)
+        h.update(key_material)
+        return h.finalize()
+
+    def derive(self, key_material):
+        if not isinstance(key_material, bytes):
+            raise TypeError("key_material must be bytes.")
+
+        return self._hkdf_expand.derive(self._extract(key_material))
+
+    def verify(self, key_material, expected_key):
+        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+            raise InvalidKey
+
+
+@utils.register_interface(KeyDerivationFunction)
+class HKDFExpand(object):
+    def __init__(self, algorithm, length, info, backend):
+        if not isinstance(backend, HMACBackend):
+            raise UnsupportedAlgorithm(
+                "Backend object does not implement HMACBackend.",
+                _Reasons.BACKEND_MISSING_INTERFACE
+            )
+
+        self._algorithm = algorithm
+
+        self._backend = backend
+
+        max_length = 255 * algorithm.digest_size
+
+        if length > max_length:
+            raise ValueError(
+                "Can not derive keys larger than {0} octets.".format(
+                    max_length
+                ))
+
+        self._length = length
+
+        if not (info is None or isinstance(info, bytes)):
+            raise TypeError("info must be bytes.")
+
+        if info is None:
+            info = b""
+
+        self._info = info
+
+        self._used = False
+
+    def _expand(self, key_material):
+        output = [b""]
+        counter = 1
+
+        while self._algorithm.digest_size * (len(output) - 1) < self._length:
+            h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
+            h.update(output[-1])
+            h.update(self._info)
+            h.update(six.int2byte(counter))
+            output.append(h.finalize())
+            counter += 1
+
+        return b"".join(output)[:self._length]
+
+    def derive(self, key_material):
+        if not isinstance(key_material, bytes):
+            raise TypeError("key_material must be bytes.")
+
+        if self._used:
+            raise AlreadyFinalized
+
+        self._used = True
+        return self._expand(key_material)
+
+    def verify(self, key_material, expected_key):
+        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+            raise InvalidKey
diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py
new file mode 100644
index 0000000..29ac0fa
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py
@@ -0,0 +1,148 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from enum import Enum
+
+from six.moves import range
+
+from cryptography import utils
+from cryptography.exceptions import (
+    AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
+)
+from cryptography.hazmat.backends.interfaces import HMACBackend
+from cryptography.hazmat.primitives import constant_time, hashes, hmac
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+class Mode(Enum):
+    CounterMode = "ctr"
+
+
+class CounterLocation(Enum):
+    BeforeFixed = "before_fixed"
+    AfterFixed = "after_fixed"
+
+
+@utils.register_interface(KeyDerivationFunction)
+class KBKDFHMAC(object):
+    def __init__(self, algorithm, mode, length, rlen, llen,
+                 location, label, context, fixed, backend):
+        if not isinstance(backend, HMACBackend):
+            raise UnsupportedAlgorithm(
+                "Backend object does not implement HMACBackend.",
+                _Reasons.BACKEND_MISSING_INTERFACE
+            )
+
+        if not isinstance(algorithm, hashes.HashAlgorithm):
+            raise UnsupportedAlgorithm(
+                "Algorithm supplied is not a supported hash algorithm.",
+                _Reasons.UNSUPPORTED_HASH
+            )
+
+        if not backend.hmac_supported(algorithm):
+            raise UnsupportedAlgorithm(
+                "Algorithm supplied is not a supported hmac algorithm.",
+                _Reasons.UNSUPPORTED_HASH
+            )
+
+        if not isinstance(mode, Mode):
+            raise TypeError("mode must be of type Mode")
+
+        if not isinstance(location, CounterLocation):
+            raise TypeError("location must be of type CounterLocation")
+
+        if (label or context) and fixed:
+            raise ValueError("When supplying fixed data, "
+                             "label and context are ignored.")
+
+        if rlen is None or not self._valid_byte_length(rlen):
+            raise ValueError("rlen must be between 1 and 4")
+
+        if llen is None and fixed is None:
+            raise ValueError("Please specify an llen")
+
+        if llen is not None and not isinstance(llen, int):
+            raise TypeError("llen must be an integer")
+
+        if label is None:
+            label = b''
+
+        if context is None:
+            context = b''
+
+        if (not isinstance(label, bytes) or
+                not isinstance(context, bytes)):
+            raise TypeError('label and context must be of type bytes')
+
+        self._algorithm = algorithm
+
self._mode = mode + self._length = length + self._rlen = rlen + self._llen = llen + self._location = location + self._label = label + self._context = context + self._backend = backend + self._used = False + self._fixed_data = fixed + + def _valid_byte_length(self, value): + if not isinstance(value, int): + raise TypeError('value must be of type int') + + value_bin = utils.int_to_bytes(1, value) + if not 1 <= len(value_bin) <= 4: + return False + return True + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized + + if not isinstance(key_material, bytes): + raise TypeError('key_material must be bytes') + self._used = True + + # inverse floor division (equivalent to ceiling) + rounds = -(-self._length // self._algorithm.digest_size) + + output = [b''] + + # For counter mode, the number of iterations shall not be + # larger than 2^r-1, where r <= 32 is the binary length of the counter + # This ensures that the counter values used as an input to the + # PRF will not repeat during a particular call to the KDF function. + r_bin = utils.int_to_bytes(1, self._rlen) + if rounds > pow(2, len(r_bin) * 8) - 1: + raise ValueError('There are too many iterations.') + + for i in range(1, rounds + 1): + h = hmac.HMAC(key_material, self._algorithm, backend=self._backend) + + counter = utils.int_to_bytes(i, self._rlen) + if self._location == CounterLocation.BeforeFixed: + h.update(counter) + + h.update(self._generate_fixed_input()) + + if self._location == CounterLocation.AfterFixed: + h.update(counter) + + output.append(h.finalize()) + + return b''.join(output)[:self._length] + + def _generate_fixed_input(self): + if self._fixed_data and isinstance(self._fixed_data, bytes): + return self._fixed_data + + l = utils.int_to_bytes(self._length * 8, self._llen) + + return b"".join([self._label, b"\x00", self._context, l]) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py new file mode 100644 index 0000000..f8ce7a3 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py @@ -0,0 +1,58 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
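A sketch of how the KBKDFHMAC class above (NIST SP 800-108 in counter mode) is typically parameterized; illustrative only, and the label and context values are made-up placeholders. With fixed=None the class assembles the fixed input as label || 0x00 || context || L itself, so llen must be supplied.

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.kbkdf import (
    CounterLocation, KBKDFHMAC, Mode
)

kdf = KBKDFHMAC(
    algorithm=hashes.SHA256(),
    mode=Mode.CounterMode,
    length=32,                            # derive 32 bytes
    rlen=4,                               # 4-byte counter
    llen=4,                               # 4-byte encoding of L in the fixed input
    location=CounterLocation.BeforeFixed,
    label=b"KEK",                         # placeholder values
    context=b"application context",
    fixed=None,
    backend=default_backend(),
)
derived = kdf.derive(os.urandom(32))      # instances are single-use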
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import PBKDF2HMACBackend +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +@utils.register_interface(KeyDerivationFunction) +class PBKDF2HMAC(object): + def __init__(self, algorithm, length, salt, iterations, backend): + if not isinstance(backend, PBKDF2HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement PBKDF2HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not backend.pbkdf2_hmac_supported(algorithm): + raise UnsupportedAlgorithm( + "{0} is not supported for PBKDF2 by this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + self._used = False + self._algorithm = algorithm + self._length = length + if not isinstance(salt, bytes): + raise TypeError("salt must be bytes.") + self._salt = salt + self._iterations = iterations + self._backend = backend + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized("PBKDF2 instances can only be used once.") + self._used = True + + if not isinstance(key_material, bytes): + raise TypeError("key_material must be bytes.") + return self._backend.derive_pbkdf2_hmac( + self._algorithm, + self._length, + self._salt, + self._iterations, + key_material + ) + + def verify(self, key_material, expected_key): + derived_key = self.derive(key_material) + if not constant_time.bytes_eq(derived_key, expected_key): + raise InvalidKey("Keys do not match.") diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py new file mode 100644 index 0000000..83789b3 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py @@ -0,0 +1,70 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
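The PBKDF2HMAC class above is the usual choice for password-derived keys. A small sketch of the derive/verify pattern; the iteration count is an illustrative value, and each instance can only be used once.

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

salt = os.urandom(16)
kdf = PBKDF2HMAC(
    algorithm=hashes.SHA256(),
    length=32,
    salt=salt,
    iterations=100000,
    backend=default_backend(),
)
key = kdf.derive(b"correct horse battery staple")

# Checking a password later requires a fresh instance with the same salt
# and iteration count; verify() raises InvalidKey on a mismatch.
check = PBKDF2HMAC(hashes.SHA256(), 32, salt, 100000, default_backend())
check.verify(b"correct horse battery staple", key)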
+ +from __future__ import absolute_import, division, print_function + +import struct + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HashBackend +from cryptography.hazmat.primitives import constant_time, hashes +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n): + return struct.pack('>I', n) + + +@utils.register_interface(KeyDerivationFunction) +class X963KDF(object): + def __init__(self, algorithm, length, sharedinfo, backend): + + max_len = algorithm.digest_size * (2 ** 32 - 1) + if length > max_len: + raise ValueError( + "Can not derive keys larger than {0} bits.".format(max_len)) + if not (sharedinfo is None or isinstance(sharedinfo, bytes)): + raise TypeError("sharedinfo must be bytes.") + self._algorithm = algorithm + self._length = length + self._sharedinfo = sharedinfo + + if not isinstance(backend, HashBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HashBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + self._backend = backend + self._used = False + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized + self._used = True + + if not isinstance(key_material, bytes): + raise TypeError("key_material must be bytes.") + + output = [b""] + outlen = 0 + counter = 1 + + while self._length > outlen: + h = hashes.Hash(self._algorithm, self._backend) + h.update(key_material) + h.update(_int_to_u32be(counter)) + if self._sharedinfo is not None: + h.update(self._sharedinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[:self._length] + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/keywrap.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/keywrap.py new file mode 100644 index 0000000..6e79ab6 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/keywrap.py @@ -0,0 +1,85 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
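A sketch of the X963KDF class above, which expands a shared secret (typically the raw output of an ECDH exchange) into symmetric key material per ANSI X9.63. Illustrative only; the sharedinfo value is a placeholder.

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.x963kdf import X963KDF

shared_secret = os.urandom(32)            # stands in for an ECDH output

xkdf = X963KDF(
    algorithm=hashes.SHA256(),
    length=32,
    sharedinfo=b"example shared info",    # placeholder
    backend=default_backend(),
)
key = xkdf.derive(shared_secret)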
+ +from __future__ import absolute_import, division, print_function + +import struct + +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import ECB +from cryptography.hazmat.primitives.constant_time import bytes_eq + + +def aes_key_wrap(wrapping_key, key_to_wrap, backend): + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(key_to_wrap) < 16: + raise ValueError("The key to wrap must be at least 16 bytes") + + if len(key_to_wrap) % 8 != 0: + raise ValueError("The key to wrap must be a multiple of 8 bytes") + + # RFC 3394 Key Wrap - 2.2.1 (index method) + encryptor = Cipher(AES(wrapping_key), ECB(), backend).encryptor() + a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [key_to_wrap[i:i + 8] for i in range(0, len(key_to_wrap), 8)] + n = len(r) + for j in range(6): + for i in range(n): + # every encryption operation is a discrete 16 byte chunk (because + # AES has a 128-bit block size) and since we're using ECB it is + # safe to reuse the encryptor for the entire operation + b = encryptor.update(a + r[i]) + # pack/unpack are safe as these are always 64-bit chunks + a = struct.pack( + ">Q", struct.unpack(">Q", b[:8])[0] ^ ((n * j) + i + 1) + ) + r[i] = b[-8:] + + assert encryptor.finalize() == b"" + + return a + b"".join(r) + + +def aes_key_unwrap(wrapping_key, wrapped_key, backend): + if len(wrapped_key) < 24: + raise ValueError("Must be at least 24 bytes") + + if len(wrapped_key) % 8 != 0: + raise ValueError("The wrapped key must be a multiple of 8 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + # Implement RFC 3394 Key Unwrap - 2.2.2 (index method) + decryptor = Cipher(AES(wrapping_key), ECB(), backend).decryptor() + aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + + r = [wrapped_key[i:i + 8] for i in range(0, len(wrapped_key), 8)] + a = r.pop(0) + n = len(r) + for j in reversed(range(6)): + for i in reversed(range(n)): + # pack/unpack are safe as these are always 64-bit chunks + atr = struct.pack( + ">Q", struct.unpack(">Q", a)[0] ^ ((n * j) + i + 1) + ) + r[i] + # every decryption operation is a discrete 16 byte chunk so + # it is safe to reuse the decryptor for the entire operation + b = decryptor.update(atr) + a = b[:8] + r[i] = b[-8:] + + assert decryptor.finalize() == b"" + + if not bytes_eq(a, aiv): + raise InvalidUnwrap() + + return b"".join(r) + + +class InvalidUnwrap(Exception): + pass diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/padding.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/padding.py new file mode 100644 index 0000000..77fb8f8 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/padding.py @@ -0,0 +1,202 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils +from cryptography.exceptions import AlreadyFinalized +from cryptography.hazmat.bindings._padding import lib + + +@six.add_metaclass(abc.ABCMeta) +class PaddingContext(object): + @abc.abstractmethod + def update(self, data): + """ + Pads the provided bytes and returns any available data as bytes. 
+ """ + + @abc.abstractmethod + def finalize(self): + """ + Finalize the padding, returns bytes. + """ + + +def _byte_padding_check(block_size): + if not (0 <= block_size < 256): + raise ValueError("block_size must be in range(0, 256).") + + if block_size % 8 != 0: + raise ValueError("block_size must be a multiple of 8.") + + +def _byte_padding_update(buffer_, data, block_size): + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") + + buffer_ += data + + finished_blocks = len(buffer_) // (block_size // 8) + + result = buffer_[:finished_blocks * (block_size // 8)] + buffer_ = buffer_[finished_blocks * (block_size // 8):] + + return buffer_, result + + +def _byte_padding_pad(buffer_, block_size, paddingfn): + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + pad_size = block_size // 8 - len(buffer_) + return buffer_ + paddingfn(pad_size) + + +def _byte_unpadding_update(buffer_, data, block_size): + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") + + buffer_ += data + + finished_blocks = max(len(buffer_) // (block_size // 8) - 1, 0) + + result = buffer_[:finished_blocks * (block_size // 8)] + buffer_ = buffer_[finished_blocks * (block_size // 8):] + + return buffer_, result + + +def _byte_unpadding_check(buffer_, block_size, checkfn): + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + if len(buffer_) != block_size // 8: + raise ValueError("Invalid padding bytes.") + + valid = checkfn(buffer_, block_size // 8) + + if not valid: + raise ValueError("Invalid padding bytes.") + + pad_size = six.indexbytes(buffer_, -1) + return buffer_[:-pad_size] + + +class PKCS7(object): + def __init__(self, block_size): + _byte_padding_check(block_size) + self.block_size = block_size + + def padder(self): + return _PKCS7PaddingContext(self.block_size) + + def unpadder(self): + return _PKCS7UnpaddingContext(self.block_size) + + +@utils.register_interface(PaddingContext) +class _PKCS7PaddingContext(object): + def __init__(self, block_size): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data): + self._buffer, result = _byte_padding_update( + self._buffer, data, self.block_size) + return result + + def _padding(self, size): + return six.int2byte(size) * size + + def finalize(self): + result = _byte_padding_pad( + self._buffer, self.block_size, self._padding) + self._buffer = None + return result + + +@utils.register_interface(PaddingContext) +class _PKCS7UnpaddingContext(object): + def __init__(self, block_size): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data): + self._buffer, result = _byte_unpadding_update( + self._buffer, data, self.block_size) + return result + + def finalize(self): + result = _byte_unpadding_check( + self._buffer, self.block_size, + lib.Cryptography_check_pkcs7_padding) + self._buffer = None + return result + + +class ANSIX923(object): + def __init__(self, block_size): + _byte_padding_check(block_size) + self.block_size = block_size + + def padder(self): + return _ANSIX923PaddingContext(self.block_size) + + def unpadder(self): + return _ANSIX923UnpaddingContext(self.block_size) + + +@utils.register_interface(PaddingContext) 
+class _ANSIX923PaddingContext(object): + def __init__(self, block_size): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data): + self._buffer, result = _byte_padding_update( + self._buffer, data, self.block_size) + return result + + def _padding(self, size): + return six.int2byte(0) * (size - 1) + six.int2byte(size) + + def finalize(self): + result = _byte_padding_pad( + self._buffer, self.block_size, self._padding) + self._buffer = None + return result + + +@utils.register_interface(PaddingContext) +class _ANSIX923UnpaddingContext(object): + def __init__(self, block_size): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data): + self._buffer, result = _byte_unpadding_update( + self._buffer, data, self.block_size) + return result + + def finalize(self): + result = _byte_unpadding_check( + self._buffer, self.block_size, + lib.Cryptography_check_ansix923_padding) + self._buffer = None + return result diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/serialization.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/serialization.py new file mode 100644 index 0000000..992fd42 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/serialization.py @@ -0,0 +1,197 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +import base64 +import struct +from enum import Enum + +import six + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa + + +def load_pem_private_key(data, password, backend): + return backend.load_pem_private_key(data, password) + + +def load_pem_public_key(data, backend): + return backend.load_pem_public_key(data) + + +def load_der_private_key(data, password, backend): + return backend.load_der_private_key(data, password) + + +def load_der_public_key(data, backend): + return backend.load_der_public_key(data) + + +def load_ssh_public_key(data, backend): + key_parts = data.split(b' ', 2) + + if len(key_parts) < 2: + raise ValueError( + 'Key is not in the proper format or contains extra data.') + + key_type = key_parts[0] + + if key_type == b'ssh-rsa': + loader = _load_ssh_rsa_public_key + elif key_type == b'ssh-dss': + loader = _load_ssh_dss_public_key + elif key_type in [ + b'ecdsa-sha2-nistp256', b'ecdsa-sha2-nistp384', b'ecdsa-sha2-nistp521', + ]: + loader = _load_ssh_ecdsa_public_key + else: + raise UnsupportedAlgorithm('Key type is not supported.') + + key_body = key_parts[1] + + try: + decoded_data = base64.b64decode(key_body) + except TypeError: + raise ValueError('Key is not in the proper format.') + + inner_key_type, rest = _ssh_read_next_string(decoded_data) + + if inner_key_type != key_type: + raise ValueError( + 'Key header and key body contain different key type values.' 
+ ) + + return loader(key_type, rest, backend) + + +def _load_ssh_rsa_public_key(key_type, decoded_data, backend): + e, rest = _ssh_read_next_mpint(decoded_data) + n, rest = _ssh_read_next_mpint(rest) + + if rest: + raise ValueError('Key body contains extra bytes.') + + return rsa.RSAPublicNumbers(e, n).public_key(backend) + + +def _load_ssh_dss_public_key(key_type, decoded_data, backend): + p, rest = _ssh_read_next_mpint(decoded_data) + q, rest = _ssh_read_next_mpint(rest) + g, rest = _ssh_read_next_mpint(rest) + y, rest = _ssh_read_next_mpint(rest) + + if rest: + raise ValueError('Key body contains extra bytes.') + + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + + return public_numbers.public_key(backend) + + +def _load_ssh_ecdsa_public_key(expected_key_type, decoded_data, backend): + curve_name, rest = _ssh_read_next_string(decoded_data) + data, rest = _ssh_read_next_string(rest) + + if expected_key_type != b"ecdsa-sha2-" + curve_name: + raise ValueError( + 'Key header and key body contain different key type values.' + ) + + if rest: + raise ValueError('Key body contains extra bytes.') + + curve = { + b"nistp256": ec.SECP256R1, + b"nistp384": ec.SECP384R1, + b"nistp521": ec.SECP521R1, + }[curve_name]() + + if six.indexbytes(data, 0) != 4: + raise NotImplementedError( + "Compressed elliptic curve points are not supported" + ) + + numbers = ec.EllipticCurvePublicNumbers.from_encoded_point(curve, data) + return numbers.public_key(backend) + + +def _ssh_read_next_string(data): + """ + Retrieves the next RFC 4251 string value from the data. + + While the RFC calls these strings, in Python they are bytes objects. + """ + if len(data) < 4: + raise ValueError("Key is not in the proper format") + + str_len, = struct.unpack('>I', data[:4]) + if len(data) < str_len + 4: + raise ValueError("Key is not in the proper format") + + return data[4:4 + str_len], data[4 + str_len:] + + +def _ssh_read_next_mpint(data): + """ + Reads the next mpint from the data. + + Currently, all mpints are interpreted as unsigned. 
+ """ + mpint_data, rest = _ssh_read_next_string(data) + + return ( + utils.int_from_bytes(mpint_data, byteorder='big', signed=False), rest + ) + + +def _ssh_write_string(data): + return struct.pack(">I", len(data)) + data + + +def _ssh_write_mpint(value): + data = utils.int_to_bytes(value) + if six.indexbytes(data, 0) & 0x80: + data = b"\x00" + data + return _ssh_write_string(data) + + +class Encoding(Enum): + PEM = "PEM" + DER = "DER" + OpenSSH = "OpenSSH" + + +class PrivateFormat(Enum): + PKCS8 = "PKCS8" + TraditionalOpenSSL = "TraditionalOpenSSL" + + +class PublicFormat(Enum): + SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" + PKCS1 = "Raw PKCS#1" + OpenSSH = "OpenSSH" + + +@six.add_metaclass(abc.ABCMeta) +class KeySerializationEncryption(object): + pass + + +@utils.register_interface(KeySerializationEncryption) +class BestAvailableEncryption(object): + def __init__(self, password): + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + self.password = password + + +@utils.register_interface(KeySerializationEncryption) +class NoEncryption(object): + pass diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py new file mode 100644 index 0000000..e71f9e6 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + + +class InvalidToken(Exception): + pass diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py new file mode 100644 index 0000000..12bc766 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py @@ -0,0 +1,67 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+
+from __future__ import absolute_import, division, print_function
+
+import struct
+
+import six
+
+from cryptography.exceptions import (
+    UnsupportedAlgorithm, _Reasons
+)
+from cryptography.hazmat.backends.interfaces import HMACBackend
+from cryptography.hazmat.primitives import constant_time, hmac
+from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512
+from cryptography.hazmat.primitives.twofactor import InvalidToken
+from cryptography.hazmat.primitives.twofactor.utils import _generate_uri
+
+
+class HOTP(object):
+    def __init__(self, key, length, algorithm, backend):
+        if not isinstance(backend, HMACBackend):
+            raise UnsupportedAlgorithm(
+                "Backend object does not implement HMACBackend.",
+                _Reasons.BACKEND_MISSING_INTERFACE
+            )
+
+        if len(key) < 16:
+            raise ValueError("Key length has to be at least 128 bits.")
+
+        if not isinstance(length, six.integer_types):
+            raise TypeError("Length parameter must be an integer type.")
+
+        if length < 6 or length > 8:
+            raise ValueError("Length of HOTP has to be between 6 and 8.")
+
+        if not isinstance(algorithm, (SHA1, SHA256, SHA512)):
+            raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.")
+
+        self._key = key
+        self._length = length
+        self._algorithm = algorithm
+        self._backend = backend
+
+    def generate(self, counter):
+        truncated_value = self._dynamic_truncate(counter)
+        hotp = truncated_value % (10 ** self._length)
+        return "{0:0{1}}".format(hotp, self._length).encode()
+
+    def verify(self, hotp, counter):
+        if not constant_time.bytes_eq(self.generate(counter), hotp):
+            raise InvalidToken("Supplied HOTP value does not match.")
+
+    def _dynamic_truncate(self, counter):
+        ctx = hmac.HMAC(self._key, self._algorithm, self._backend)
+        ctx.update(struct.pack(">Q", counter))
+        hmac_value = ctx.finalize()
+
+        offset = six.indexbytes(hmac_value, len(hmac_value) - 1) & 0b1111
+        p = hmac_value[offset:offset + 4]
+        return struct.unpack(">I", p)[0] & 0x7fffffff
+
+    def get_provisioning_uri(self, account_name, counter, issuer):
+        return _generate_uri(self, "hotp", account_name, issuer, [
+            ("counter", int(counter)),
+        ])
diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/totp.py
new file mode 100644
index 0000000..6070590
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/totp.py
@@ -0,0 +1,39 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
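A usage sketch for the HOTP class above (RFC 4226). Illustrative only; the key length and digit count are example choices, and verify() raises InvalidToken when the supplied code does not match the counter value.

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor import InvalidToken
from cryptography.hazmat.primitives.twofactor.hotp import HOTP

key = os.urandom(20)                      # shared secret, at least 128 bits
hotp = HOTP(key, 6, SHA1(), default_backend())

token = hotp.generate(0)                  # 6-digit code for counter value 0
try:
    hotp.verify(token, 0)                 # silent on success
except InvalidToken:
    print("code rejected")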
+ +from __future__ import absolute_import, division, print_function + +from cryptography.exceptions import ( + UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.hazmat.primitives.twofactor.hotp import HOTP +from cryptography.hazmat.primitives.twofactor.utils import _generate_uri + + +class TOTP(object): + def __init__(self, key, length, algorithm, time_step, backend): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + self._time_step = time_step + self._hotp = HOTP(key, length, algorithm, backend) + + def generate(self, time): + counter = int(time / self._time_step) + return self._hotp.generate(counter) + + def verify(self, totp, time): + if not constant_time.bytes_eq(self.generate(time), totp): + raise InvalidToken("Supplied TOTP value does not match.") + + def get_provisioning_uri(self, account_name, issuer): + return _generate_uri(self._hotp, "totp", account_name, issuer, [ + ("period", int(self._time_step)), + ]) diff --git a/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/utils.py b/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/utils.py new file mode 100644 index 0000000..0ed8c4c --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/utils.py @@ -0,0 +1,30 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import base64 + +from six.moves.urllib.parse import quote, urlencode + + +def _generate_uri(hotp, type_name, account_name, issuer, extra_parameters): + parameters = [ + ("digits", hotp._length), + ("secret", base64.b32encode(hotp._key)), + ("algorithm", hotp._algorithm.name.upper()), + ] + + if issuer is not None: + parameters.append(("issuer", issuer)) + + parameters.extend(extra_parameters) + + uriparts = { + "type": type_name, + "label": ("%s:%s" % (quote(issuer), quote(account_name)) if issuer + else quote(account_name)), + "parameters": urlencode(parameters), + } + return "otpauth://{type}/{label}?{parameters}".format(**uriparts) diff --git a/lib/python3.4/site-packages/cryptography/utils.py b/lib/python3.4/site-packages/cryptography/utils.py new file mode 100644 index 0000000..d3e845a --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/utils.py @@ -0,0 +1,138 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +import binascii +import inspect +import struct +import sys +import warnings + + +# the functions deprecated in 1.0 and 1.4 are on an arbitrarily extended +# deprecation cycle and should not be removed until we agree on when that cycle +# ends. 
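Looking back at the totp.py module above: TOTP is just HOTP with the counter derived from wall-clock time. A sketch with a conventional 30-second step; the account and issuer strings are placeholders.

import os
import time

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor.totp import TOTP

key = os.urandom(20)
totp = TOTP(key, 6, SHA1(), 30, default_backend())

now = time.time()
token = totp.generate(now)
totp.verify(token, now)                   # raises InvalidToken on mismatch

# otpauth:// URI for provisioning an authenticator app (placeholder values)
uri = totp.get_provisioning_uri("alice@example.com", "Example Inc")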
+DeprecatedIn10 = DeprecationWarning +DeprecatedIn14 = DeprecationWarning + + +def read_only_property(name): + return property(lambda self: getattr(self, name)) + + +def register_interface(iface): + def register_decorator(klass): + verify_interface(iface, klass) + iface.register(klass) + return klass + return register_decorator + + +if hasattr(int, "from_bytes"): + int_from_bytes = int.from_bytes +else: + def int_from_bytes(data, byteorder, signed=False): + assert byteorder == 'big' + assert not signed + + if len(data) % 4 != 0: + data = (b'\x00' * (4 - (len(data) % 4))) + data + + result = 0 + + while len(data) > 0: + digit, = struct.unpack('>I', data[:4]) + result = (result << 32) + digit + # TODO: this is quadratic in the length of data + data = data[4:] + + return result + + +def int_to_bytes(integer, length=None): + hex_string = '%x' % integer + if length is None: + n = len(hex_string) + else: + n = length * 2 + return binascii.unhexlify(hex_string.zfill(n + (n & 1))) + + +class InterfaceNotImplemented(Exception): + pass + + +if hasattr(inspect, "signature"): + signature = inspect.signature +else: + signature = inspect.getargspec + + +def verify_interface(iface, klass): + for method in iface.__abstractmethods__: + if not hasattr(klass, method): + raise InterfaceNotImplemented( + "{0} is missing a {1!r} method".format(klass, method) + ) + if isinstance(getattr(iface, method), abc.abstractproperty): + # Can't properly verify these yet. + continue + sig = signature(getattr(iface, method)) + actual = signature(getattr(klass, method)) + if sig != actual: + raise InterfaceNotImplemented( + "{0}.{1}'s signature differs from the expected. Expected: " + "{2!r}. Received: {3!r}".format( + klass, method, sig, actual + ) + ) + + +if sys.version_info >= (2, 7): + def bit_length(x): + return x.bit_length() +else: + def bit_length(x): + return len(bin(x)) - (2 + (x <= 0)) + + +class _DeprecatedValue(object): + def __init__(self, value, message, warning_class): + self.value = value + self.message = message + self.warning_class = warning_class + + +class _ModuleWithDeprecations(object): + def __init__(self, module): + self.__dict__["_module"] = module + + def __getattr__(self, attr): + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + obj = obj.value + return obj + + def __setattr__(self, attr, value): + setattr(self._module, attr, value) + + def __delattr__(self, attr): + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + + delattr(self._module, attr) + + def __dir__(self): + return ["_module"] + dir(self._module) + + +def deprecated(value, module_name, message, warning_class): + module = sys.modules[module_name] + if not isinstance(module, _ModuleWithDeprecations): + sys.modules[module_name] = module = _ModuleWithDeprecations(module) + return _DeprecatedValue(value, message, warning_class) diff --git a/lib/python3.4/site-packages/cryptography/x509/__init__.py b/lib/python3.4/site-packages/cryptography/x509/__init__.py new file mode 100644 index 0000000..968d29d --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/x509/__init__.py @@ -0,0 +1,174 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
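The integer helpers in utils.py above are used throughout the package; a quick illustrative sketch of their round-trip behavior (big-endian, unsigned only):

from cryptography import utils

n = 2 ** 64 + 12345
data = utils.int_to_bytes(n)                         # minimal big-endian encoding
assert utils.int_from_bytes(data, byteorder='big') == n

# An explicit length left-pads with zero bytes:
assert utils.int_to_bytes(1, length=4) == b'\x00\x00\x00\x01'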
+ +from __future__ import absolute_import, division, print_function + +from cryptography.x509.base import ( + Certificate, CertificateBuilder, CertificateRevocationList, + CertificateRevocationListBuilder, + CertificateSigningRequest, CertificateSigningRequestBuilder, + InvalidVersion, RevokedCertificate, RevokedCertificateBuilder, + Version, load_der_x509_certificate, load_der_x509_crl, load_der_x509_csr, + load_pem_x509_certificate, load_pem_x509_crl, load_pem_x509_csr, +) +from cryptography.x509.extensions import ( + AccessDescription, AuthorityInformationAccess, + AuthorityKeyIdentifier, BasicConstraints, CRLDistributionPoints, + CRLNumber, CRLReason, CertificateIssuer, CertificatePolicies, + DistributionPoint, DuplicateExtension, ExtendedKeyUsage, Extension, + ExtensionNotFound, ExtensionType, Extensions, GeneralNames, + InhibitAnyPolicy, InvalidityDate, IssuerAlternativeName, KeyUsage, + NameConstraints, NoticeReference, OCSPNoCheck, PolicyConstraints, + PolicyInformation, ReasonFlags, SubjectAlternativeName, + SubjectKeyIdentifier, UnrecognizedExtension, UnsupportedExtension, + UserNotice +) +from cryptography.x509.general_name import ( + DNSName, DirectoryName, GeneralName, IPAddress, OtherName, RFC822Name, + RegisteredID, UniformResourceIdentifier, UnsupportedGeneralNameType, + _GENERAL_NAMES +) +from cryptography.x509.name import Name, NameAttribute +from cryptography.x509.oid import ( + AuthorityInformationAccessOID, CRLEntryExtensionOID, + CertificatePoliciesOID, ExtendedKeyUsageOID, ExtensionOID, NameOID, + ObjectIdentifier, SignatureAlgorithmOID, _SIG_OIDS_TO_HASH +) + + +OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS +OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER +OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS +OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES +OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS +OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE +OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL +OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY +OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME +OID_KEY_USAGE = ExtensionOID.KEY_USAGE +OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS +OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK +OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS +OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS +OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME +OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES +OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS +OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER + +OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1 +OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224 +OID_DSA_WITH_SHA256 = SignatureAlgorithmOID.DSA_WITH_SHA256 +OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1 +OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224 +OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256 +OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384 +OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512 +OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5 +OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1 +OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224 +OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256 +OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384 +OID_RSA_WITH_SHA512 = 
SignatureAlgorithmOID.RSA_WITH_SHA512 + +OID_COMMON_NAME = NameOID.COMMON_NAME +OID_COUNTRY_NAME = NameOID.COUNTRY_NAME +OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT +OID_DN_QUALIFIER = NameOID.DN_QUALIFIER +OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS +OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER +OID_GIVEN_NAME = NameOID.GIVEN_NAME +OID_LOCALITY_NAME = NameOID.LOCALITY_NAME +OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME +OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME +OID_PSEUDONYM = NameOID.PSEUDONYM +OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER +OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME +OID_SURNAME = NameOID.SURNAME +OID_TITLE = NameOID.TITLE + +OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH +OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING +OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION +OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING +OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH +OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING + +OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY +OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER +OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE + +OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER +OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON +OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE + +OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS +OID_OCSP = AuthorityInformationAccessOID.OCSP + + +__all__ = [ + "load_pem_x509_certificate", + "load_der_x509_certificate", + "load_pem_x509_csr", + "load_der_x509_csr", + "load_pem_x509_crl", + "load_der_x509_crl", + "InvalidVersion", + "DuplicateExtension", + "UnsupportedExtension", + "ExtensionNotFound", + "UnsupportedGeneralNameType", + "NameAttribute", + "Name", + "ObjectIdentifier", + "ExtensionType", + "Extensions", + "Extension", + "ExtendedKeyUsage", + "OCSPNoCheck", + "BasicConstraints", + "CRLNumber", + "KeyUsage", + "AuthorityInformationAccess", + "AccessDescription", + "CertificatePolicies", + "PolicyInformation", + "UserNotice", + "NoticeReference", + "SubjectKeyIdentifier", + "NameConstraints", + "CRLDistributionPoints", + "DistributionPoint", + "ReasonFlags", + "InhibitAnyPolicy", + "SubjectAlternativeName", + "IssuerAlternativeName", + "AuthorityKeyIdentifier", + "GeneralNames", + "GeneralName", + "RFC822Name", + "DNSName", + "UniformResourceIdentifier", + "RegisteredID", + "DirectoryName", + "IPAddress", + "OtherName", + "Certificate", + "CertificateRevocationList", + "CertificateRevocationListBuilder", + "CertificateSigningRequest", + "RevokedCertificate", + "RevokedCertificateBuilder", + "CertificateSigningRequestBuilder", + "CertificateBuilder", + "Version", + "_SIG_OIDS_TO_HASH", + "OID_CA_ISSUERS", + "OID_OCSP", + "_GENERAL_NAMES", + "CertificateIssuer", + "CRLReason", + "InvalidityDate", + "UnrecognizedExtension", + "PolicyConstraints", +] diff --git a/lib/python3.4/site-packages/cryptography/x509/base.py b/lib/python3.4/site-packages/cryptography/x509/base.py new file mode 100644 index 0000000..ab70fe7 --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/x509/base.py @@ -0,0 +1,682 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +import abc +import datetime +from enum import Enum + +import six + +from cryptography import utils +from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa +from cryptography.x509.extensions import Extension, ExtensionType +from cryptography.x509.name import Name + + +_UNIX_EPOCH = datetime.datetime(1970, 1, 1) + + +class Version(Enum): + v1 = 0 + v3 = 2 + + +def load_pem_x509_certificate(data, backend): + return backend.load_pem_x509_certificate(data) + + +def load_der_x509_certificate(data, backend): + return backend.load_der_x509_certificate(data) + + +def load_pem_x509_csr(data, backend): + return backend.load_pem_x509_csr(data) + + +def load_der_x509_csr(data, backend): + return backend.load_der_x509_csr(data) + + +def load_pem_x509_crl(data, backend): + return backend.load_pem_x509_crl(data) + + +def load_der_x509_crl(data, backend): + return backend.load_der_x509_crl(data) + + +class InvalidVersion(Exception): + def __init__(self, msg, parsed_version): + super(InvalidVersion, self).__init__(msg) + self.parsed_version = parsed_version + + +@six.add_metaclass(abc.ABCMeta) +class Certificate(object): + @abc.abstractmethod + def fingerprint(self, algorithm): + """ + Returns bytes using digest passed. + """ + + @abc.abstractproperty + def serial_number(self): + """ + Returns certificate serial number + """ + + @abc.abstractproperty + def version(self): + """ + Returns the certificate version + """ + + @abc.abstractmethod + def public_key(self): + """ + Returns the public key + """ + + @abc.abstractproperty + def not_valid_before(self): + """ + Not before time (represented as UTC datetime) + """ + + @abc.abstractproperty + def not_valid_after(self): + """ + Not after time (represented as UTC datetime) + """ + + @abc.abstractproperty + def issuer(self): + """ + Returns the issuer name object. + """ + + @abc.abstractproperty + def subject(self): + """ + Returns the subject name object. + """ + + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns an Extensions object. + """ + + @abc.abstractproperty + def signature(self): + """ + Returns the signature bytes. + """ + + @abc.abstractproperty + def tbs_certificate_bytes(self): + """ + Returns the tbsCertificate payload bytes as defined in RFC 5280. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Checks equality. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Checks not equal. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Computes a hash. + """ + + @abc.abstractmethod + def public_bytes(self, encoding): + """ + Serializes the certificate to PEM or DER format. + """ + + +@six.add_metaclass(abc.ABCMeta) +class CertificateRevocationList(object): + @abc.abstractmethod + def public_bytes(self, encoding): + """ + Serializes the CRL to PEM or DER format. + """ + + @abc.abstractmethod + def fingerprint(self, algorithm): + """ + Returns bytes using digest passed. + """ + + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def issuer(self): + """ + Returns the X509Name with the issuer of this CRL. + """ + + @abc.abstractproperty + def next_update(self): + """ + Returns the date of next update for this CRL. 
+ """ + + @abc.abstractproperty + def last_update(self): + """ + Returns the date of last update for this CRL. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns an Extensions object containing a list of CRL extensions. + """ + + @abc.abstractproperty + def signature(self): + """ + Returns the signature bytes. + """ + + @abc.abstractproperty + def tbs_certlist_bytes(self): + """ + Returns the tbsCertList payload bytes as defined in RFC 5280. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Checks equality. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Checks not equal. + """ + + +@six.add_metaclass(abc.ABCMeta) +class CertificateSigningRequest(object): + @abc.abstractmethod + def __eq__(self, other): + """ + Checks equality. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Checks not equal. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Computes a hash. + """ + + @abc.abstractmethod + def public_key(self): + """ + Returns the public key + """ + + @abc.abstractproperty + def subject(self): + """ + Returns the subject name object. + """ + + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns the extensions in the signing request. + """ + + @abc.abstractmethod + def public_bytes(self, encoding): + """ + Encodes the request to PEM or DER format. + """ + + @abc.abstractproperty + def signature(self): + """ + Returns the signature bytes. + """ + + @abc.abstractproperty + def tbs_certrequest_bytes(self): + """ + Returns the PKCS#10 CertificationRequestInfo bytes as defined in RFC + 2986. + """ + + @abc.abstractproperty + def is_signature_valid(self): + """ + Verifies signature of signing request. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RevokedCertificate(object): + @abc.abstractproperty + def serial_number(self): + """ + Returns the serial number of the revoked certificate. + """ + + @abc.abstractproperty + def revocation_date(self): + """ + Returns the date of when this certificate was revoked. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns an Extensions object containing a list of Revoked extensions. + """ + + +class CertificateSigningRequestBuilder(object): + def __init__(self, subject_name=None, extensions=[]): + """ + Creates an empty X.509 certificate request (v1). + """ + self._subject_name = subject_name + self._extensions = extensions + + def subject_name(self, name): + """ + Sets the certificate requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError('Expecting x509.Name object.') + if self._subject_name is not None: + raise ValueError('The subject name may only be set once.') + return CertificateSigningRequestBuilder(name, self._extensions) + + def add_extension(self, extension, critical): + """ + Adds an X.509 extension to the certificate request. 
+ """ + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + + # TODO: This is quadratic in the number of extensions + for e in self._extensions: + if e.oid == extension.oid: + raise ValueError('This extension has already been set.') + return CertificateSigningRequestBuilder( + self._subject_name, self._extensions + [extension] + ) + + def sign(self, private_key, algorithm, backend): + """ + Signs the request using the requestor's private key. + """ + if self._subject_name is None: + raise ValueError("A CertificateSigningRequest must have a subject") + return backend.create_x509_csr(self, private_key, algorithm) + + +class CertificateBuilder(object): + def __init__(self, issuer_name=None, subject_name=None, + public_key=None, serial_number=None, not_valid_before=None, + not_valid_after=None, extensions=[]): + self._version = Version.v3 + self._issuer_name = issuer_name + self._subject_name = subject_name + self._public_key = public_key + self._serial_number = serial_number + self._not_valid_before = not_valid_before + self._not_valid_after = not_valid_after + self._extensions = extensions + + def issuer_name(self, name): + """ + Sets the CA's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError('Expecting x509.Name object.') + if self._issuer_name is not None: + raise ValueError('The issuer name may only be set once.') + return CertificateBuilder( + name, self._subject_name, self._public_key, + self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def subject_name(self, name): + """ + Sets the requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError('Expecting x509.Name object.') + if self._subject_name is not None: + raise ValueError('The subject name may only be set once.') + return CertificateBuilder( + self._issuer_name, name, self._public_key, + self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def public_key(self, key): + """ + Sets the requestor's public key (as found in the signing request). + """ + if not isinstance(key, (dsa.DSAPublicKey, rsa.RSAPublicKey, + ec.EllipticCurvePublicKey)): + raise TypeError('Expecting one of DSAPublicKey, RSAPublicKey,' + ' or EllipticCurvePublicKey.') + if self._public_key is not None: + raise ValueError('The public key may only be set once.') + return CertificateBuilder( + self._issuer_name, self._subject_name, key, + self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def serial_number(self, number): + """ + Sets the certificate serial number. + """ + if not isinstance(number, six.integer_types): + raise TypeError('Serial number must be of integral type.') + if self._serial_number is not None: + raise ValueError('The serial number may only be set once.') + if number < 0: + raise ValueError('The serial number should be non-negative.') + if utils.bit_length(number) > 160: # As defined in RFC 5280 + raise ValueError('The serial number should not be more than 160 ' + 'bits.') + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def not_valid_before(self, time): + """ + Sets the certificate activation time. 
+ """ + if not isinstance(time, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._not_valid_before is not None: + raise ValueError('The not valid before may only be set once.') + if time <= _UNIX_EPOCH: + raise ValueError('The not valid before date must be after the unix' + ' epoch (1970 January 1).') + if self._not_valid_after is not None and time > self._not_valid_after: + raise ValueError( + 'The not valid before date must be before the not valid after ' + 'date.' + ) + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, self._serial_number, time, + self._not_valid_after, self._extensions + ) + + def not_valid_after(self, time): + """ + Sets the certificate expiration time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._not_valid_after is not None: + raise ValueError('The not valid after may only be set once.') + if time <= _UNIX_EPOCH: + raise ValueError('The not valid after date must be after the unix' + ' epoch (1970 January 1).') + if (self._not_valid_before is not None and + time < self._not_valid_before): + raise ValueError( + 'The not valid after date must be after the not valid before ' + 'date.' + ) + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, self._serial_number, self._not_valid_before, + time, self._extensions + ) + + def add_extension(self, extension, critical): + """ + Adds an X.509 extension to the certificate. + """ + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + + # TODO: This is quadratic in the number of extensions + for e in self._extensions: + if e.oid == extension.oid: + raise ValueError('This extension has already been set.') + + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + [extension] + ) + + def sign(self, private_key, algorithm, backend): + """ + Signs the certificate using the CA's private key. 
+ """ + if self._subject_name is None: + raise ValueError("A certificate must have a subject name") + + if self._issuer_name is None: + raise ValueError("A certificate must have an issuer name") + + if self._serial_number is None: + raise ValueError("A certificate must have a serial number") + + if self._not_valid_before is None: + raise ValueError("A certificate must have a not valid before time") + + if self._not_valid_after is None: + raise ValueError("A certificate must have a not valid after time") + + if self._public_key is None: + raise ValueError("A certificate must have a public key") + + return backend.create_x509_certificate(self, private_key, algorithm) + + +class CertificateRevocationListBuilder(object): + def __init__(self, issuer_name=None, last_update=None, next_update=None, + extensions=[], revoked_certificates=[]): + self._issuer_name = issuer_name + self._last_update = last_update + self._next_update = next_update + self._extensions = extensions + self._revoked_certificates = revoked_certificates + + def issuer_name(self, issuer_name): + if not isinstance(issuer_name, Name): + raise TypeError('Expecting x509.Name object.') + if self._issuer_name is not None: + raise ValueError('The issuer name may only be set once.') + return CertificateRevocationListBuilder( + issuer_name, self._last_update, self._next_update, + self._extensions, self._revoked_certificates + ) + + def last_update(self, last_update): + if not isinstance(last_update, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._last_update is not None: + raise ValueError('Last update may only be set once.') + if last_update <= _UNIX_EPOCH: + raise ValueError('The last update date must be after the unix' + ' epoch (1970 January 1).') + if self._next_update is not None and last_update > self._next_update: + raise ValueError( + 'The last update date must be before the next update date.' + ) + return CertificateRevocationListBuilder( + self._issuer_name, last_update, self._next_update, + self._extensions, self._revoked_certificates + ) + + def next_update(self, next_update): + if not isinstance(next_update, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._next_update is not None: + raise ValueError('Last update may only be set once.') + if next_update <= _UNIX_EPOCH: + raise ValueError('The last update date must be after the unix' + ' epoch (1970 January 1).') + if self._last_update is not None and next_update < self._last_update: + raise ValueError( + 'The next update date must be after the last update date.' + ) + return CertificateRevocationListBuilder( + self._issuer_name, self._last_update, next_update, + self._extensions, self._revoked_certificates + ) + + def add_extension(self, extension, critical): + """ + Adds an X.509 extension to the certificate revocation list. + """ + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + + # TODO: This is quadratic in the number of extensions + for e in self._extensions: + if e.oid == extension.oid: + raise ValueError('This extension has already been set.') + return CertificateRevocationListBuilder( + self._issuer_name, self._last_update, self._next_update, + self._extensions + [extension], self._revoked_certificates + ) + + def add_revoked_certificate(self, revoked_certificate): + """ + Adds a revoked certificate to the CRL. 
+ """ + if not isinstance(revoked_certificate, RevokedCertificate): + raise TypeError("Must be an instance of RevokedCertificate") + + return CertificateRevocationListBuilder( + self._issuer_name, self._last_update, + self._next_update, self._extensions, + self._revoked_certificates + [revoked_certificate] + ) + + def sign(self, private_key, algorithm, backend): + if self._issuer_name is None: + raise ValueError("A CRL must have an issuer name") + + if self._last_update is None: + raise ValueError("A CRL must have a last update time") + + if self._next_update is None: + raise ValueError("A CRL must have a next update time") + + return backend.create_x509_crl(self, private_key, algorithm) + + +class RevokedCertificateBuilder(object): + def __init__(self, serial_number=None, revocation_date=None, + extensions=[]): + self._serial_number = serial_number + self._revocation_date = revocation_date + self._extensions = extensions + + def serial_number(self, number): + if not isinstance(number, six.integer_types): + raise TypeError('Serial number must be of integral type.') + if self._serial_number is not None: + raise ValueError('The serial number may only be set once.') + if number < 0: + raise ValueError('The serial number should be non-negative.') + if utils.bit_length(number) > 160: # As defined in RFC 5280 + raise ValueError('The serial number should not be more than 160 ' + 'bits.') + return RevokedCertificateBuilder( + number, self._revocation_date, self._extensions + ) + + def revocation_date(self, time): + if not isinstance(time, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._revocation_date is not None: + raise ValueError('The revocation date may only be set once.') + if time <= _UNIX_EPOCH: + raise ValueError('The revocation date must be after the unix' + ' epoch (1970 January 1).') + return RevokedCertificateBuilder( + self._serial_number, time, self._extensions + ) + + def add_extension(self, extension, critical): + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + + # TODO: This is quadratic in the number of extensions + for e in self._extensions: + if e.oid == extension.oid: + raise ValueError('This extension has already been set.') + return RevokedCertificateBuilder( + self._serial_number, self._revocation_date, + self._extensions + [extension] + ) + + def build(self, backend): + if self._serial_number is None: + raise ValueError("A revoked certificate must have a serial number") + if self._revocation_date is None: + raise ValueError( + "A revoked certificate must have a revocation date" + ) + + return backend.create_x509_revoked_certificate(self) diff --git a/lib/python3.4/site-packages/cryptography/x509/extensions.py b/lib/python3.4/site-packages/cryptography/x509/extensions.py new file mode 100644 index 0000000..b7ea72c --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/x509/extensions.py @@ -0,0 +1,1189 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +import abc +import datetime +import hashlib +import ipaddress +from enum import Enum + +from pyasn1.codec.der import decoder +from pyasn1.type import namedtype, univ + +import six + +from cryptography import utils +from cryptography.hazmat.primitives import constant_time, serialization +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey +from cryptography.x509.general_name import GeneralName, IPAddress, OtherName +from cryptography.x509.name import Name +from cryptography.x509.oid import ( + CRLEntryExtensionOID, ExtensionOID, ObjectIdentifier +) + + +class _SubjectPublicKeyInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('algorithm', univ.Sequence()), + namedtype.NamedType('subjectPublicKey', univ.BitString()) + ) + + +def _key_identifier_from_public_key(public_key): + if isinstance(public_key, RSAPublicKey): + data = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.PKCS1, + ) + elif isinstance(public_key, EllipticCurvePublicKey): + data = public_key.public_numbers().encode_point() + else: + # This is a very slow way to do this. + serialized = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.SubjectPublicKeyInfo + ) + spki, remaining = decoder.decode( + serialized, asn1Spec=_SubjectPublicKeyInfo() + ) + assert not remaining + # the univ.BitString object is a tuple of bits. We need bytes and + # pyasn1 really doesn't want to give them to us. To get it we'll + # build an integer and convert that to bytes. + bits = 0 + for bit in spki.getComponentByName("subjectPublicKey"): + bits = bits << 1 | bit + + data = utils.int_to_bytes(bits) + + return hashlib.sha1(data).digest() + + +class DuplicateExtension(Exception): + def __init__(self, msg, oid): + super(DuplicateExtension, self).__init__(msg) + self.oid = oid + + +class UnsupportedExtension(Exception): + def __init__(self, msg, oid): + super(UnsupportedExtension, self).__init__(msg) + self.oid = oid + + +class ExtensionNotFound(Exception): + def __init__(self, msg, oid): + super(ExtensionNotFound, self).__init__(msg) + self.oid = oid + + +@six.add_metaclass(abc.ABCMeta) +class ExtensionType(object): + @abc.abstractproperty + def oid(self): + """ + Returns the oid associated with the given extension type. + """ + + +class Extensions(object): + def __init__(self, extensions): + self._extensions = extensions + + def get_extension_for_oid(self, oid): + for ext in self: + if ext.oid == oid: + return ext + + raise ExtensionNotFound("No {0} extension was found".format(oid), oid) + + def get_extension_for_class(self, extclass): + if extclass is UnrecognizedExtension: + raise TypeError( + "UnrecognizedExtension can't be used with " + "get_extension_for_class because more than one instance of the" + " class may be present." 
+ ) + + for ext in self: + if isinstance(ext.value, extclass): + return ext + + raise ExtensionNotFound( + "No {0} extension was found".format(extclass), extclass.oid + ) + + def __iter__(self): + return iter(self._extensions) + + def __len__(self): + return len(self._extensions) + + def __getitem__(self, idx): + return self._extensions[idx] + + def __repr__(self): + return ( + "".format(self._extensions) + ) + + +@utils.register_interface(ExtensionType) +class CRLNumber(object): + oid = ExtensionOID.CRL_NUMBER + + def __init__(self, crl_number): + if not isinstance(crl_number, six.integer_types): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + def __eq__(self, other): + if not isinstance(other, CRLNumber): + return NotImplemented + + return self.crl_number == other.crl_number + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.crl_number) + + def __repr__(self): + return "".format(self.crl_number) + + crl_number = utils.read_only_property("_crl_number") + + +@utils.register_interface(ExtensionType) +class AuthorityKeyIdentifier(object): + oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER + + def __init__(self, key_identifier, authority_cert_issuer, + authority_cert_serial_number): + if (authority_cert_issuer is None) != ( + authority_cert_serial_number is None + ): + raise ValueError( + "authority_cert_issuer and authority_cert_serial_number " + "must both be present or both None" + ) + + if authority_cert_issuer is not None and not all( + isinstance(x, GeneralName) for x in authority_cert_issuer + ): + raise TypeError( + "authority_cert_issuer must be a list of GeneralName " + "objects" + ) + + if authority_cert_serial_number is not None and not isinstance( + authority_cert_serial_number, six.integer_types + ): + raise TypeError( + "authority_cert_serial_number must be an integer" + ) + + self._key_identifier = key_identifier + self._authority_cert_issuer = authority_cert_issuer + self._authority_cert_serial_number = authority_cert_serial_number + + @classmethod + def from_issuer_public_key(cls, public_key): + digest = _key_identifier_from_public_key(public_key) + return cls( + key_identifier=digest, + authority_cert_issuer=None, + authority_cert_serial_number=None + ) + + @classmethod + def from_issuer_subject_key_identifier(cls, ski): + return cls( + key_identifier=ski.value.digest, + authority_cert_issuer=None, + authority_cert_serial_number=None + ) + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, AuthorityKeyIdentifier): + return NotImplemented + + return ( + self.key_identifier == other.key_identifier and + self.authority_cert_issuer == other.authority_cert_issuer and + self.authority_cert_serial_number == + other.authority_cert_serial_number + ) + + def __ne__(self, other): + return not self == other + + key_identifier = utils.read_only_property("_key_identifier") + authority_cert_issuer = utils.read_only_property("_authority_cert_issuer") + authority_cert_serial_number = utils.read_only_property( + "_authority_cert_serial_number" + ) + + +@utils.register_interface(ExtensionType) +class SubjectKeyIdentifier(object): + oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER + + def __init__(self, digest): + self._digest = digest + + @classmethod + def from_public_key(cls, public_key): + return cls(_key_identifier_from_public_key(public_key)) + + digest = utils.read_only_property("_digest") + + def __repr__(self): + return "".format(self.digest) + + def 
__eq__(self, other): + if not isinstance(other, SubjectKeyIdentifier): + return NotImplemented + + return constant_time.bytes_eq(self.digest, other.digest) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.digest) + + +@utils.register_interface(ExtensionType) +class AuthorityInformationAccess(object): + oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS + + def __init__(self, descriptions): + if not all(isinstance(x, AccessDescription) for x in descriptions): + raise TypeError( + "Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + def __iter__(self): + return iter(self._descriptions) + + def __len__(self): + return len(self._descriptions) + + def __repr__(self): + return "".format(self._descriptions) + + def __eq__(self, other): + if not isinstance(other, AuthorityInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __ne__(self, other): + return not self == other + + def __getitem__(self, idx): + return self._descriptions[idx] + + +class AccessDescription(object): + def __init__(self, access_method, access_location): + if not isinstance(access_method, ObjectIdentifier): + raise TypeError("access_method must be an ObjectIdentifier") + + if not isinstance(access_location, GeneralName): + raise TypeError("access_location must be a GeneralName") + + self._access_method = access_method + self._access_location = access_location + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, AccessDescription): + return NotImplemented + + return ( + self.access_method == other.access_method and + self.access_location == other.access_location + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.access_method, self.access_location)) + + access_method = utils.read_only_property("_access_method") + access_location = utils.read_only_property("_access_location") + + +@utils.register_interface(ExtensionType) +class BasicConstraints(object): + oid = ExtensionOID.BASIC_CONSTRAINTS + + def __init__(self, ca, path_length): + if not isinstance(ca, bool): + raise TypeError("ca must be a boolean value") + + if path_length is not None and not ca: + raise ValueError("path_length must be None when ca is False") + + if ( + path_length is not None and + (not isinstance(path_length, six.integer_types) or path_length < 0) + ): + raise TypeError( + "path_length must be a non-negative integer or None" + ) + + self._ca = ca + self._path_length = path_length + + ca = utils.read_only_property("_ca") + path_length = utils.read_only_property("_path_length") + + def __repr__(self): + return ("").format(self) + + def __eq__(self, other): + if not isinstance(other, BasicConstraints): + return NotImplemented + + return self.ca == other.ca and self.path_length == other.path_length + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.ca, self.path_length)) + + +@utils.register_interface(ExtensionType) +class CRLDistributionPoints(object): + oid = ExtensionOID.CRL_DISTRIBUTION_POINTS + + def __init__(self, distribution_points): + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + def __iter__(self): + return iter(self._distribution_points) + + def 
__len__(self): + return len(self._distribution_points) + + def __repr__(self): + return "".format(self._distribution_points) + + def __eq__(self, other): + if not isinstance(other, CRLDistributionPoints): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __ne__(self, other): + return not self == other + + def __getitem__(self, idx): + return self._distribution_points[idx] + + +class DistributionPoint(object): + def __init__(self, full_name, relative_name, reasons, crl_issuer): + if full_name and relative_name: + raise ValueError( + "You cannot provide both full_name and relative_name, at " + "least one must be None." + ) + + if full_name and not all( + isinstance(x, GeneralName) for x in full_name + ): + raise TypeError( + "full_name must be a list of GeneralName objects" + ) + + if relative_name and not isinstance(relative_name, Name): + raise TypeError("relative_name must be a Name") + + if crl_issuer and not all( + isinstance(x, GeneralName) for x in crl_issuer + ): + raise TypeError( + "crl_issuer must be None or a list of general names" + ) + + if reasons and (not isinstance(reasons, frozenset) or not all( + isinstance(x, ReasonFlags) for x in reasons + )): + raise TypeError("reasons must be None or frozenset of ReasonFlags") + + if reasons and ( + ReasonFlags.unspecified in reasons or + ReasonFlags.remove_from_crl in reasons + ): + raise ValueError( + "unspecified and remove_from_crl are not valid reasons in a " + "DistributionPoint" + ) + + if reasons and not crl_issuer and not (full_name or relative_name): + raise ValueError( + "You must supply crl_issuer, full_name, or relative_name when " + "reasons is not None" + ) + + self._full_name = full_name + self._relative_name = relative_name + self._reasons = reasons + self._crl_issuer = crl_issuer + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, DistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name and + self.relative_name == other.relative_name and + self.reasons == other.reasons and + self.crl_issuer == other.crl_issuer + ) + + def __ne__(self, other): + return not self == other + + full_name = utils.read_only_property("_full_name") + relative_name = utils.read_only_property("_relative_name") + reasons = utils.read_only_property("_reasons") + crl_issuer = utils.read_only_property("_crl_issuer") + + +class ReasonFlags(Enum): + unspecified = "unspecified" + key_compromise = "keyCompromise" + ca_compromise = "cACompromise" + affiliation_changed = "affiliationChanged" + superseded = "superseded" + cessation_of_operation = "cessationOfOperation" + certificate_hold = "certificateHold" + privilege_withdrawn = "privilegeWithdrawn" + aa_compromise = "aACompromise" + remove_from_crl = "removeFromCRL" + + +@utils.register_interface(ExtensionType) +class PolicyConstraints(object): + oid = ExtensionOID.POLICY_CONSTRAINTS + + def __init__(self, require_explicit_policy, inhibit_policy_mapping): + if require_explicit_policy is not None and not isinstance( + require_explicit_policy, six.integer_types + ): + raise TypeError( + "require_explicit_policy must be a non-negative integer or " + "None" + ) + + if inhibit_policy_mapping is not None and not isinstance( + inhibit_policy_mapping, six.integer_types + ): + raise TypeError( + "inhibit_policy_mapping must be a non-negative integer or None" + ) + + if inhibit_policy_mapping is None and require_explicit_policy is None: + raise ValueError( + "At least 
one of require_explicit_policy and " + "inhibit_policy_mapping must not be None" + ) + + self._require_explicit_policy = require_explicit_policy + self._inhibit_policy_mapping = inhibit_policy_mapping + + def __repr__(self): + return ( + u"".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, PolicyConstraints): + return NotImplemented + + return ( + self.require_explicit_policy == other.require_explicit_policy and + self.inhibit_policy_mapping == other.inhibit_policy_mapping + ) + + def __ne__(self, other): + return not self == other + + require_explicit_policy = utils.read_only_property( + "_require_explicit_policy" + ) + inhibit_policy_mapping = utils.read_only_property( + "_inhibit_policy_mapping" + ) + + +@utils.register_interface(ExtensionType) +class CertificatePolicies(object): + oid = ExtensionOID.CERTIFICATE_POLICIES + + def __init__(self, policies): + if not all(isinstance(x, PolicyInformation) for x in policies): + raise TypeError( + "Every item in the policies list must be a " + "PolicyInformation" + ) + + self._policies = policies + + def __iter__(self): + return iter(self._policies) + + def __len__(self): + return len(self._policies) + + def __repr__(self): + return "".format(self._policies) + + def __eq__(self, other): + if not isinstance(other, CertificatePolicies): + return NotImplemented + + return self._policies == other._policies + + def __ne__(self, other): + return not self == other + + def __getitem__(self, idx): + return self._policies[idx] + + +class PolicyInformation(object): + def __init__(self, policy_identifier, policy_qualifiers): + if not isinstance(policy_identifier, ObjectIdentifier): + raise TypeError("policy_identifier must be an ObjectIdentifier") + + self._policy_identifier = policy_identifier + if policy_qualifiers and not all( + isinstance( + x, (six.text_type, UserNotice) + ) for x in policy_qualifiers + ): + raise TypeError( + "policy_qualifiers must be a list of strings and/or UserNotice" + " objects or None" + ) + + self._policy_qualifiers = policy_qualifiers + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, PolicyInformation): + return NotImplemented + + return ( + self.policy_identifier == other.policy_identifier and + self.policy_qualifiers == other.policy_qualifiers + ) + + def __ne__(self, other): + return not self == other + + policy_identifier = utils.read_only_property("_policy_identifier") + policy_qualifiers = utils.read_only_property("_policy_qualifiers") + + +class UserNotice(object): + def __init__(self, notice_reference, explicit_text): + if notice_reference and not isinstance( + notice_reference, NoticeReference + ): + raise TypeError( + "notice_reference must be None or a NoticeReference" + ) + + self._notice_reference = notice_reference + self._explicit_text = explicit_text + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, UserNotice): + return NotImplemented + + return ( + self.notice_reference == other.notice_reference and + self.explicit_text == other.explicit_text + ) + + def __ne__(self, other): + return not self == other + + notice_reference = utils.read_only_property("_notice_reference") + explicit_text = utils.read_only_property("_explicit_text") + + +class NoticeReference(object): + def __init__(self, organization, notice_numbers): + self._organization = organization + if not isinstance(notice_numbers, list) or not all( + isinstance(x, int) for x in notice_numbers + ): + raise 
TypeError( + "notice_numbers must be a list of integers" + ) + + self._notice_numbers = notice_numbers + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, NoticeReference): + return NotImplemented + + return ( + self.organization == other.organization and + self.notice_numbers == other.notice_numbers + ) + + def __ne__(self, other): + return not self == other + + organization = utils.read_only_property("_organization") + notice_numbers = utils.read_only_property("_notice_numbers") + + +@utils.register_interface(ExtensionType) +class ExtendedKeyUsage(object): + oid = ExtensionOID.EXTENDED_KEY_USAGE + + def __init__(self, usages): + if not all(isinstance(x, ObjectIdentifier) for x in usages): + raise TypeError( + "Every item in the usages list must be an ObjectIdentifier" + ) + + self._usages = usages + + def __iter__(self): + return iter(self._usages) + + def __len__(self): + return len(self._usages) + + def __repr__(self): + return "".format(self._usages) + + def __eq__(self, other): + if not isinstance(other, ExtendedKeyUsage): + return NotImplemented + + return self._usages == other._usages + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(ExtensionType) +class OCSPNoCheck(object): + oid = ExtensionOID.OCSP_NO_CHECK + + +@utils.register_interface(ExtensionType) +class InhibitAnyPolicy(object): + oid = ExtensionOID.INHIBIT_ANY_POLICY + + def __init__(self, skip_certs): + if not isinstance(skip_certs, six.integer_types): + raise TypeError("skip_certs must be an integer") + + if skip_certs < 0: + raise ValueError("skip_certs must be a non-negative integer") + + self._skip_certs = skip_certs + + def __repr__(self): + return "".format(self) + + def __eq__(self, other): + if not isinstance(other, InhibitAnyPolicy): + return NotImplemented + + return self.skip_certs == other.skip_certs + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.skip_certs) + + skip_certs = utils.read_only_property("_skip_certs") + + +@utils.register_interface(ExtensionType) +class KeyUsage(object): + oid = ExtensionOID.KEY_USAGE + + def __init__(self, digital_signature, content_commitment, key_encipherment, + data_encipherment, key_agreement, key_cert_sign, crl_sign, + encipher_only, decipher_only): + if not key_agreement and (encipher_only or decipher_only): + raise ValueError( + "encipher_only and decipher_only can only be true when " + "key_agreement is true" + ) + + self._digital_signature = digital_signature + self._content_commitment = content_commitment + self._key_encipherment = key_encipherment + self._data_encipherment = data_encipherment + self._key_agreement = key_agreement + self._key_cert_sign = key_cert_sign + self._crl_sign = crl_sign + self._encipher_only = encipher_only + self._decipher_only = decipher_only + + digital_signature = utils.read_only_property("_digital_signature") + content_commitment = utils.read_only_property("_content_commitment") + key_encipherment = utils.read_only_property("_key_encipherment") + data_encipherment = utils.read_only_property("_data_encipherment") + key_agreement = utils.read_only_property("_key_agreement") + key_cert_sign = utils.read_only_property("_key_cert_sign") + crl_sign = utils.read_only_property("_crl_sign") + + @property + def encipher_only(self): + if not self.key_agreement: + raise ValueError( + "encipher_only is undefined unless key_agreement is true" + ) + else: + return self._encipher_only + + @property + def 
decipher_only(self): + if not self.key_agreement: + raise ValueError( + "decipher_only is undefined unless key_agreement is true" + ) + else: + return self._decipher_only + + def __repr__(self): + try: + encipher_only = self.encipher_only + decipher_only = self.decipher_only + except ValueError: + encipher_only = None + decipher_only = None + + return ("").format( + self, encipher_only, decipher_only) + + def __eq__(self, other): + if not isinstance(other, KeyUsage): + return NotImplemented + + return ( + self.digital_signature == other.digital_signature and + self.content_commitment == other.content_commitment and + self.key_encipherment == other.key_encipherment and + self.data_encipherment == other.data_encipherment and + self.key_agreement == other.key_agreement and + self.key_cert_sign == other.key_cert_sign and + self.crl_sign == other.crl_sign and + self._encipher_only == other._encipher_only and + self._decipher_only == other._decipher_only + ) + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(ExtensionType) +class NameConstraints(object): + oid = ExtensionOID.NAME_CONSTRAINTS + + def __init__(self, permitted_subtrees, excluded_subtrees): + if permitted_subtrees is not None: + if not all( + isinstance(x, GeneralName) for x in permitted_subtrees + ): + raise TypeError( + "permitted_subtrees must be a list of GeneralName objects " + "or None" + ) + + self._validate_ip_name(permitted_subtrees) + + if excluded_subtrees is not None: + if not all( + isinstance(x, GeneralName) for x in excluded_subtrees + ): + raise TypeError( + "excluded_subtrees must be a list of GeneralName objects " + "or None" + ) + + self._validate_ip_name(excluded_subtrees) + + if permitted_subtrees is None and excluded_subtrees is None: + raise ValueError( + "At least one of permitted_subtrees and excluded_subtrees " + "must not be None" + ) + + self._permitted_subtrees = permitted_subtrees + self._excluded_subtrees = excluded_subtrees + + def __eq__(self, other): + if not isinstance(other, NameConstraints): + return NotImplemented + + return ( + self.excluded_subtrees == other.excluded_subtrees and + self.permitted_subtrees == other.permitted_subtrees + ) + + def __ne__(self, other): + return not self == other + + def _validate_ip_name(self, tree): + if any(isinstance(name, IPAddress) and not isinstance( + name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network) + ) for name in tree): + raise TypeError( + "IPAddress name constraints must be an IPv4Network or" + " IPv6Network object" + ) + + def __repr__(self): + return ( + u"".format(self) + ) + + permitted_subtrees = utils.read_only_property("_permitted_subtrees") + excluded_subtrees = utils.read_only_property("_excluded_subtrees") + + +class Extension(object): + def __init__(self, oid, critical, value): + if not isinstance(oid, ObjectIdentifier): + raise TypeError( + "oid argument must be an ObjectIdentifier instance." 
+ ) + + if not isinstance(critical, bool): + raise TypeError("critical must be a boolean value") + + self._oid = oid + self._critical = critical + self._value = value + + oid = utils.read_only_property("_oid") + critical = utils.read_only_property("_critical") + value = utils.read_only_property("_value") + + def __repr__(self): + return ("").format(self) + + def __eq__(self, other): + if not isinstance(other, Extension): + return NotImplemented + + return ( + self.oid == other.oid and + self.critical == other.critical and + self.value == other.value + ) + + def __ne__(self, other): + return not self == other + + +class GeneralNames(object): + def __init__(self, general_names): + if not all(isinstance(x, GeneralName) for x in general_names): + raise TypeError( + "Every item in the general_names list must be an " + "object conforming to the GeneralName interface" + ) + + self._general_names = general_names + + def __iter__(self): + return iter(self._general_names) + + def __len__(self): + return len(self._general_names) + + def get_values_for_type(self, type): + # Return the value of each GeneralName, except for OtherName instances + # which we return directly because it has two important properties not + # just one value. + objs = (i for i in self if isinstance(i, type)) + if type != OtherName: + objs = (i.value for i in objs) + return list(objs) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, GeneralNames): + return NotImplemented + + return self._general_names == other._general_names + + def __ne__(self, other): + return not self == other + + def __getitem__(self, idx): + return self._general_names[idx] + + +@utils.register_interface(ExtensionType) +class SubjectAlternativeName(object): + oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME + + def __init__(self, general_names): + self._general_names = GeneralNames(general_names) + + def __iter__(self): + return iter(self._general_names) + + def __len__(self): + return len(self._general_names) + + def get_values_for_type(self, type): + return self._general_names.get_values_for_type(type) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, SubjectAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __getitem__(self, idx): + return self._general_names[idx] + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(ExtensionType) +class IssuerAlternativeName(object): + oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME + + def __init__(self, general_names): + self._general_names = GeneralNames(general_names) + + def __iter__(self): + return iter(self._general_names) + + def __len__(self): + return len(self._general_names) + + def get_values_for_type(self, type): + return self._general_names.get_values_for_type(type) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, IssuerAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __ne__(self, other): + return not self == other + + def __getitem__(self, idx): + return self._general_names[idx] + + +@utils.register_interface(ExtensionType) +class CertificateIssuer(object): + oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER + + def __init__(self, general_names): + self._general_names = GeneralNames(general_names) + + def __iter__(self): + return iter(self._general_names) + + def 
__len__(self):
+        return len(self._general_names)
+
+    def get_values_for_type(self, type):
+        return self._general_names.get_values_for_type(type)
+
+    def __repr__(self):
+        return "<CertificateIssuer({0})>".format(self._general_names)
+
+    def __eq__(self, other):
+        if not isinstance(other, CertificateIssuer):
+            return NotImplemented
+
+        return self._general_names == other._general_names
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __getitem__(self, idx):
+        return self._general_names[idx]
+
+
+@utils.register_interface(ExtensionType)
+class CRLReason(object):
+    oid = CRLEntryExtensionOID.CRL_REASON
+
+    def __init__(self, reason):
+        if not isinstance(reason, ReasonFlags):
+            raise TypeError("reason must be an element from ReasonFlags")
+
+        self._reason = reason
+
+    def __repr__(self):
+        return "<CRLReason(reason={0})>".format(self._reason)
+
+    def __eq__(self, other):
+        if not isinstance(other, CRLReason):
+            return NotImplemented
+
+        return self.reason == other.reason
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __hash__(self):
+        return hash(self.reason)
+
+    reason = utils.read_only_property("_reason")
+
+
+@utils.register_interface(ExtensionType)
+class InvalidityDate(object):
+    oid = CRLEntryExtensionOID.INVALIDITY_DATE
+
+    def __init__(self, invalidity_date):
+        if not isinstance(invalidity_date, datetime.datetime):
+            raise TypeError("invalidity_date must be a datetime.datetime")
+
+        self._invalidity_date = invalidity_date
+
+    def __repr__(self):
+        return "<InvalidityDate(invalidity_date={0})>".format(
+            self._invalidity_date
+        )
+
+    def __eq__(self, other):
+        if not isinstance(other, InvalidityDate):
+            return NotImplemented
+
+        return self.invalidity_date == other.invalidity_date
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __hash__(self):
+        return hash(self.invalidity_date)
+
+    invalidity_date = utils.read_only_property("_invalidity_date")
+
+
+@utils.register_interface(ExtensionType)
+class UnrecognizedExtension(object):
+    def __init__(self, oid, value):
+        if not isinstance(oid, ObjectIdentifier):
+            raise TypeError("oid must be an ObjectIdentifier")
+        self._oid = oid
+        self._value = value
+
+    oid = utils.read_only_property("_oid")
+    value = utils.read_only_property("_value")
+
+    def __repr__(self):
+        return (
+            "<UnrecognizedExtension(oid={0.oid}, value={0.value!r})>".format(
+                self
+            )
+        )
+
+    def __eq__(self, other):
+        if not isinstance(other, UnrecognizedExtension):
+            return NotImplemented
+
+        return self.oid == other.oid and self.value == other.value
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __hash__(self):
+        return hash((self.oid, self.value))
diff --git a/lib/python3.4/site-packages/cryptography/x509/general_name.py b/lib/python3.4/site-packages/cryptography/x509/general_name.py
new file mode 100644
index 0000000..6745243
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/x509/general_name.py
@@ -0,0 +1,271 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
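# A short consumption sketch for the Extensions container defined in
# extensions.py above (illustrative only, not part of this diff): `cert` is
# assumed to be an x509.Certificate obtained elsewhere, e.g. via
# x509.load_pem_x509_certificate.

from cryptography import x509


def dns_names(cert):
    # get_extension_for_class raises ExtensionNotFound when the
    # certificate carries no subjectAltName extension.
    try:
        ext = cert.extensions.get_extension_for_class(
            x509.SubjectAlternativeName
        )
    except x509.ExtensionNotFound:
        return []
    # get_values_for_type unwraps each matching GeneralName to its .value.
    return ext.value.get_values_for_type(x509.DNSName)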
+ +from __future__ import absolute_import, division, print_function + +import abc +import ipaddress +from email.utils import parseaddr + +import idna + +import six + +from six.moves import urllib_parse + +from cryptography import utils +from cryptography.x509.name import Name +from cryptography.x509.oid import ObjectIdentifier + + +_GENERAL_NAMES = { + 0: "otherName", + 1: "rfc822Name", + 2: "dNSName", + 3: "x400Address", + 4: "directoryName", + 5: "ediPartyName", + 6: "uniformResourceIdentifier", + 7: "iPAddress", + 8: "registeredID", +} + + +class UnsupportedGeneralNameType(Exception): + def __init__(self, msg, type): + super(UnsupportedGeneralNameType, self).__init__(msg) + self.type = type + + +@six.add_metaclass(abc.ABCMeta) +class GeneralName(object): + @abc.abstractproperty + def value(self): + """ + Return the value of the object + """ + + +@utils.register_interface(GeneralName) +class RFC822Name(object): + def __init__(self, value): + if not isinstance(value, six.text_type): + raise TypeError("value must be a unicode string") + + name, address = parseaddr(value) + parts = address.split(u"@") + if name or not address: + # parseaddr has found a name (e.g. Name ) or the entire + # value is an empty string. + raise ValueError("Invalid rfc822name value") + elif len(parts) == 1: + # Single label email name. This is valid for local delivery. + # No IDNA encoding needed since there is no domain component. + encoded = address.encode("ascii") + else: + # A normal email of the form user@domain.com. Let's attempt to + # encode the domain component and reconstruct the address. + encoded = parts[0].encode("ascii") + b"@" + idna.encode(parts[1]) + + self._value = value + self._encoded = encoded + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, RFC822Name): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.value) + + +@utils.register_interface(GeneralName) +class DNSName(object): + def __init__(self, value): + if not isinstance(value, six.text_type): + raise TypeError("value must be a unicode string") + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, DNSName): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class UniformResourceIdentifier(object): + def __init__(self, value): + if not isinstance(value, six.text_type): + raise TypeError("value must be a unicode string") + + parsed = urllib_parse.urlparse(value) + if not parsed.hostname: + netloc = "" + elif parsed.port: + netloc = ( + idna.encode(parsed.hostname) + + ":{0}".format(parsed.port).encode("ascii") + ).decode("ascii") + else: + netloc = idna.encode(parsed.hostname).decode("ascii") + + # Note that building a URL in this fashion means it should be + # semantically indistinguishable from the original but is not + # guaranteed to be exactly the same. 
+ uri = urllib_parse.urlunparse(( + parsed.scheme, + netloc, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment + )).encode("ascii") + + self._value = value + self._encoded = uri + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, UniformResourceIdentifier): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.value) + + +@utils.register_interface(GeneralName) +class DirectoryName(object): + def __init__(self, value): + if not isinstance(value, Name): + raise TypeError("value must be a Name") + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, DirectoryName): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class RegisteredID(object): + def __init__(self, value): + if not isinstance(value, ObjectIdentifier): + raise TypeError("value must be an ObjectIdentifier") + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, RegisteredID): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class IPAddress(object): + def __init__(self, value): + if not isinstance( + value, + ( + ipaddress.IPv4Address, + ipaddress.IPv6Address, + ipaddress.IPv4Network, + ipaddress.IPv6Network + ) + ): + raise TypeError( + "value must be an instance of ipaddress.IPv4Address, " + "ipaddress.IPv6Address, ipaddress.IPv4Network, or " + "ipaddress.IPv6Network" + ) + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, IPAddress): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class OtherName(object): + def __init__(self, type_id, value): + if not isinstance(type_id, ObjectIdentifier): + raise TypeError("type_id must be an ObjectIdentifier") + if not isinstance(value, bytes): + raise TypeError("value must be a binary string") + + self._type_id = type_id + self._value = value + + type_id = utils.read_only_property("_type_id") + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format( + self.type_id, self.value) + + def __eq__(self, other): + if not isinstance(other, OtherName): + return NotImplemented + + return self.type_id == other.type_id and self.value == other.value + + def __ne__(self, other): + return not self == other diff --git a/lib/python3.4/site-packages/cryptography/x509/name.py b/lib/python3.4/site-packages/cryptography/x509/name.py new file mode 100644 index 0000000..d62341d --- /dev/null +++ b/lib/python3.4/site-packages/cryptography/x509/name.py @@ -0,0 +1,83 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
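# An illustrative sketch of the GeneralName wrappers defined in
# general_name.py above (not part of this diff): each wrapper type-checks
# its input and exposes it through a read-only `value` property.

import ipaddress

from cryptography import x509

san = x509.SubjectAlternativeName([
    x509.DNSName(u"example.com"),
    x509.IPAddress(ipaddress.ip_address(u"192.0.2.1")),
])
# A certificate or CSR builder (see base.py above) would attach it with:
#     builder.add_extension(san, critical=False)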
+
+from __future__ import absolute_import, division, print_function
+
+import six
+
+from cryptography import utils
+from cryptography.x509.oid import NameOID, ObjectIdentifier
+
+
+class NameAttribute(object):
+    def __init__(self, oid, value):
+        if not isinstance(oid, ObjectIdentifier):
+            raise TypeError(
+                "oid argument must be an ObjectIdentifier instance."
+            )
+
+        if not isinstance(value, six.text_type):
+            raise TypeError(
+                "value argument must be a text type."
+            )
+
+        if oid == NameOID.COUNTRY_NAME and len(value.encode("utf8")) != 2:
+            raise ValueError(
+                "Country name must be a 2 character country code"
+            )
+
+        self._oid = oid
+        self._value = value
+
+    oid = utils.read_only_property("_oid")
+    value = utils.read_only_property("_value")
+
+    def __eq__(self, other):
+        if not isinstance(other, NameAttribute):
+            return NotImplemented
+
+        return (
+            self.oid == other.oid and
+            self.value == other.value
+        )
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __hash__(self):
+        return hash((self.oid, self.value))
+
+    def __repr__(self):
+        return "<NameAttribute(oid={0.oid}, value={0.value!r})>".format(self)
+
+
+class Name(object):
+    def __init__(self, attributes):
+        self._attributes = attributes
+
+    def get_attributes_for_oid(self, oid):
+        return [i for i in self if i.oid == oid]
+
+    def __eq__(self, other):
+        if not isinstance(other, Name):
+            return NotImplemented
+
+        return self._attributes == other._attributes
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __hash__(self):
+        # TODO: this is relatively expensive, if this looks like a bottleneck
+        # for you, consider optimizing!
+        return hash(tuple(self._attributes))
+
+    def __iter__(self):
+        return iter(self._attributes)
+
+    def __len__(self):
+        return len(self._attributes)
+
+    def __repr__(self):
+        return "<Name({0!r})>".format(self._attributes)
diff --git a/lib/python3.4/site-packages/cryptography/x509/oid.py b/lib/python3.4/site-packages/cryptography/x509/oid.py
new file mode 100644
index 0000000..48e9d69
--- /dev/null
+++ b/lib/python3.4/site-packages/cryptography/x509/oid.py
@@ -0,0 +1,243 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.hazmat.primitives import hashes
+
+
+class ObjectIdentifier(object):
+    def __init__(self, dotted_string):
+        self._dotted_string = dotted_string
+
+        nodes = self._dotted_string.split(".")
+        intnodes = []
+
+        # There must be at least 2 nodes, the first node must be 0..2, and
+        # if less than 2, the second node cannot have a value outside the
+        # range 0..39. All nodes must be integers.
+ for node in nodes: + try: + intnodes.append(int(node, 0)) + except ValueError: + raise ValueError( + "Malformed OID: %s (non-integer nodes)" % ( + self._dotted_string)) + + if len(nodes) < 2: + raise ValueError( + "Malformed OID: %s (insufficient number of nodes)" % ( + self._dotted_string)) + + if intnodes[0] > 2: + raise ValueError( + "Malformed OID: %s (first node outside valid range)" % ( + self._dotted_string)) + + if intnodes[0] < 2 and intnodes[1] >= 40: + raise ValueError( + "Malformed OID: %s (second node outside valid range)" % ( + self._dotted_string)) + + def __eq__(self, other): + if not isinstance(other, ObjectIdentifier): + return NotImplemented + + return self.dotted_string == other.dotted_string + + def __ne__(self, other): + return not self == other + + def __repr__(self): + return "".format( + self.dotted_string, + self._name + ) + + def __hash__(self): + return hash(self.dotted_string) + + @property + def _name(self): + return _OID_NAMES.get(self, "Unknown OID") + + dotted_string = utils.read_only_property("_dotted_string") + + +class ExtensionOID(object): + SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9") + SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14") + KEY_USAGE = ObjectIdentifier("2.5.29.15") + SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17") + ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18") + BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19") + NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30") + CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31") + CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32") + POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33") + AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35") + POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36") + EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37") + FRESHEST_CRL = ObjectIdentifier("2.5.29.46") + INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54") + AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1") + SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11") + OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5") + CRL_NUMBER = ObjectIdentifier("2.5.29.20") + + +class CRLEntryExtensionOID(object): + CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29") + CRL_REASON = ObjectIdentifier("2.5.29.21") + INVALIDITY_DATE = ObjectIdentifier("2.5.29.24") + + +class NameOID(object): + COMMON_NAME = ObjectIdentifier("2.5.4.3") + COUNTRY_NAME = ObjectIdentifier("2.5.4.6") + LOCALITY_NAME = ObjectIdentifier("2.5.4.7") + STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8") + ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10") + ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11") + SERIAL_NUMBER = ObjectIdentifier("2.5.4.5") + SURNAME = ObjectIdentifier("2.5.4.4") + GIVEN_NAME = ObjectIdentifier("2.5.4.42") + TITLE = ObjectIdentifier("2.5.4.12") + GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44") + DN_QUALIFIER = ObjectIdentifier("2.5.4.46") + PSEUDONYM = ObjectIdentifier("2.5.4.65") + DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25") + EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1") + JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3") + JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1") + JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier( + "1.3.6.1.4.1.311.60.2.1.2" + ) + BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15") + + +class SignatureAlgorithmOID(object): + RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4") + RSA_WITH_SHA1 = 
ObjectIdentifier("1.2.840.113549.1.1.5") + RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14") + RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11") + RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12") + RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13") + ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1") + ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1") + ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2") + ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3") + ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4") + DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3") + DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1") + DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2") + +_SIG_OIDS_TO_HASH = { + SignatureAlgorithmOID.RSA_WITH_MD5.dotted_string: hashes.MD5(), + SignatureAlgorithmOID.RSA_WITH_SHA1.dotted_string: hashes.SHA1(), + SignatureAlgorithmOID.RSA_WITH_SHA224.dotted_string: hashes.SHA224(), + SignatureAlgorithmOID.RSA_WITH_SHA256.dotted_string: hashes.SHA256(), + SignatureAlgorithmOID.RSA_WITH_SHA384.dotted_string: hashes.SHA384(), + SignatureAlgorithmOID.RSA_WITH_SHA512.dotted_string: hashes.SHA512(), + SignatureAlgorithmOID.ECDSA_WITH_SHA1.dotted_string: hashes.SHA1(), + SignatureAlgorithmOID.ECDSA_WITH_SHA224.dotted_string: hashes.SHA224(), + SignatureAlgorithmOID.ECDSA_WITH_SHA256.dotted_string: hashes.SHA256(), + SignatureAlgorithmOID.ECDSA_WITH_SHA384.dotted_string: hashes.SHA384(), + SignatureAlgorithmOID.ECDSA_WITH_SHA512.dotted_string: hashes.SHA512(), + SignatureAlgorithmOID.DSA_WITH_SHA1.dotted_string: hashes.SHA1(), + SignatureAlgorithmOID.DSA_WITH_SHA224.dotted_string: hashes.SHA224(), + SignatureAlgorithmOID.DSA_WITH_SHA256.dotted_string: hashes.SHA256() +} + + +class ExtendedKeyUsageOID(object): + SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1") + CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2") + CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3") + EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4") + TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8") + OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9") + + +class AuthorityInformationAccessOID(object): + CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2") + OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1") + + +class CertificatePoliciesOID(object): + CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1") + CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2") + ANY_POLICY = ObjectIdentifier("2.5.29.32.0") + +_OID_NAMES = { + NameOID.COMMON_NAME: "commonName", + NameOID.COUNTRY_NAME: "countryName", + NameOID.LOCALITY_NAME: "localityName", + NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName", + NameOID.ORGANIZATION_NAME: "organizationName", + NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName", + NameOID.SERIAL_NUMBER: "serialNumber", + NameOID.SURNAME: "surname", + NameOID.GIVEN_NAME: "givenName", + NameOID.TITLE: "title", + NameOID.GENERATION_QUALIFIER: "generationQualifier", + NameOID.DN_QUALIFIER: "dnQualifier", + NameOID.PSEUDONYM: "pseudonym", + NameOID.DOMAIN_COMPONENT: "domainComponent", + NameOID.EMAIL_ADDRESS: "emailAddress", + NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName", + NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName", + NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: ( + "jurisdictionStateOrProvinceName" + ), + NameOID.BUSINESS_CATEGORY: "businessCategory", + + SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption", + 
SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption", + SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1", + SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224", + SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256", + SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384", + SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512", + SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1", + SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224", + SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256", + ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth", + ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth", + ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning", + ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection", + ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping", + ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning", + ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes", + ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier", + ExtensionOID.KEY_USAGE: "keyUsage", + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName", + ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName", + ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints", + CRLEntryExtensionOID.CRL_REASON: "cRLReason", + CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate", + CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer", + ExtensionOID.NAME_CONSTRAINTS: "nameConstraints", + ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints", + ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies", + ExtensionOID.POLICY_MAPPINGS: "policyMappings", + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier", + ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints", + ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage", + ExtensionOID.FRESHEST_CRL: "freshestCRL", + ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy", + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess", + ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess", + ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck", + ExtensionOID.CRL_NUMBER: "cRLNumber", + AuthorityInformationAccessOID.OCSP: "OCSP", + AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers", + CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps", + CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice", +} diff --git a/lib/python3.4/site-packages/easy_install.py b/lib/python3.4/site-packages/easy_install.py new file mode 100644 index 0000000..d87e984 --- /dev/null +++ b/lib/python3.4/site-packages/easy_install.py @@ -0,0 +1,5 @@ +"""Run the EasyInstall command""" + +if __name__ == '__main__': + from setuptools.command.easy_install import main + main() diff --git a/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/PKG-INFO b/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/PKG-INFO new file mode 100644 index 0000000..654981e --- /dev/null +++ b/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/PKG-INFO @@ -0,0 +1,28 @@ +Metadata-Version: 1.1 +Name: ed25519 +Version: 1.4 +Summary: Ed25519 public-key signatures +Home-page: https://github.com/warner/python-ed25519 +Author: Brian Warner +Author-email: warner-python-ed25519@lothar.com +License: MIT +Description: Python bindings to the Ed25519 public-key signature system. 
+ + This offers a comfortable python interface to a C implementation of the + Ed25519 public-key signature system (http://ed25519.cr.yp.to/), using the + portable 'ref' code from the 'SUPERCOP' benchmarking suite. + + This system provides high (128-bit) security, short (32-byte) keys, short + (64-byte) signatures, and fast (2-6ms) operation. Please see the README for + more details. + +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Topic :: Security :: Cryptography diff --git a/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/SOURCES.txt b/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/SOURCES.txt new file mode 100644 index 0000000..ebe0028 --- /dev/null +++ b/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/SOURCES.txt @@ -0,0 +1,39 @@ +LICENSE +MANIFEST.in +Makefile +NEWS +README.md +kat-ed25519.txt +kat.py +setup.cfg +test_ed25519_kat.py +versioneer.py +bin/edsig +ed25519.egg-info/PKG-INFO +ed25519.egg-info/SOURCES.txt +ed25519.egg-info/dependency_links.txt +ed25519.egg-info/top_level.txt +src/ed25519/__init__.py +src/ed25519/_version.py +src/ed25519/keys.py +src/ed25519/test_ed25519.py +src/ed25519-glue/ed25519module.c +src/ed25519-supercop-ref/Makefile +src/ed25519-supercop-ref/api.h +src/ed25519-supercop-ref/crypto_int32.h +src/ed25519-supercop-ref/crypto_sign.h +src/ed25519-supercop-ref/crypto_uint32.h +src/ed25519-supercop-ref/crypto_verify_32.h +src/ed25519-supercop-ref/ed25519.c +src/ed25519-supercop-ref/fe25519.c +src/ed25519-supercop-ref/fe25519.h +src/ed25519-supercop-ref/ge25519.c +src/ed25519-supercop-ref/ge25519.h +src/ed25519-supercop-ref/ge25519_base.data +src/ed25519-supercop-ref/sc25519.c +src/ed25519-supercop-ref/sc25519.h +src/ed25519-supercop-ref/sha512-blocks.c +src/ed25519-supercop-ref/sha512-hash.c +src/ed25519-supercop-ref/sha512.h +src/ed25519-supercop-ref/test.c +src/ed25519-supercop-ref/verify.c \ No newline at end of file diff --git a/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/dependency_links.txt b/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/installed-files.txt b/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/installed-files.txt new file mode 100644 index 0000000..552794d --- /dev/null +++ b/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/installed-files.txt @@ -0,0 +1,15 @@ +../ed25519/keys.py +../ed25519/test_ed25519.py +../ed25519/__init__.py +../ed25519/_version.py +../ed25519/__pycache__/keys.cpython-34.pyc +../ed25519/__pycache__/test_ed25519.cpython-34.pyc +../ed25519/__pycache__/__init__.cpython-34.pyc +../ed25519/__pycache__/_version.cpython-34.pyc +../ed25519/_ed25519.cpython-34m.so +./ +PKG-INFO +dependency_links.txt +SOURCES.txt +top_level.txt +../../../../bin/edsig diff --git a/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/top_level.txt b/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/top_level.txt new file mode 100644 index 0000000..1da7fc0 --- 
/dev/null +++ b/lib/python3.4/site-packages/ed25519-1.4-py3.4.egg-info/top_level.txt @@ -0,0 +1 @@ +ed25519 diff --git a/lib/python3.4/site-packages/ed25519/__init__.py b/lib/python3.4/site-packages/ed25519/__init__.py new file mode 100644 index 0000000..7ad9cd8 --- /dev/null +++ b/lib/python3.4/site-packages/ed25519/__init__.py @@ -0,0 +1,11 @@ +from .keys import (BadSignatureError, BadPrefixError, + create_keypair, SigningKey, VerifyingKey, + remove_prefix, to_ascii, from_ascii) + +(BadSignatureError, BadPrefixError, + create_keypair, SigningKey, VerifyingKey, + remove_prefix, to_ascii, from_ascii) # hush pyflakes + +from ._version import get_versions +__version__ = str(get_versions()['version']) +del get_versions diff --git a/lib/python3.4/site-packages/ed25519/_ed25519.cpython-34m.so b/lib/python3.4/site-packages/ed25519/_ed25519.cpython-34m.so new file mode 100755 index 0000000..5755baf Binary files /dev/null and b/lib/python3.4/site-packages/ed25519/_ed25519.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/ed25519/_version.py b/lib/python3.4/site-packages/ed25519/_version.py new file mode 100644 index 0000000..7c19428 --- /dev/null +++ b/lib/python3.4/site-packages/ed25519/_version.py @@ -0,0 +1,21 @@ + +# This file was generated by 'versioneer.py' (0.15) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. + +import json +import sys + +version_json = ''' +{ + "dirty": false, + "error": null, + "full-revisionid": "a8732e8b6ba4e04e83c7ef05f86c565a2b2fc278", + "version": "1.4" +} +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) diff --git a/lib/python3.4/site-packages/ed25519/keys.py b/lib/python3.4/site-packages/ed25519/keys.py new file mode 100644 index 0000000..2803174 --- /dev/null +++ b/lib/python3.4/site-packages/ed25519/keys.py @@ -0,0 +1,191 @@ +import os +import base64 +from . import _ed25519 +BadSignatureError = _ed25519.BadSignatureError + +def create_keypair(entropy=os.urandom): + SEEDLEN = int(_ed25519.SECRETKEYBYTES/2) + assert SEEDLEN == 32 + seed = entropy(SEEDLEN) + sk = SigningKey(seed) + vk = sk.get_verifying_key() + return sk, vk + +class BadPrefixError(Exception): + pass + +def remove_prefix(s_bytes, prefix): + assert(type(s_bytes) == type(prefix)) + if s_bytes[:len(prefix)] != prefix: + raise BadPrefixError("did not see expected '%s' prefix" % (prefix,)) + return s_bytes[len(prefix):] + +def to_ascii(s_bytes, prefix="", encoding="base64"): + """Return a version-prefixed ASCII representation of the given binary + string. 'encoding' indicates how to do the encoding, and can be one of: + * base64 + * base32 + * base16 (or hex) + + This function handles bytes, not bits, so it does not append any trailing + '=' (unlike standard base64.b64encode). It also lowercases the base32 + output. + + 'prefix' will be prepended to the encoded form, and is useful for + distinguishing the purpose and version of the binary string. E.g. you + could prepend 'pub0-' to a VerifyingKey string to allow the receiving + code to raise a useful error if someone pasted in a signature string by + mistake. 
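+
+    For example (illustrative; the 'key0-' prefix is arbitrary), on Python 3::
+
+        >>> to_ascii(b'hi', prefix='key0-', encoding='base64')
+        b'key0-aGk'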
+ """ + assert isinstance(s_bytes, bytes) + if not isinstance(prefix, bytes): + prefix = prefix.encode('ascii') + if encoding == "base64": + s_ascii = base64.b64encode(s_bytes).decode('ascii').rstrip("=") + elif encoding == "base32": + s_ascii = base64.b32encode(s_bytes).decode('ascii').rstrip("=").lower() + elif encoding in ("base16", "hex"): + s_ascii = base64.b16encode(s_bytes).decode('ascii').lower() + else: + raise NotImplementedError + return prefix+s_ascii.encode('ascii') + +def from_ascii(s_ascii, prefix="", encoding="base64"): + """This is the opposite of to_ascii. It will throw BadPrefixError if + the prefix is not found. + """ + if isinstance(s_ascii, bytes): + s_ascii = s_ascii.decode('ascii') + if isinstance(prefix, bytes): + prefix = prefix.decode('ascii') + s_ascii = remove_prefix(s_ascii.strip(), prefix) + if encoding == "base64": + s_ascii += "=" * ((4 - len(s_ascii) % 4) % 4) + s_bytes = base64.b64decode(s_ascii.encode('ascii')) + elif encoding == "base32": + s_ascii += "=" * ((8 - len(s_ascii) % 8) % 8) + s_bytes = base64.b32decode(s_ascii.upper().encode('ascii')) + elif encoding in ("base16", "hex"): + s_bytes = base64.b16decode(s_ascii.upper().encode('ascii')) + else: + raise NotImplementedError + return s_bytes + +class SigningKey(object): + # this can only be used to reconstruct a key created by create_keypair(). + def __init__(self, sk_s, prefix="", encoding=None): + assert isinstance(sk_s, bytes) + if not isinstance(prefix, bytes): + prefix = prefix.encode('ascii') + sk_s = remove_prefix(sk_s, prefix) + if encoding is not None: + sk_s = from_ascii(sk_s, encoding=encoding) + if len(sk_s) == 32: + # create from seed + vk_s, sk_s = _ed25519.publickey(sk_s) + else: + if len(sk_s) != 32+32: + raise ValueError("SigningKey takes 32-byte seed or 64-byte string") + self.sk_s = sk_s # seed+pubkey + self.vk_s = sk_s[32:] # just pubkey + + def to_bytes(self, prefix=""): + if not isinstance(prefix, bytes): + prefix = prefix.encode('ascii') + return prefix+self.sk_s + + def to_ascii(self, prefix="", encoding=None): + assert encoding + if not isinstance(prefix, bytes): + prefix = prefix.encode('ascii') + return to_ascii(self.to_seed(), prefix, encoding) + + def to_seed(self, prefix=""): + if not isinstance(prefix, bytes): + prefix = prefix.encode('ascii') + return prefix+self.sk_s[:32] + + def __eq__(self, them): + if not isinstance(them, object): return False + return (them.__class__ == self.__class__ + and them.sk_s == self.sk_s) + + def get_verifying_key(self): + return VerifyingKey(self.vk_s) + + def sign(self, msg, prefix="", encoding=None): + assert isinstance(msg, bytes) + if not isinstance(prefix, bytes): + prefix = prefix.encode('ascii') + sig_and_msg = _ed25519.sign(msg, self.sk_s) + # the response is R+S+msg + sig_R = sig_and_msg[0:32] + sig_S = sig_and_msg[32:64] + msg_out = sig_and_msg[64:] + sig_out = sig_R + sig_S + assert msg_out == msg + if encoding: + return to_ascii(sig_out, prefix, encoding) + return prefix+sig_out + +class VerifyingKey(object): + def __init__(self, vk_s, prefix="", encoding=None): + if not isinstance(prefix, bytes): + prefix = prefix.encode('ascii') + if not isinstance(vk_s, bytes): + vk_s = vk_s.encode('ascii') + assert isinstance(vk_s, bytes) + vk_s = remove_prefix(vk_s, prefix) + if encoding is not None: + vk_s = from_ascii(vk_s, encoding=encoding) + + assert len(vk_s) == 32 + self.vk_s = vk_s + + def to_bytes(self, prefix=""): + if not isinstance(prefix, bytes): + prefix = prefix.encode('ascii') + return prefix+self.vk_s + + def 
to_ascii(self, prefix="", encoding=None): + assert encoding + if not isinstance(prefix, bytes): + prefix = prefix.encode('ascii') + return to_ascii(self.vk_s, prefix, encoding) + + def __eq__(self, them): + if not isinstance(them, object): return False + return (them.__class__ == self.__class__ + and them.vk_s == self.vk_s) + + def verify(self, sig, msg, prefix="", encoding=None): + if not isinstance(sig, bytes): + sig = sig.encode('ascii') + if not isinstance(prefix, bytes): + prefix = prefix.encode('ascii') + assert isinstance(sig, bytes) + assert isinstance(msg, bytes) + if encoding: + sig = from_ascii(sig, prefix, encoding) + else: + sig = remove_prefix(sig, prefix) + assert len(sig) == 64 + sig_R = sig[:32] + sig_S = sig[32:] + sig_and_msg = sig_R + sig_S + msg + # this might raise BadSignatureError + msg2 = _ed25519.open(sig_and_msg, self.vk_s) + assert msg2 == msg + +def selftest(): + message = b"crypto libraries should always test themselves at powerup" + sk = SigningKey(b"priv0-VIsfn5OFGa09Un2MR6Hm7BQ5++xhcQskU2OGXG8jSJl4cWLZrRrVcSN2gVYMGtZT+3354J5jfmqAcuRSD9KIyg", + prefix="priv0-", encoding="base64") + vk = VerifyingKey(b"pub0-eHFi2a0a1XEjdoFWDBrWU/t9+eCeY35qgHLkUg/SiMo", + prefix="pub0-", encoding="base64") + assert sk.get_verifying_key() == vk + sig = sk.sign(message, prefix="sig0-", encoding="base64") + assert sig == b"sig0-E/QrwtSF52x8+q0l4ahA7eJbRKc777ClKNg217Q0z4fiYMCdmAOI+rTLVkiFhX6k3D+wQQfKdJYMxaTUFfv1DQ", sig + vk.verify(sig, message, prefix="sig0-", encoding="base64") + +selftest() diff --git a/lib/python3.4/site-packages/ed25519/test_ed25519.py b/lib/python3.4/site-packages/ed25519/test_ed25519.py new file mode 100644 index 0000000..8dea618 --- /dev/null +++ b/lib/python3.4/site-packages/ed25519/test_ed25519.py @@ -0,0 +1,274 @@ +from __future__ import print_function +import sys +import unittest +import time +from binascii import hexlify, unhexlify +import ed25519 +from ed25519 import _ed25519 as raw + +if sys.version_info[0] == 3: + def int2byte(i): + return bytes((i,)) +else: + int2byte = chr + +def flip_bit(s, bit=0, in_byte=-1): + as_bytes = [ord(b) if isinstance(b, str) else b for b in s] + as_bytes[in_byte] = as_bytes[in_byte] ^ (0x01<`_. +This version of the protocol is often referred to as “IDNA2008”. + +This library also provides support for Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +The library is also intended to act as a suitable replacement for +the “encodings.idna” module that comes with the Python standard library +but currently only supports the older, deprecated IDNA specification +(`RFC 3490 `_). + +Its basic functions are simply executed: + +.. code-block:: pycon + + >>> import idna + >>> idna.encode(u'ドメイン.テスト') + 'xn--eckwd4c7c.xn--zckzah' + >>> print idna.decode('xn--eckwd4c7c.xn--zckzah') + ドメイン.テスト + +Packages +-------- + +The latest tagged release version is published in the PyPI repository: + +.. image:: https://badge.fury.io/py/idna.svg + :target: http://badge.fury.io/py/idna + + +Installation +------------ + +To install this library, you can use PIP: + +.. code-block:: bash + + $ pip install idna + +Alternatively, you can install the package using the bundled setup script: + +.. code-block:: bash + + $ python setup.py install + +This library should work with Python 2.7, and Python 3.3 or later. + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a domain +name argument and perform a conversion to an A-label or U-label respectively. + +.. 
code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode(u'ドメイン.テスト')
+    'xn--eckwd4c7c.xn--zckzah'
+    >>> print idna.decode('xn--eckwd4c7c.xn--zckzah')
+    ドメイン.テスト
+
+You may use the codec encoding and decoding methods using the
+``idna.codec`` module.
+
+.. code-block:: pycon
+
+    >>> import idna.codec
+    >>> print u'домена.испытание'.encode('idna')
+    xn--80ahd1agd.xn--80akhbyknj4f
+    >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna')
+    домена.испытание
+
+Conversions can be applied on a per-label basis using the ``ulabel`` or ``alabel``
+functions if necessary:
+
+.. code-block:: pycon
+
+    >>> idna.alabel(u'测试')
+    'xn--0zwm56d'
+
+Compatibility Mapping (UTS #46)
++++++++++++++++++++++++++++++++
+
+As described in `RFC 5895 <http://tools.ietf.org/html/rfc5895>`_, the IDNA
+specification no longer includes mappings from different forms of input that
+a user may enter, to the form that is provided to the IDNA functions. This
+functionality is now considered by the specification to be a local
+user-interface issue distinct from IDNA conversion functionality.
+
+This library supports one user-level mapping: the one developed by the Unicode
+Consortium, known as `Unicode IDNA Compatibility Processing <http://unicode.org/reports/tr46/>`_.
+It provides for both regular mapping and transitional mapping.
+
+For example, "Königsgäßchen" is not a permissible label, as LATIN CAPITAL
+LETTER K is not allowed (nor are capital letters in general). UTS46 will convert
+this into lower case:
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode(u'Königsgäßchen')
+    ...
+    idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of u'K\xf6nigsg\xe4\xdfchen' not allowed
+    >>> idna.encode(u'Königsgäßchen', uts46=True)
+    'xn--knigsgchen-b4a3dun'
+
+Transitional processing provides conversions to help transition from the older
+2003 standard to the current standard. For example, in the original IDNA
+specification, the LATIN SMALL LETTER SHARP S (ß) was converted into two
+LATIN SMALL LETTER S (ss), whereas in the current IDNA specification this
+conversion is not performed.
+
+.. code-block:: pycon
+
+    >>> idna.encode(u'Königsgäßchen', uts46=True, transitional=True)
+    'xn--knigsgsschen-lcb0w'
+
+Implementors should use transitional processing with caution, only in rare
+cases where conversion from legacy labels to current labels must be performed
+(i.e. IDNA implementations that pre-date 2008). For typical applications
+that just need to convert labels, transitional processing is unlikely to be
+beneficial and could produce unexpected incompatible results.
+
+``encodings.idna`` Compatibility
+++++++++++++++++++++++++++++++++
+
+Function calls from the Python built-in ``encodings.idna`` module are
+mapped to their IDNA 2008 equivalents using the ``idna.compat`` module.
+Simply substitute the ``import`` clause in your code to refer to the
+new module name.
+
+Exceptions
+----------
+
+All errors raised during conversion following the specification derive
+from the ``idna.IDNAError`` base class.
+
+More specific exceptions that may be generated are ``idna.IDNABidiError``
+when the error reflects an illegal combination of left-to-right and right-to-left
+characters in a label; ``idna.InvalidCodepoint`` when a specific codepoint is
+an illegal character in an IDN label (i.e. INVALID); and ``idna.InvalidCodepointContext``
+when the codepoint is illegal based on its positional context (i.e. it is CONTEXTO
+or CONTEXTJ but the contextual requirements are not satisfied).
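+
+As a minimal sketch of defensive use (catching the ``idna.IDNAError`` base
+class also catches all of the more specific exceptions):
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> try:
+    ...     idna.encode(u'')
+    ... except idna.IDNAError as e:
+    ...     print('invalid domain: %s' % e)
+    invalid domain: Empty domain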
+
+Testing
+-------
+
+The library has a test suite based on each rule of the IDNA specification, as
+well as tests that are provided as part of the Unicode Technical Standard 46,
+`Unicode IDNA Compatibility Processing <http://unicode.org/reports/tr46/>`_.
+
+The tests are run automatically on each commit to the master branch of the
+idna git repository at Travis CI:
+
+.. image:: https://travis-ci.org/kjd/idna.svg?branch=master
+    :target: https://travis-ci.org/kjd/idna
+
+
diff --git a/lib/python3.4/site-packages/idna-2.1.dist-info/METADATA b/lib/python3.4/site-packages/idna-2.1.dist-info/METADATA
new file mode 100644
index 0000000..4bd477a
--- /dev/null
+++ b/lib/python3.4/site-packages/idna-2.1.dist-info/METADATA
@@ -0,0 +1,187 @@
+Metadata-Version: 2.0
+Name: idna
+Version: 2.1
+Summary: Internationalized Domain Names in Applications (IDNA)
+Home-page: https://github.com/kjd/idna
+Author: Kim Davies
+Author-email: kim@cynosure.com.au
+License: BSD-like
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Topic :: Internet :: Name Service (DNS)
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Utilities
+
+Internationalized Domain Names in Applications (IDNA)
+=====================================================
+
+A library to support the Internationalised Domain Names in Applications
+(IDNA) protocol as specified in `RFC 5891 <http://tools.ietf.org/html/rfc5891>`_.
+This version of the protocol is often referred to as “IDNA2008”.
+
+This library also provides support for Unicode Technical Standard 46,
+`Unicode IDNA Compatibility Processing <http://unicode.org/reports/tr46/>`_.
+
+The library is also intended to act as a suitable replacement for
+the “encodings.idna” module that comes with the Python standard library,
+which itself currently only supports the older, deprecated IDNA specification
+(`RFC 3490 <http://tools.ietf.org/html/rfc3490>`_).
+
+Its basic functions are simply executed:
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode(u'ドメイン.テスト')
+    'xn--eckwd4c7c.xn--zckzah'
+    >>> print idna.decode('xn--eckwd4c7c.xn--zckzah')
+    ドメイン.テスト
+
+Packages
+--------
+
+The latest tagged release version is published in the PyPI repository:
+
+.. image:: https://badge.fury.io/py/idna.svg
+    :target: http://badge.fury.io/py/idna
+
+
+Installation
+------------
+
+To install this library, you can use PIP:
+
+.. code-block:: bash
+
+    $ pip install idna
+
+Alternatively, you can install the package using the bundled setup script:
+
+.. code-block:: bash
+
+    $ python setup.py install
+
+This library should work with Python 2.7, and Python 3.3 or later.
+
+
+Usage
+-----
+
+For typical usage, the ``encode`` and ``decode`` functions will take a domain
+name argument and perform a conversion to an A-label or U-label respectively.
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode(u'ドメイン.テスト')
+    'xn--eckwd4c7c.xn--zckzah'
+    >>> print idna.decode('xn--eckwd4c7c.xn--zckzah')
+    ドメイン.テスト
+
+You may use the codec encoding and decoding methods using the
+``idna.codec`` module.
+
+.. code-block:: pycon
+
+    >>> import idna.codec
+    >>> print u'домена.испытание'.encode('idna')
+    xn--80ahd1agd.xn--80akhbyknj4f
+    >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna')
+    домена.испытание
+
+Conversions can be applied on a per-label basis using the ``ulabel`` or ``alabel``
+functions if necessary:
+
+.. code-block:: pycon
+
+    >>> idna.alabel(u'测试')
+    'xn--0zwm56d'
+
+Compatibility Mapping (UTS #46)
++++++++++++++++++++++++++++++++
+
+As described in `RFC 5895 <http://tools.ietf.org/html/rfc5895>`_, the IDNA
+specification no longer includes mappings from different forms of input that
+a user may enter, to the form that is provided to the IDNA functions. This
+functionality is now considered by the specification to be a local
+user-interface issue distinct from IDNA conversion functionality.
+
+This library supports one user-level mapping: the one developed by the Unicode
+Consortium, known as `Unicode IDNA Compatibility Processing <http://unicode.org/reports/tr46/>`_.
+It provides for both regular mapping and transitional mapping.
+
+For example, "Königsgäßchen" is not a permissible label, as LATIN CAPITAL
+LETTER K is not allowed (nor are capital letters in general). UTS46 will convert
+this into lower case:
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode(u'Königsgäßchen')
+    ...
+    idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of u'K\xf6nigsg\xe4\xdfchen' not allowed
+    >>> idna.encode(u'Königsgäßchen', uts46=True)
+    'xn--knigsgchen-b4a3dun'
+
+Transitional processing provides conversions to help transition from the older
+2003 standard to the current standard. For example, in the original IDNA
+specification, the LATIN SMALL LETTER SHARP S (ß) was converted into two
+LATIN SMALL LETTER S (ss), whereas in the current IDNA specification this
+conversion is not performed.
+
+.. code-block:: pycon
+
+    >>> idna.encode(u'Königsgäßchen', uts46=True, transitional=True)
+    'xn--knigsgsschen-lcb0w'
+
+Implementors should use transitional processing with caution, only in rare
+cases where conversion from legacy labels to current labels must be performed
+(i.e. IDNA implementations that pre-date 2008). For typical applications
+that just need to convert labels, transitional processing is unlikely to be
+beneficial and could produce unexpected incompatible results.
+
+``encodings.idna`` Compatibility
+++++++++++++++++++++++++++++++++
+
+Function calls from the Python built-in ``encodings.idna`` module are
+mapped to their IDNA 2008 equivalents using the ``idna.compat`` module.
+Simply substitute the ``import`` clause in your code to refer to the
+new module name.
+
+Exceptions
+----------
+
+All errors raised during conversion following the specification derive
+from the ``idna.IDNAError`` base class.
+
+More specific exceptions that may be generated are ``idna.IDNABidiError``
+when the error reflects an illegal combination of left-to-right and right-to-left
+characters in a label; ``idna.InvalidCodepoint`` when a specific codepoint is
+an illegal character in an IDN label (i.e. INVALID); and ``idna.InvalidCodepointContext``
+when the codepoint is illegal based on its positional context (i.e. it is CONTEXTO
+or CONTEXTJ but the contextual requirements are not satisfied).
+
+Testing
+-------
+
+The library has a test suite based on each rule of the IDNA specification, as
+well as tests that are provided as part of the Unicode Technical Standard 46,
+`Unicode IDNA Compatibility Processing <http://unicode.org/reports/tr46/>`_.
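+
+To run the suite locally, an invocation along these lines should work
+(a sketch assuming the standard setuptools test hook; the project's actual
+test entry point may differ):
+
+.. code-block:: bash
+
+    $ python setup.py test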
+ +The tests are run automatically on each commit to the master branch of the +idna git repository at Travis CI: + +.. image:: https://travis-ci.org/kjd/idna.svg?branch=master + :target: https://travis-ci.org/kjd/idna + + diff --git a/lib/python3.4/site-packages/idna-2.1.dist-info/RECORD b/lib/python3.4/site-packages/idna-2.1.dist-info/RECORD new file mode 100644 index 0000000..acf1a4d --- /dev/null +++ b/lib/python3.4/site-packages/idna-2.1.dist-info/RECORD @@ -0,0 +1,20 @@ +idna/__init__.py,sha256=K0kNy26Vm6A-1V5lST3ily6yVsNLUbiqk6AZDFm2nJI,20 +idna/codec.py,sha256=fHb4zE1NzJR6aZmcpcaRd3tq4e8mYaETS1UogAU2IfM,3303 +idna/compat.py,sha256=LXKc5WEVdXNWv5A0FpETd5T15qZyQTKGBeoHF-GbdkM,240 +idna/core.py,sha256=mX6-GoH68ghM1CCj27JtMwWBBDmYUpHewip5hrBgwEk,11352 +idna/idnadata.py,sha256=2xOTM6l-rDNctofaECawwxydpeSUjznGlyYuh9C2Nds,35172 +idna/intranges.py,sha256=mZBLZSPznyrg9DFJSPxrqdfAIrlF814rqj2JXxcQwSg,1521 +idna/uts46data.py,sha256=RAmrTTPJKgSNXPICpBZ0f6Bp8_9R2OHQysKDGPymYlw,181224 +idna-2.1.dist-info/DESCRIPTION.rst,sha256=Bx_H_kyc0EKmQXTf6CD7C3JFmmZk31TBs48J3tc7WXI,5711 +idna-2.1.dist-info/METADATA,sha256=6pveujjevELOd3mC-xtnwFSOT7lZmMkbny4UaIvqRfE,6649 +idna-2.1.dist-info/RECORD,, +idna-2.1.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +idna-2.1.dist-info/metadata.json,sha256=33plgp2Ik3oW8YiTas-of0Io-dg-VY2q0C23sM0PcbY,1015 +idna-2.1.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5 +idna/__pycache__/compat.cpython-34.pyc,, +idna/__pycache__/__init__.cpython-34.pyc,, +idna/__pycache__/codec.cpython-34.pyc,, +idna/__pycache__/idnadata.cpython-34.pyc,, +idna/__pycache__/uts46data.cpython-34.pyc,, +idna/__pycache__/intranges.cpython-34.pyc,, +idna/__pycache__/core.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/idna-2.1.dist-info/WHEEL b/lib/python3.4/site-packages/idna-2.1.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/lib/python3.4/site-packages/idna-2.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/lib/python3.4/site-packages/idna-2.1.dist-info/metadata.json b/lib/python3.4/site-packages/idna-2.1.dist-info/metadata.json new file mode 100644 index 0000000..669392c --- /dev/null +++ b/lib/python3.4/site-packages/idna-2.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Internet :: Name Service (DNS)", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Utilities"], "extensions": {"python.details": {"contacts": [{"email": "kim@cynosure.com.au", "name": "Kim Davies", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/kjd/idna"}}}, "generator": "bdist_wheel (0.29.0)", "license": "BSD-like", "metadata_version": "2.0", "name": "idna", "summary": "Internationalized Domain Names in Applications (IDNA)", "version": "2.1"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/idna-2.1.dist-info/top_level.txt 
b/lib/python3.4/site-packages/idna-2.1.dist-info/top_level.txt new file mode 100644 index 0000000..c40472e --- /dev/null +++ b/lib/python3.4/site-packages/idna-2.1.dist-info/top_level.txt @@ -0,0 +1 @@ +idna diff --git a/lib/python3.4/site-packages/idna/__init__.py b/lib/python3.4/site-packages/idna/__init__.py new file mode 100644 index 0000000..bb67a43 --- /dev/null +++ b/lib/python3.4/site-packages/idna/__init__.py @@ -0,0 +1 @@ +from .core import * diff --git a/lib/python3.4/site-packages/idna/codec.py b/lib/python3.4/site-packages/idna/codec.py new file mode 100644 index 0000000..cdd9675 --- /dev/null +++ b/lib/python3.4/site-packages/idna/codec.py @@ -0,0 +1,118 @@ +from idna.core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re + +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def encode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return "", 0 + + return encode(data), len(data) + + def decode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return u"", 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return ("", 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result = ".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return (u"", 0) + + # IDNA allows decoding to operate on Unicode strings, too. + if isinstance(data, unicode): + labels = _unicode_dots_re.split(data) + else: + # Must be ASCII string + data = str(data) + unicode(data, "ascii") + labels = data.split(".") + + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = u'.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = u'.' 
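+        # Decode each pending A-label and re-join with U+002E, tracking how
+        # much of the input the incremental decoder has consumed.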
+ + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result = u".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + +class StreamReader(Codec, codecs.StreamReader): + pass + +def getregentry(): + return codecs.CodecInfo( + name='idna', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) diff --git a/lib/python3.4/site-packages/idna/compat.py b/lib/python3.4/site-packages/idna/compat.py new file mode 100644 index 0000000..ef9bcbd --- /dev/null +++ b/lib/python3.4/site-packages/idna/compat.py @@ -0,0 +1,12 @@ +from idna.core import * +from idna.codec import * + +def ToASCII(label): + return encode(label) + +def ToUnicode(label): + return decode(label) + +def nameprep(s): + raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") + diff --git a/lib/python3.4/site-packages/idna/core.py b/lib/python3.4/site-packages/idna/core.py new file mode 100644 index 0000000..ff3b38d --- /dev/null +++ b/lib/python3.4/site-packages/idna/core.py @@ -0,0 +1,387 @@ +from . import idnadata +import bisect +import unicodedata +import re +import sys +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +if sys.version_info[0] == 3: + unicode = str + unichr = chr + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp): + return unicodedata.combining(unichr(cp)) + +def _is_script(cp, script): + return intranges_contain(ord(cp), idnadata.scripts[script]) + +def _punycode(s): + return s.encode('punycode') + +def _unot(s): + return 'U+{0:04X}'.format(s) + + +def valid_label_length(label): + + if len(label) > 63: + return False + return True + + +def valid_string_length(label, trailing_dot): + + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label, check_ltr=False): + + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + break + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 
'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label): + + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') + return True + + +def check_hyphen_ok(label): + + if label[2:4] == '--': + raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label): + + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label, pos): + + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == 'T': + continue + if joining_type in ['L', 'D']: + ok = True + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == 'T': + continue + if joining_type in ['R', 'D']: + ok = True + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label, pos, exception=False): + + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == u'\u30fb': + continue + if not _is_script(cp, 'Hiragana') and not _is_script(cp, 'Katakana') and not _is_script(cp, 'Han'): + return False + return True + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + +def check_label(label): + + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if 
intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label): + + try: + label = label.encode('ascii') + try: + ulabel(label) + except: + raise IDNAError('The label {0} is not a valid A-label'.format(label)) + if not valid_label_length(label): + raise IDNAError('Label too long') + return label + except UnicodeError: + pass + + if not label: + raise IDNAError('No Input') + + label = unicode(label) + check_label(label) + label = _punycode(label) + label = _alabel_prefix + label + + if not valid_label_length(label): + raise IDNAError('Label too long') + + return label + + +def ulabel(label): + + if not isinstance(label, (bytes, bytearray)): + try: + label = label.encode('ascii') + except UnicodeError: + check_label(label) + return label + + label = label.lower() + if label.startswith(_alabel_prefix): + label = label[len(_alabel_prefix):] + else: + check_label(label) + return label.decode('ascii') + + label = label.decode('punycode') + check_label(label) + return label + + +def uts46_remap(domain, std3_rules=True, transitional=False): + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = u"" + try: + for pos, char in enumerate(domain): + code_point = ord(char) + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement = uts46row[2] if len(uts46row) == 3 else None + if (status == "V" or + (status == "D" and not transitional) or + (status == "3" and std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == "M" or + (status == "3" and std3_rules) or + (status == "D" and transitional)): + output += replacement + elif status != "I": + raise IndexError() + return unicodedata.normalize("NFC", output) + except IndexError: + raise InvalidCodepoint( + "Codepoint {0} not allowed at position {1} in {2}".format( + _unot(code_point), pos + 1, repr(domain))) + + +def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + while labels and not labels[0]: + del labels[0] + if not labels: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + result.append(alabel(label)) + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s, strict=False, uts46=False, std3_rules=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False 
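+    # Split the domain into labels (on any IDNA dot separator, or U+002E
+    # only when strict), U-label each one, then re-join with '.'.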
+ result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split(u'.') + while labels and not labels[0]: + del labels[0] + if not labels: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + result.append(ulabel(label)) + if trailing_dot: + result.append(u'') + return u'.'.join(result) diff --git a/lib/python3.4/site-packages/idna/idnadata.py b/lib/python3.4/site-packages/idna/idnadata.py new file mode 100644 index 0000000..2bffe52 --- /dev/null +++ b/lib/python3.4/site-packages/idna/idnadata.py @@ -0,0 +1,1584 @@ +# This file is automatically generated by build-idnadata.py + +scripts = { + 'Greek': ( + (0x370, 0x374), + (0x375, 0x378), + (0x37a, 0x37e), + (0x384, 0x385), + (0x386, 0x387), + (0x388, 0x38b), + (0x38c, 0x38d), + (0x38e, 0x3a2), + (0x3a3, 0x3e2), + (0x3f0, 0x400), + (0x1d26, 0x1d2b), + (0x1d5d, 0x1d62), + (0x1d66, 0x1d6b), + (0x1dbf, 0x1dc0), + (0x1f00, 0x1f16), + (0x1f18, 0x1f1e), + (0x1f20, 0x1f46), + (0x1f48, 0x1f4e), + (0x1f50, 0x1f58), + (0x1f59, 0x1f5a), + (0x1f5b, 0x1f5c), + (0x1f5d, 0x1f5e), + (0x1f5f, 0x1f7e), + (0x1f80, 0x1fb5), + (0x1fb6, 0x1fc5), + (0x1fc6, 0x1fd4), + (0x1fd6, 0x1fdc), + (0x1fdd, 0x1ff0), + (0x1ff2, 0x1ff5), + (0x1ff6, 0x1fff), + (0x2126, 0x2127), + (0x10140, 0x1018b), + (0x1d200, 0x1d246), + ), + 'Han': ( + (0x2e80, 0x2e9a), + (0x2e9b, 0x2ef4), + (0x2f00, 0x2fd6), + (0x3005, 0x3006), + (0x3007, 0x3008), + (0x3021, 0x302a), + (0x3038, 0x303c), + (0x3400, 0x4db6), + (0x4e00, 0x9fcd), + (0xf900, 0xfa6e), + (0xfa70, 0xfada), + (0x20000, 0x2a6d7), + (0x2a700, 0x2b735), + (0x2b740, 0x2b81e), + (0x2f800, 0x2fa1e), + ), + 'Hebrew': ( + (0x591, 0x5c8), + (0x5d0, 0x5eb), + (0x5f0, 0x5f5), + (0xfb1d, 0xfb37), + (0xfb38, 0xfb3d), + (0xfb3e, 0xfb3f), + (0xfb40, 0xfb42), + (0xfb43, 0xfb45), + (0xfb46, 0xfb50), + ), + 'Hiragana': ( + (0x3041, 0x3097), + (0x309d, 0x30a0), + (0x1b001, 0x1b002), + (0x1f200, 0x1f201), + ), + 'Katakana': ( + (0x30a1, 0x30fb), + (0x30fd, 0x3100), + (0x31f0, 0x3200), + (0x32d0, 0x32ff), + (0x3300, 0x3358), + (0xff66, 0xff70), + (0xff71, 0xff9e), + (0x1b000, 0x1b001), + ), +} +joining_types = { + 0x600: 'U', + 0x601: 'U', + 0x602: 'U', + 0x603: 'U', + 0x604: 'U', + 0x608: 'U', + 0x60b: 'U', + 0x620: 'D', + 0x621: 'U', + 0x622: 'R', + 0x623: 'R', + 0x624: 'R', + 0x625: 'R', + 0x626: 'D', + 0x627: 'R', + 0x628: 'D', + 0x629: 'R', + 0x62a: 'D', + 0x62b: 'D', + 0x62c: 'D', + 0x62d: 'D', + 0x62e: 'D', + 0x62f: 'R', + 0x630: 'R', + 0x631: 'R', + 0x632: 'R', + 0x633: 'D', + 0x634: 'D', + 0x635: 'D', + 0x636: 'D', + 0x637: 'D', + 0x638: 'D', + 0x639: 'D', + 0x63a: 'D', + 0x63b: 'D', + 0x63c: 'D', + 0x63d: 'D', + 0x63e: 'D', + 0x63f: 'D', + 0x640: 'C', + 0x641: 'D', + 0x642: 'D', + 0x643: 'D', + 0x644: 'D', + 0x645: 'D', + 0x646: 'D', + 0x647: 'D', + 0x648: 'R', + 0x649: 'D', + 0x64a: 'D', + 0x66e: 'D', + 0x66f: 'D', + 0x671: 'R', + 0x672: 'R', + 0x673: 'R', + 0x674: 'U', + 0x675: 'R', + 0x676: 'R', + 0x677: 'R', + 0x678: 'D', + 0x679: 'D', + 0x67a: 'D', + 0x67b: 'D', + 0x67c: 'D', + 0x67d: 'D', + 0x67e: 'D', + 0x67f: 'D', + 0x680: 'D', + 0x681: 'D', + 0x682: 'D', + 0x683: 'D', + 0x684: 'D', + 0x685: 'D', + 0x686: 'D', + 0x687: 'D', + 0x688: 'R', + 0x689: 'R', + 0x68a: 'R', + 0x68b: 'R', + 0x68c: 'R', + 0x68d: 'R', + 0x68e: 'R', + 0x68f: 'R', + 0x690: 'R', + 0x691: 'R', + 0x692: 'R', + 0x693: 'R', + 0x694: 'R', + 0x695: 'R', + 0x696: 'R', + 0x697: 'R', + 0x698: 'R', + 0x699: 'R', + 0x69a: 'D', + 0x69b: 'D', + 0x69c: 'D', + 0x69d: 'D', + 0x69e: 'D', + 
0x69f: 'D', + 0x6a0: 'D', + 0x6a1: 'D', + 0x6a2: 'D', + 0x6a3: 'D', + 0x6a4: 'D', + 0x6a5: 'D', + 0x6a6: 'D', + 0x6a7: 'D', + 0x6a8: 'D', + 0x6a9: 'D', + 0x6aa: 'D', + 0x6ab: 'D', + 0x6ac: 'D', + 0x6ad: 'D', + 0x6ae: 'D', + 0x6af: 'D', + 0x6b0: 'D', + 0x6b1: 'D', + 0x6b2: 'D', + 0x6b3: 'D', + 0x6b4: 'D', + 0x6b5: 'D', + 0x6b6: 'D', + 0x6b7: 'D', + 0x6b8: 'D', + 0x6b9: 'D', + 0x6ba: 'D', + 0x6bb: 'D', + 0x6bc: 'D', + 0x6bd: 'D', + 0x6be: 'D', + 0x6bf: 'D', + 0x6c0: 'R', + 0x6c1: 'D', + 0x6c2: 'D', + 0x6c3: 'R', + 0x6c4: 'R', + 0x6c5: 'R', + 0x6c6: 'R', + 0x6c7: 'R', + 0x6c8: 'R', + 0x6c9: 'R', + 0x6ca: 'R', + 0x6cb: 'R', + 0x6cc: 'D', + 0x6cd: 'R', + 0x6ce: 'D', + 0x6cf: 'R', + 0x6d0: 'D', + 0x6d1: 'D', + 0x6d2: 'R', + 0x6d3: 'R', + 0x6d5: 'R', + 0x6dd: 'U', + 0x6ee: 'R', + 0x6ef: 'R', + 0x6fa: 'D', + 0x6fb: 'D', + 0x6fc: 'D', + 0x6ff: 'D', + 0x710: 'R', + 0x712: 'D', + 0x713: 'D', + 0x714: 'D', + 0x715: 'R', + 0x716: 'R', + 0x717: 'R', + 0x718: 'R', + 0x719: 'R', + 0x71a: 'D', + 0x71b: 'D', + 0x71c: 'D', + 0x71d: 'D', + 0x71e: 'R', + 0x71f: 'D', + 0x720: 'D', + 0x721: 'D', + 0x722: 'D', + 0x723: 'D', + 0x724: 'D', + 0x725: 'D', + 0x726: 'D', + 0x727: 'D', + 0x728: 'R', + 0x729: 'D', + 0x72a: 'R', + 0x72b: 'D', + 0x72c: 'R', + 0x72d: 'D', + 0x72e: 'D', + 0x72f: 'R', + 0x74d: 'R', + 0x74e: 'D', + 0x74f: 'D', + 0x750: 'D', + 0x751: 'D', + 0x752: 'D', + 0x753: 'D', + 0x754: 'D', + 0x755: 'D', + 0x756: 'D', + 0x757: 'D', + 0x758: 'D', + 0x759: 'R', + 0x75a: 'R', + 0x75b: 'R', + 0x75c: 'D', + 0x75d: 'D', + 0x75e: 'D', + 0x75f: 'D', + 0x760: 'D', + 0x761: 'D', + 0x762: 'D', + 0x763: 'D', + 0x764: 'D', + 0x765: 'D', + 0x766: 'D', + 0x767: 'D', + 0x768: 'D', + 0x769: 'D', + 0x76a: 'D', + 0x76b: 'R', + 0x76c: 'R', + 0x76d: 'D', + 0x76e: 'D', + 0x76f: 'D', + 0x770: 'D', + 0x771: 'R', + 0x772: 'D', + 0x773: 'R', + 0x774: 'R', + 0x775: 'D', + 0x776: 'D', + 0x777: 'D', + 0x778: 'R', + 0x779: 'R', + 0x77a: 'D', + 0x77b: 'D', + 0x77c: 'D', + 0x77d: 'D', + 0x77e: 'D', + 0x77f: 'D', + 0x7ca: 'D', + 0x7cb: 'D', + 0x7cc: 'D', + 0x7cd: 'D', + 0x7ce: 'D', + 0x7cf: 'D', + 0x7d0: 'D', + 0x7d1: 'D', + 0x7d2: 'D', + 0x7d3: 'D', + 0x7d4: 'D', + 0x7d5: 'D', + 0x7d6: 'D', + 0x7d7: 'D', + 0x7d8: 'D', + 0x7d9: 'D', + 0x7da: 'D', + 0x7db: 'D', + 0x7dc: 'D', + 0x7dd: 'D', + 0x7de: 'D', + 0x7df: 'D', + 0x7e0: 'D', + 0x7e1: 'D', + 0x7e2: 'D', + 0x7e3: 'D', + 0x7e4: 'D', + 0x7e5: 'D', + 0x7e6: 'D', + 0x7e7: 'D', + 0x7e8: 'D', + 0x7e9: 'D', + 0x7ea: 'D', + 0x7fa: 'C', + 0x840: 'R', + 0x841: 'D', + 0x842: 'D', + 0x843: 'D', + 0x844: 'D', + 0x845: 'D', + 0x846: 'R', + 0x847: 'D', + 0x848: 'D', + 0x849: 'R', + 0x84a: 'D', + 0x84b: 'D', + 0x84c: 'D', + 0x84d: 'D', + 0x84e: 'D', + 0x84f: 'R', + 0x850: 'D', + 0x851: 'D', + 0x852: 'D', + 0x853: 'D', + 0x854: 'R', + 0x855: 'D', + 0x856: 'U', + 0x857: 'U', + 0x858: 'U', + 0x8a0: 'D', + 0x8a2: 'D', + 0x8a3: 'D', + 0x8a4: 'D', + 0x8a5: 'D', + 0x8a6: 'D', + 0x8a7: 'D', + 0x8a8: 'D', + 0x8a9: 'D', + 0x8aa: 'R', + 0x8ab: 'R', + 0x8ac: 'R', + 0x1806: 'U', + 0x1807: 'D', + 0x180a: 'C', + 0x180e: 'U', + 0x1820: 'D', + 0x1821: 'D', + 0x1822: 'D', + 0x1823: 'D', + 0x1824: 'D', + 0x1825: 'D', + 0x1826: 'D', + 0x1827: 'D', + 0x1828: 'D', + 0x1829: 'D', + 0x182a: 'D', + 0x182b: 'D', + 0x182c: 'D', + 0x182d: 'D', + 0x182e: 'D', + 0x182f: 'D', + 0x1830: 'D', + 0x1831: 'D', + 0x1832: 'D', + 0x1833: 'D', + 0x1834: 'D', + 0x1835: 'D', + 0x1836: 'D', + 0x1837: 'D', + 0x1838: 'D', + 0x1839: 'D', + 0x183a: 'D', + 0x183b: 'D', + 0x183c: 'D', + 0x183d: 'D', + 0x183e: 'D', + 0x183f: 'D', + 0x1840: 'D', + 
0x1841: 'D', + 0x1842: 'D', + 0x1843: 'D', + 0x1844: 'D', + 0x1845: 'D', + 0x1846: 'D', + 0x1847: 'D', + 0x1848: 'D', + 0x1849: 'D', + 0x184a: 'D', + 0x184b: 'D', + 0x184c: 'D', + 0x184d: 'D', + 0x184e: 'D', + 0x184f: 'D', + 0x1850: 'D', + 0x1851: 'D', + 0x1852: 'D', + 0x1853: 'D', + 0x1854: 'D', + 0x1855: 'D', + 0x1856: 'D', + 0x1857: 'D', + 0x1858: 'D', + 0x1859: 'D', + 0x185a: 'D', + 0x185b: 'D', + 0x185c: 'D', + 0x185d: 'D', + 0x185e: 'D', + 0x185f: 'D', + 0x1860: 'D', + 0x1861: 'D', + 0x1862: 'D', + 0x1863: 'D', + 0x1864: 'D', + 0x1865: 'D', + 0x1866: 'D', + 0x1867: 'D', + 0x1868: 'D', + 0x1869: 'D', + 0x186a: 'D', + 0x186b: 'D', + 0x186c: 'D', + 0x186d: 'D', + 0x186e: 'D', + 0x186f: 'D', + 0x1870: 'D', + 0x1871: 'D', + 0x1872: 'D', + 0x1873: 'D', + 0x1874: 'D', + 0x1875: 'D', + 0x1876: 'D', + 0x1877: 'D', + 0x1880: 'U', + 0x1881: 'U', + 0x1882: 'U', + 0x1883: 'U', + 0x1884: 'U', + 0x1885: 'U', + 0x1886: 'U', + 0x1887: 'D', + 0x1888: 'D', + 0x1889: 'D', + 0x188a: 'D', + 0x188b: 'D', + 0x188c: 'D', + 0x188d: 'D', + 0x188e: 'D', + 0x188f: 'D', + 0x1890: 'D', + 0x1891: 'D', + 0x1892: 'D', + 0x1893: 'D', + 0x1894: 'D', + 0x1895: 'D', + 0x1896: 'D', + 0x1897: 'D', + 0x1898: 'D', + 0x1899: 'D', + 0x189a: 'D', + 0x189b: 'D', + 0x189c: 'D', + 0x189d: 'D', + 0x189e: 'D', + 0x189f: 'D', + 0x18a0: 'D', + 0x18a1: 'D', + 0x18a2: 'D', + 0x18a3: 'D', + 0x18a4: 'D', + 0x18a5: 'D', + 0x18a6: 'D', + 0x18a7: 'D', + 0x18a8: 'D', + 0x18aa: 'D', + 0x200c: 'U', + 0x200d: 'C', + 0x2066: 'U', + 0x2067: 'U', + 0x2068: 'U', + 0x2069: 'U', + 0xa840: 'D', + 0xa841: 'D', + 0xa842: 'D', + 0xa843: 'D', + 0xa844: 'D', + 0xa845: 'D', + 0xa846: 'D', + 0xa847: 'D', + 0xa848: 'D', + 0xa849: 'D', + 0xa84a: 'D', + 0xa84b: 'D', + 0xa84c: 'D', + 0xa84d: 'D', + 0xa84e: 'D', + 0xa84f: 'D', + 0xa850: 'D', + 0xa851: 'D', + 0xa852: 'D', + 0xa853: 'D', + 0xa854: 'D', + 0xa855: 'D', + 0xa856: 'D', + 0xa857: 'D', + 0xa858: 'D', + 0xa859: 'D', + 0xa85a: 'D', + 0xa85b: 'D', + 0xa85c: 'D', + 0xa85d: 'D', + 0xa85e: 'D', + 0xa85f: 'D', + 0xa860: 'D', + 0xa861: 'D', + 0xa862: 'D', + 0xa863: 'D', + 0xa864: 'D', + 0xa865: 'D', + 0xa866: 'D', + 0xa867: 'D', + 0xa868: 'D', + 0xa869: 'D', + 0xa86a: 'D', + 0xa86b: 'D', + 0xa86c: 'D', + 0xa86d: 'D', + 0xa86e: 'D', + 0xa86f: 'D', + 0xa870: 'D', + 0xa871: 'D', + 0xa872: 'L', + 0xa873: 'U', +} +codepoint_classes = { + 'PVALID': ( + (0x2d, 0x2e), + (0x30, 0x3a), + (0x61, 0x7b), + (0xdf, 0xf7), + (0xf8, 0x100), + (0x101, 0x102), + (0x103, 0x104), + (0x105, 0x106), + (0x107, 0x108), + (0x109, 0x10a), + (0x10b, 0x10c), + (0x10d, 0x10e), + (0x10f, 0x110), + (0x111, 0x112), + (0x113, 0x114), + (0x115, 0x116), + (0x117, 0x118), + (0x119, 0x11a), + (0x11b, 0x11c), + (0x11d, 0x11e), + (0x11f, 0x120), + (0x121, 0x122), + (0x123, 0x124), + (0x125, 0x126), + (0x127, 0x128), + (0x129, 0x12a), + (0x12b, 0x12c), + (0x12d, 0x12e), + (0x12f, 0x130), + (0x131, 0x132), + (0x135, 0x136), + (0x137, 0x139), + (0x13a, 0x13b), + (0x13c, 0x13d), + (0x13e, 0x13f), + (0x142, 0x143), + (0x144, 0x145), + (0x146, 0x147), + (0x148, 0x149), + (0x14b, 0x14c), + (0x14d, 0x14e), + (0x14f, 0x150), + (0x151, 0x152), + (0x153, 0x154), + (0x155, 0x156), + (0x157, 0x158), + (0x159, 0x15a), + (0x15b, 0x15c), + (0x15d, 0x15e), + (0x15f, 0x160), + (0x161, 0x162), + (0x163, 0x164), + (0x165, 0x166), + (0x167, 0x168), + (0x169, 0x16a), + (0x16b, 0x16c), + (0x16d, 0x16e), + (0x16f, 0x170), + (0x171, 0x172), + (0x173, 0x174), + (0x175, 0x176), + (0x177, 0x178), + (0x17a, 0x17b), + (0x17c, 0x17d), + (0x17e, 0x17f), + (0x180, 0x181), + (0x183, 
0x184), + (0x185, 0x186), + (0x188, 0x189), + (0x18c, 0x18e), + (0x192, 0x193), + (0x195, 0x196), + (0x199, 0x19c), + (0x19e, 0x19f), + (0x1a1, 0x1a2), + (0x1a3, 0x1a4), + (0x1a5, 0x1a6), + (0x1a8, 0x1a9), + (0x1aa, 0x1ac), + (0x1ad, 0x1ae), + (0x1b0, 0x1b1), + (0x1b4, 0x1b5), + (0x1b6, 0x1b7), + (0x1b9, 0x1bc), + (0x1bd, 0x1c4), + (0x1ce, 0x1cf), + (0x1d0, 0x1d1), + (0x1d2, 0x1d3), + (0x1d4, 0x1d5), + (0x1d6, 0x1d7), + (0x1d8, 0x1d9), + (0x1da, 0x1db), + (0x1dc, 0x1de), + (0x1df, 0x1e0), + (0x1e1, 0x1e2), + (0x1e3, 0x1e4), + (0x1e5, 0x1e6), + (0x1e7, 0x1e8), + (0x1e9, 0x1ea), + (0x1eb, 0x1ec), + (0x1ed, 0x1ee), + (0x1ef, 0x1f1), + (0x1f5, 0x1f6), + (0x1f9, 0x1fa), + (0x1fb, 0x1fc), + (0x1fd, 0x1fe), + (0x1ff, 0x200), + (0x201, 0x202), + (0x203, 0x204), + (0x205, 0x206), + (0x207, 0x208), + (0x209, 0x20a), + (0x20b, 0x20c), + (0x20d, 0x20e), + (0x20f, 0x210), + (0x211, 0x212), + (0x213, 0x214), + (0x215, 0x216), + (0x217, 0x218), + (0x219, 0x21a), + (0x21b, 0x21c), + (0x21d, 0x21e), + (0x21f, 0x220), + (0x221, 0x222), + (0x223, 0x224), + (0x225, 0x226), + (0x227, 0x228), + (0x229, 0x22a), + (0x22b, 0x22c), + (0x22d, 0x22e), + (0x22f, 0x230), + (0x231, 0x232), + (0x233, 0x23a), + (0x23c, 0x23d), + (0x23f, 0x241), + (0x242, 0x243), + (0x247, 0x248), + (0x249, 0x24a), + (0x24b, 0x24c), + (0x24d, 0x24e), + (0x24f, 0x2b0), + (0x2b9, 0x2c2), + (0x2c6, 0x2d2), + (0x2ec, 0x2ed), + (0x2ee, 0x2ef), + (0x300, 0x340), + (0x342, 0x343), + (0x346, 0x34f), + (0x350, 0x370), + (0x371, 0x372), + (0x373, 0x374), + (0x377, 0x378), + (0x37b, 0x37e), + (0x390, 0x391), + (0x3ac, 0x3cf), + (0x3d7, 0x3d8), + (0x3d9, 0x3da), + (0x3db, 0x3dc), + (0x3dd, 0x3de), + (0x3df, 0x3e0), + (0x3e1, 0x3e2), + (0x3e3, 0x3e4), + (0x3e5, 0x3e6), + (0x3e7, 0x3e8), + (0x3e9, 0x3ea), + (0x3eb, 0x3ec), + (0x3ed, 0x3ee), + (0x3ef, 0x3f0), + (0x3f3, 0x3f4), + (0x3f8, 0x3f9), + (0x3fb, 0x3fd), + (0x430, 0x460), + (0x461, 0x462), + (0x463, 0x464), + (0x465, 0x466), + (0x467, 0x468), + (0x469, 0x46a), + (0x46b, 0x46c), + (0x46d, 0x46e), + (0x46f, 0x470), + (0x471, 0x472), + (0x473, 0x474), + (0x475, 0x476), + (0x477, 0x478), + (0x479, 0x47a), + (0x47b, 0x47c), + (0x47d, 0x47e), + (0x47f, 0x480), + (0x481, 0x482), + (0x483, 0x488), + (0x48b, 0x48c), + (0x48d, 0x48e), + (0x48f, 0x490), + (0x491, 0x492), + (0x493, 0x494), + (0x495, 0x496), + (0x497, 0x498), + (0x499, 0x49a), + (0x49b, 0x49c), + (0x49d, 0x49e), + (0x49f, 0x4a0), + (0x4a1, 0x4a2), + (0x4a3, 0x4a4), + (0x4a5, 0x4a6), + (0x4a7, 0x4a8), + (0x4a9, 0x4aa), + (0x4ab, 0x4ac), + (0x4ad, 0x4ae), + (0x4af, 0x4b0), + (0x4b1, 0x4b2), + (0x4b3, 0x4b4), + (0x4b5, 0x4b6), + (0x4b7, 0x4b8), + (0x4b9, 0x4ba), + (0x4bb, 0x4bc), + (0x4bd, 0x4be), + (0x4bf, 0x4c0), + (0x4c2, 0x4c3), + (0x4c4, 0x4c5), + (0x4c6, 0x4c7), + (0x4c8, 0x4c9), + (0x4ca, 0x4cb), + (0x4cc, 0x4cd), + (0x4ce, 0x4d0), + (0x4d1, 0x4d2), + (0x4d3, 0x4d4), + (0x4d5, 0x4d6), + (0x4d7, 0x4d8), + (0x4d9, 0x4da), + (0x4db, 0x4dc), + (0x4dd, 0x4de), + (0x4df, 0x4e0), + (0x4e1, 0x4e2), + (0x4e3, 0x4e4), + (0x4e5, 0x4e6), + (0x4e7, 0x4e8), + (0x4e9, 0x4ea), + (0x4eb, 0x4ec), + (0x4ed, 0x4ee), + (0x4ef, 0x4f0), + (0x4f1, 0x4f2), + (0x4f3, 0x4f4), + (0x4f5, 0x4f6), + (0x4f7, 0x4f8), + (0x4f9, 0x4fa), + (0x4fb, 0x4fc), + (0x4fd, 0x4fe), + (0x4ff, 0x500), + (0x501, 0x502), + (0x503, 0x504), + (0x505, 0x506), + (0x507, 0x508), + (0x509, 0x50a), + (0x50b, 0x50c), + (0x50d, 0x50e), + (0x50f, 0x510), + (0x511, 0x512), + (0x513, 0x514), + (0x515, 0x516), + (0x517, 0x518), + (0x519, 0x51a), + (0x51b, 0x51c), + (0x51d, 0x51e), + (0x51f, 0x520), 
+ (0x521, 0x522), + (0x523, 0x524), + (0x525, 0x526), + (0x527, 0x528), + (0x559, 0x55a), + (0x561, 0x587), + (0x591, 0x5be), + (0x5bf, 0x5c0), + (0x5c1, 0x5c3), + (0x5c4, 0x5c6), + (0x5c7, 0x5c8), + (0x5d0, 0x5eb), + (0x5f0, 0x5f3), + (0x610, 0x61b), + (0x620, 0x640), + (0x641, 0x660), + (0x66e, 0x675), + (0x679, 0x6d4), + (0x6d5, 0x6dd), + (0x6df, 0x6e9), + (0x6ea, 0x6f0), + (0x6fa, 0x700), + (0x710, 0x74b), + (0x74d, 0x7b2), + (0x7c0, 0x7f6), + (0x800, 0x82e), + (0x840, 0x85c), + (0x8a0, 0x8a1), + (0x8a2, 0x8ad), + (0x8e4, 0x8ff), + (0x900, 0x958), + (0x960, 0x964), + (0x966, 0x970), + (0x971, 0x978), + (0x979, 0x980), + (0x981, 0x984), + (0x985, 0x98d), + (0x98f, 0x991), + (0x993, 0x9a9), + (0x9aa, 0x9b1), + (0x9b2, 0x9b3), + (0x9b6, 0x9ba), + (0x9bc, 0x9c5), + (0x9c7, 0x9c9), + (0x9cb, 0x9cf), + (0x9d7, 0x9d8), + (0x9e0, 0x9e4), + (0x9e6, 0x9f2), + (0xa01, 0xa04), + (0xa05, 0xa0b), + (0xa0f, 0xa11), + (0xa13, 0xa29), + (0xa2a, 0xa31), + (0xa32, 0xa33), + (0xa35, 0xa36), + (0xa38, 0xa3a), + (0xa3c, 0xa3d), + (0xa3e, 0xa43), + (0xa47, 0xa49), + (0xa4b, 0xa4e), + (0xa51, 0xa52), + (0xa5c, 0xa5d), + (0xa66, 0xa76), + (0xa81, 0xa84), + (0xa85, 0xa8e), + (0xa8f, 0xa92), + (0xa93, 0xaa9), + (0xaaa, 0xab1), + (0xab2, 0xab4), + (0xab5, 0xaba), + (0xabc, 0xac6), + (0xac7, 0xaca), + (0xacb, 0xace), + (0xad0, 0xad1), + (0xae0, 0xae4), + (0xae6, 0xaf0), + (0xb01, 0xb04), + (0xb05, 0xb0d), + (0xb0f, 0xb11), + (0xb13, 0xb29), + (0xb2a, 0xb31), + (0xb32, 0xb34), + (0xb35, 0xb3a), + (0xb3c, 0xb45), + (0xb47, 0xb49), + (0xb4b, 0xb4e), + (0xb56, 0xb58), + (0xb5f, 0xb64), + (0xb66, 0xb70), + (0xb71, 0xb72), + (0xb82, 0xb84), + (0xb85, 0xb8b), + (0xb8e, 0xb91), + (0xb92, 0xb96), + (0xb99, 0xb9b), + (0xb9c, 0xb9d), + (0xb9e, 0xba0), + (0xba3, 0xba5), + (0xba8, 0xbab), + (0xbae, 0xbba), + (0xbbe, 0xbc3), + (0xbc6, 0xbc9), + (0xbca, 0xbce), + (0xbd0, 0xbd1), + (0xbd7, 0xbd8), + (0xbe6, 0xbf0), + (0xc01, 0xc04), + (0xc05, 0xc0d), + (0xc0e, 0xc11), + (0xc12, 0xc29), + (0xc2a, 0xc34), + (0xc35, 0xc3a), + (0xc3d, 0xc45), + (0xc46, 0xc49), + (0xc4a, 0xc4e), + (0xc55, 0xc57), + (0xc58, 0xc5a), + (0xc60, 0xc64), + (0xc66, 0xc70), + (0xc82, 0xc84), + (0xc85, 0xc8d), + (0xc8e, 0xc91), + (0xc92, 0xca9), + (0xcaa, 0xcb4), + (0xcb5, 0xcba), + (0xcbc, 0xcc5), + (0xcc6, 0xcc9), + (0xcca, 0xcce), + (0xcd5, 0xcd7), + (0xcde, 0xcdf), + (0xce0, 0xce4), + (0xce6, 0xcf0), + (0xcf1, 0xcf3), + (0xd02, 0xd04), + (0xd05, 0xd0d), + (0xd0e, 0xd11), + (0xd12, 0xd3b), + (0xd3d, 0xd45), + (0xd46, 0xd49), + (0xd4a, 0xd4f), + (0xd57, 0xd58), + (0xd60, 0xd64), + (0xd66, 0xd70), + (0xd7a, 0xd80), + (0xd82, 0xd84), + (0xd85, 0xd97), + (0xd9a, 0xdb2), + (0xdb3, 0xdbc), + (0xdbd, 0xdbe), + (0xdc0, 0xdc7), + (0xdca, 0xdcb), + (0xdcf, 0xdd5), + (0xdd6, 0xdd7), + (0xdd8, 0xde0), + (0xdf2, 0xdf4), + (0xe01, 0xe33), + (0xe34, 0xe3b), + (0xe40, 0xe4f), + (0xe50, 0xe5a), + (0xe81, 0xe83), + (0xe84, 0xe85), + (0xe87, 0xe89), + (0xe8a, 0xe8b), + (0xe8d, 0xe8e), + (0xe94, 0xe98), + (0xe99, 0xea0), + (0xea1, 0xea4), + (0xea5, 0xea6), + (0xea7, 0xea8), + (0xeaa, 0xeac), + (0xead, 0xeb3), + (0xeb4, 0xeba), + (0xebb, 0xebe), + (0xec0, 0xec5), + (0xec6, 0xec7), + (0xec8, 0xece), + (0xed0, 0xeda), + (0xede, 0xee0), + (0xf00, 0xf01), + (0xf0b, 0xf0c), + (0xf18, 0xf1a), + (0xf20, 0xf2a), + (0xf35, 0xf36), + (0xf37, 0xf38), + (0xf39, 0xf3a), + (0xf3e, 0xf43), + (0xf44, 0xf48), + (0xf49, 0xf4d), + (0xf4e, 0xf52), + (0xf53, 0xf57), + (0xf58, 0xf5c), + (0xf5d, 0xf69), + (0xf6a, 0xf6d), + (0xf71, 0xf73), + (0xf74, 0xf75), + (0xf7a, 0xf81), + (0xf82, 0xf85), + 
(0xf86, 0xf93), + (0xf94, 0xf98), + (0xf99, 0xf9d), + (0xf9e, 0xfa2), + (0xfa3, 0xfa7), + (0xfa8, 0xfac), + (0xfad, 0xfb9), + (0xfba, 0xfbd), + (0xfc6, 0xfc7), + (0x1000, 0x104a), + (0x1050, 0x109e), + (0x10d0, 0x10fb), + (0x10fd, 0x1100), + (0x1200, 0x1249), + (0x124a, 0x124e), + (0x1250, 0x1257), + (0x1258, 0x1259), + (0x125a, 0x125e), + (0x1260, 0x1289), + (0x128a, 0x128e), + (0x1290, 0x12b1), + (0x12b2, 0x12b6), + (0x12b8, 0x12bf), + (0x12c0, 0x12c1), + (0x12c2, 0x12c6), + (0x12c8, 0x12d7), + (0x12d8, 0x1311), + (0x1312, 0x1316), + (0x1318, 0x135b), + (0x135d, 0x1360), + (0x1380, 0x1390), + (0x13a0, 0x13f5), + (0x1401, 0x166d), + (0x166f, 0x1680), + (0x1681, 0x169b), + (0x16a0, 0x16eb), + (0x1700, 0x170d), + (0x170e, 0x1715), + (0x1720, 0x1735), + (0x1740, 0x1754), + (0x1760, 0x176d), + (0x176e, 0x1771), + (0x1772, 0x1774), + (0x1780, 0x17b4), + (0x17b6, 0x17d4), + (0x17d7, 0x17d8), + (0x17dc, 0x17de), + (0x17e0, 0x17ea), + (0x1810, 0x181a), + (0x1820, 0x1878), + (0x1880, 0x18ab), + (0x18b0, 0x18f6), + (0x1900, 0x191d), + (0x1920, 0x192c), + (0x1930, 0x193c), + (0x1946, 0x196e), + (0x1970, 0x1975), + (0x1980, 0x19ac), + (0x19b0, 0x19ca), + (0x19d0, 0x19da), + (0x1a00, 0x1a1c), + (0x1a20, 0x1a5f), + (0x1a60, 0x1a7d), + (0x1a7f, 0x1a8a), + (0x1a90, 0x1a9a), + (0x1aa7, 0x1aa8), + (0x1b00, 0x1b4c), + (0x1b50, 0x1b5a), + (0x1b6b, 0x1b74), + (0x1b80, 0x1bf4), + (0x1c00, 0x1c38), + (0x1c40, 0x1c4a), + (0x1c4d, 0x1c7e), + (0x1cd0, 0x1cd3), + (0x1cd4, 0x1cf7), + (0x1d00, 0x1d2c), + (0x1d2f, 0x1d30), + (0x1d3b, 0x1d3c), + (0x1d4e, 0x1d4f), + (0x1d6b, 0x1d78), + (0x1d79, 0x1d9b), + (0x1dc0, 0x1de7), + (0x1dfc, 0x1e00), + (0x1e01, 0x1e02), + (0x1e03, 0x1e04), + (0x1e05, 0x1e06), + (0x1e07, 0x1e08), + (0x1e09, 0x1e0a), + (0x1e0b, 0x1e0c), + (0x1e0d, 0x1e0e), + (0x1e0f, 0x1e10), + (0x1e11, 0x1e12), + (0x1e13, 0x1e14), + (0x1e15, 0x1e16), + (0x1e17, 0x1e18), + (0x1e19, 0x1e1a), + (0x1e1b, 0x1e1c), + (0x1e1d, 0x1e1e), + (0x1e1f, 0x1e20), + (0x1e21, 0x1e22), + (0x1e23, 0x1e24), + (0x1e25, 0x1e26), + (0x1e27, 0x1e28), + (0x1e29, 0x1e2a), + (0x1e2b, 0x1e2c), + (0x1e2d, 0x1e2e), + (0x1e2f, 0x1e30), + (0x1e31, 0x1e32), + (0x1e33, 0x1e34), + (0x1e35, 0x1e36), + (0x1e37, 0x1e38), + (0x1e39, 0x1e3a), + (0x1e3b, 0x1e3c), + (0x1e3d, 0x1e3e), + (0x1e3f, 0x1e40), + (0x1e41, 0x1e42), + (0x1e43, 0x1e44), + (0x1e45, 0x1e46), + (0x1e47, 0x1e48), + (0x1e49, 0x1e4a), + (0x1e4b, 0x1e4c), + (0x1e4d, 0x1e4e), + (0x1e4f, 0x1e50), + (0x1e51, 0x1e52), + (0x1e53, 0x1e54), + (0x1e55, 0x1e56), + (0x1e57, 0x1e58), + (0x1e59, 0x1e5a), + (0x1e5b, 0x1e5c), + (0x1e5d, 0x1e5e), + (0x1e5f, 0x1e60), + (0x1e61, 0x1e62), + (0x1e63, 0x1e64), + (0x1e65, 0x1e66), + (0x1e67, 0x1e68), + (0x1e69, 0x1e6a), + (0x1e6b, 0x1e6c), + (0x1e6d, 0x1e6e), + (0x1e6f, 0x1e70), + (0x1e71, 0x1e72), + (0x1e73, 0x1e74), + (0x1e75, 0x1e76), + (0x1e77, 0x1e78), + (0x1e79, 0x1e7a), + (0x1e7b, 0x1e7c), + (0x1e7d, 0x1e7e), + (0x1e7f, 0x1e80), + (0x1e81, 0x1e82), + (0x1e83, 0x1e84), + (0x1e85, 0x1e86), + (0x1e87, 0x1e88), + (0x1e89, 0x1e8a), + (0x1e8b, 0x1e8c), + (0x1e8d, 0x1e8e), + (0x1e8f, 0x1e90), + (0x1e91, 0x1e92), + (0x1e93, 0x1e94), + (0x1e95, 0x1e9a), + (0x1e9c, 0x1e9e), + (0x1e9f, 0x1ea0), + (0x1ea1, 0x1ea2), + (0x1ea3, 0x1ea4), + (0x1ea5, 0x1ea6), + (0x1ea7, 0x1ea8), + (0x1ea9, 0x1eaa), + (0x1eab, 0x1eac), + (0x1ead, 0x1eae), + (0x1eaf, 0x1eb0), + (0x1eb1, 0x1eb2), + (0x1eb3, 0x1eb4), + (0x1eb5, 0x1eb6), + (0x1eb7, 0x1eb8), + (0x1eb9, 0x1eba), + (0x1ebb, 0x1ebc), + (0x1ebd, 0x1ebe), + (0x1ebf, 0x1ec0), + (0x1ec1, 0x1ec2), + (0x1ec3, 0x1ec4), + (0x1ec5, 
0x1ec6), + (0x1ec7, 0x1ec8), + (0x1ec9, 0x1eca), + (0x1ecb, 0x1ecc), + (0x1ecd, 0x1ece), + (0x1ecf, 0x1ed0), + (0x1ed1, 0x1ed2), + (0x1ed3, 0x1ed4), + (0x1ed5, 0x1ed6), + (0x1ed7, 0x1ed8), + (0x1ed9, 0x1eda), + (0x1edb, 0x1edc), + (0x1edd, 0x1ede), + (0x1edf, 0x1ee0), + (0x1ee1, 0x1ee2), + (0x1ee3, 0x1ee4), + (0x1ee5, 0x1ee6), + (0x1ee7, 0x1ee8), + (0x1ee9, 0x1eea), + (0x1eeb, 0x1eec), + (0x1eed, 0x1eee), + (0x1eef, 0x1ef0), + (0x1ef1, 0x1ef2), + (0x1ef3, 0x1ef4), + (0x1ef5, 0x1ef6), + (0x1ef7, 0x1ef8), + (0x1ef9, 0x1efa), + (0x1efb, 0x1efc), + (0x1efd, 0x1efe), + (0x1eff, 0x1f08), + (0x1f10, 0x1f16), + (0x1f20, 0x1f28), + (0x1f30, 0x1f38), + (0x1f40, 0x1f46), + (0x1f50, 0x1f58), + (0x1f60, 0x1f68), + (0x1f70, 0x1f71), + (0x1f72, 0x1f73), + (0x1f74, 0x1f75), + (0x1f76, 0x1f77), + (0x1f78, 0x1f79), + (0x1f7a, 0x1f7b), + (0x1f7c, 0x1f7d), + (0x1fb0, 0x1fb2), + (0x1fb6, 0x1fb7), + (0x1fc6, 0x1fc7), + (0x1fd0, 0x1fd3), + (0x1fd6, 0x1fd8), + (0x1fe0, 0x1fe3), + (0x1fe4, 0x1fe8), + (0x1ff6, 0x1ff7), + (0x214e, 0x214f), + (0x2184, 0x2185), + (0x2c30, 0x2c5f), + (0x2c61, 0x2c62), + (0x2c65, 0x2c67), + (0x2c68, 0x2c69), + (0x2c6a, 0x2c6b), + (0x2c6c, 0x2c6d), + (0x2c71, 0x2c72), + (0x2c73, 0x2c75), + (0x2c76, 0x2c7c), + (0x2c81, 0x2c82), + (0x2c83, 0x2c84), + (0x2c85, 0x2c86), + (0x2c87, 0x2c88), + (0x2c89, 0x2c8a), + (0x2c8b, 0x2c8c), + (0x2c8d, 0x2c8e), + (0x2c8f, 0x2c90), + (0x2c91, 0x2c92), + (0x2c93, 0x2c94), + (0x2c95, 0x2c96), + (0x2c97, 0x2c98), + (0x2c99, 0x2c9a), + (0x2c9b, 0x2c9c), + (0x2c9d, 0x2c9e), + (0x2c9f, 0x2ca0), + (0x2ca1, 0x2ca2), + (0x2ca3, 0x2ca4), + (0x2ca5, 0x2ca6), + (0x2ca7, 0x2ca8), + (0x2ca9, 0x2caa), + (0x2cab, 0x2cac), + (0x2cad, 0x2cae), + (0x2caf, 0x2cb0), + (0x2cb1, 0x2cb2), + (0x2cb3, 0x2cb4), + (0x2cb5, 0x2cb6), + (0x2cb7, 0x2cb8), + (0x2cb9, 0x2cba), + (0x2cbb, 0x2cbc), + (0x2cbd, 0x2cbe), + (0x2cbf, 0x2cc0), + (0x2cc1, 0x2cc2), + (0x2cc3, 0x2cc4), + (0x2cc5, 0x2cc6), + (0x2cc7, 0x2cc8), + (0x2cc9, 0x2cca), + (0x2ccb, 0x2ccc), + (0x2ccd, 0x2cce), + (0x2ccf, 0x2cd0), + (0x2cd1, 0x2cd2), + (0x2cd3, 0x2cd4), + (0x2cd5, 0x2cd6), + (0x2cd7, 0x2cd8), + (0x2cd9, 0x2cda), + (0x2cdb, 0x2cdc), + (0x2cdd, 0x2cde), + (0x2cdf, 0x2ce0), + (0x2ce1, 0x2ce2), + (0x2ce3, 0x2ce5), + (0x2cec, 0x2ced), + (0x2cee, 0x2cf2), + (0x2cf3, 0x2cf4), + (0x2d00, 0x2d26), + (0x2d27, 0x2d28), + (0x2d2d, 0x2d2e), + (0x2d30, 0x2d68), + (0x2d7f, 0x2d97), + (0x2da0, 0x2da7), + (0x2da8, 0x2daf), + (0x2db0, 0x2db7), + (0x2db8, 0x2dbf), + (0x2dc0, 0x2dc7), + (0x2dc8, 0x2dcf), + (0x2dd0, 0x2dd7), + (0x2dd8, 0x2ddf), + (0x2de0, 0x2e00), + (0x2e2f, 0x2e30), + (0x3005, 0x3008), + (0x302a, 0x302e), + (0x303c, 0x303d), + (0x3041, 0x3097), + (0x3099, 0x309b), + (0x309d, 0x309f), + (0x30a1, 0x30fb), + (0x30fc, 0x30ff), + (0x3105, 0x312e), + (0x31a0, 0x31bb), + (0x31f0, 0x3200), + (0x3400, 0x4db6), + (0x4e00, 0x9fcd), + (0xa000, 0xa48d), + (0xa4d0, 0xa4fe), + (0xa500, 0xa60d), + (0xa610, 0xa62c), + (0xa641, 0xa642), + (0xa643, 0xa644), + (0xa645, 0xa646), + (0xa647, 0xa648), + (0xa649, 0xa64a), + (0xa64b, 0xa64c), + (0xa64d, 0xa64e), + (0xa64f, 0xa650), + (0xa651, 0xa652), + (0xa653, 0xa654), + (0xa655, 0xa656), + (0xa657, 0xa658), + (0xa659, 0xa65a), + (0xa65b, 0xa65c), + (0xa65d, 0xa65e), + (0xa65f, 0xa660), + (0xa661, 0xa662), + (0xa663, 0xa664), + (0xa665, 0xa666), + (0xa667, 0xa668), + (0xa669, 0xa66a), + (0xa66b, 0xa66c), + (0xa66d, 0xa670), + (0xa674, 0xa67e), + (0xa67f, 0xa680), + (0xa681, 0xa682), + (0xa683, 0xa684), + (0xa685, 0xa686), + (0xa687, 0xa688), + (0xa689, 0xa68a), + (0xa68b, 0xa68c), + 
(0xa68d, 0xa68e), + (0xa68f, 0xa690), + (0xa691, 0xa692), + (0xa693, 0xa694), + (0xa695, 0xa696), + (0xa697, 0xa698), + (0xa69f, 0xa6e6), + (0xa6f0, 0xa6f2), + (0xa717, 0xa720), + (0xa723, 0xa724), + (0xa725, 0xa726), + (0xa727, 0xa728), + (0xa729, 0xa72a), + (0xa72b, 0xa72c), + (0xa72d, 0xa72e), + (0xa72f, 0xa732), + (0xa733, 0xa734), + (0xa735, 0xa736), + (0xa737, 0xa738), + (0xa739, 0xa73a), + (0xa73b, 0xa73c), + (0xa73d, 0xa73e), + (0xa73f, 0xa740), + (0xa741, 0xa742), + (0xa743, 0xa744), + (0xa745, 0xa746), + (0xa747, 0xa748), + (0xa749, 0xa74a), + (0xa74b, 0xa74c), + (0xa74d, 0xa74e), + (0xa74f, 0xa750), + (0xa751, 0xa752), + (0xa753, 0xa754), + (0xa755, 0xa756), + (0xa757, 0xa758), + (0xa759, 0xa75a), + (0xa75b, 0xa75c), + (0xa75d, 0xa75e), + (0xa75f, 0xa760), + (0xa761, 0xa762), + (0xa763, 0xa764), + (0xa765, 0xa766), + (0xa767, 0xa768), + (0xa769, 0xa76a), + (0xa76b, 0xa76c), + (0xa76d, 0xa76e), + (0xa76f, 0xa770), + (0xa771, 0xa779), + (0xa77a, 0xa77b), + (0xa77c, 0xa77d), + (0xa77f, 0xa780), + (0xa781, 0xa782), + (0xa783, 0xa784), + (0xa785, 0xa786), + (0xa787, 0xa789), + (0xa78c, 0xa78d), + (0xa78e, 0xa78f), + (0xa791, 0xa792), + (0xa793, 0xa794), + (0xa7a1, 0xa7a2), + (0xa7a3, 0xa7a4), + (0xa7a5, 0xa7a6), + (0xa7a7, 0xa7a8), + (0xa7a9, 0xa7aa), + (0xa7fa, 0xa828), + (0xa840, 0xa874), + (0xa880, 0xa8c5), + (0xa8d0, 0xa8da), + (0xa8e0, 0xa8f8), + (0xa8fb, 0xa8fc), + (0xa900, 0xa92e), + (0xa930, 0xa954), + (0xa980, 0xa9c1), + (0xa9cf, 0xa9da), + (0xaa00, 0xaa37), + (0xaa40, 0xaa4e), + (0xaa50, 0xaa5a), + (0xaa60, 0xaa77), + (0xaa7a, 0xaa7c), + (0xaa80, 0xaac3), + (0xaadb, 0xaade), + (0xaae0, 0xaaf0), + (0xaaf2, 0xaaf7), + (0xab01, 0xab07), + (0xab09, 0xab0f), + (0xab11, 0xab17), + (0xab20, 0xab27), + (0xab28, 0xab2f), + (0xabc0, 0xabeb), + (0xabec, 0xabee), + (0xabf0, 0xabfa), + (0xac00, 0xd7a4), + (0xfa0e, 0xfa10), + (0xfa11, 0xfa12), + (0xfa13, 0xfa15), + (0xfa1f, 0xfa20), + (0xfa21, 0xfa22), + (0xfa23, 0xfa25), + (0xfa27, 0xfa2a), + (0xfb1e, 0xfb1f), + (0xfe20, 0xfe27), + (0xfe73, 0xfe74), + (0x10000, 0x1000c), + (0x1000d, 0x10027), + (0x10028, 0x1003b), + (0x1003c, 0x1003e), + (0x1003f, 0x1004e), + (0x10050, 0x1005e), + (0x10080, 0x100fb), + (0x101fd, 0x101fe), + (0x10280, 0x1029d), + (0x102a0, 0x102d1), + (0x10300, 0x1031f), + (0x10330, 0x10341), + (0x10342, 0x1034a), + (0x10380, 0x1039e), + (0x103a0, 0x103c4), + (0x103c8, 0x103d0), + (0x10428, 0x1049e), + (0x104a0, 0x104aa), + (0x10800, 0x10806), + (0x10808, 0x10809), + (0x1080a, 0x10836), + (0x10837, 0x10839), + (0x1083c, 0x1083d), + (0x1083f, 0x10856), + (0x10900, 0x10916), + (0x10920, 0x1093a), + (0x10980, 0x109b8), + (0x109be, 0x109c0), + (0x10a00, 0x10a04), + (0x10a05, 0x10a07), + (0x10a0c, 0x10a14), + (0x10a15, 0x10a18), + (0x10a19, 0x10a34), + (0x10a38, 0x10a3b), + (0x10a3f, 0x10a40), + (0x10a60, 0x10a7d), + (0x10b00, 0x10b36), + (0x10b40, 0x10b56), + (0x10b60, 0x10b73), + (0x10c00, 0x10c49), + (0x11000, 0x11047), + (0x11066, 0x11070), + (0x11080, 0x110bb), + (0x110d0, 0x110e9), + (0x110f0, 0x110fa), + (0x11100, 0x11135), + (0x11136, 0x11140), + (0x11180, 0x111c5), + (0x111d0, 0x111da), + (0x11680, 0x116b8), + (0x116c0, 0x116ca), + (0x12000, 0x1236f), + (0x13000, 0x1342f), + (0x16800, 0x16a39), + (0x16f00, 0x16f45), + (0x16f50, 0x16f7f), + (0x16f8f, 0x16fa0), + (0x1b000, 0x1b002), + (0x20000, 0x2a6d7), + (0x2a700, 0x2b735), + (0x2b740, 0x2b81e), + ), + 'CONTEXTJ': ( + (0x200c, 0x200e), + ), + 'CONTEXTO': ( + (0xb7, 0xb8), + (0x375, 0x376), + (0x5f3, 0x5f5), + (0x660, 0x66a), + (0x6f0, 0x6fa), + (0x30fb, 0x30fc), + ), 
+}
diff --git a/lib/python3.4/site-packages/idna/intranges.py b/lib/python3.4/site-packages/idna/intranges.py
new file mode 100644
index 0000000..ee8a175
--- /dev/null
+++ b/lib/python3.4/site-packages/idna/intranges.py
@@ -0,0 +1,46 @@
+"""
+Given a list of integers, made up of (hopefully) a small number of long runs
+of consecutive integers, compute a representation of the form
+((start1, end1), (start2, end2) ...). Then answer the question "was x present
+in the original list?" in time O(log(# runs)).
+"""
+
+import bisect
+
+def intranges_from_list(list_):
+    """Represent a list of integers as a sequence of ranges:
+    ((start_0, end_0), (start_1, end_1), ...), such that the original
+    integers are exactly those x such that start_i <= x < end_i for some i.
+    """
+
+    sorted_list = sorted(list_)
+    ranges = []
+    last_write = -1
+    for i in range(len(sorted_list)):
+        if i+1 < len(sorted_list):
+            if sorted_list[i] == sorted_list[i+1]-1:
+                continue
+        current_range = sorted_list[last_write+1:i+1]
+        range_tuple = (current_range[0], current_range[-1] + 1)
+        ranges.append(range_tuple)
+        last_write = i
+
+    return tuple(ranges)
+
+
+def intranges_contain(int_, ranges):
+    """Determine if `int_` falls into one of the ranges in `ranges`."""
+    tuple_ = (int_, int_)
+    pos = bisect.bisect_left(ranges, tuple_)
+    # we could be immediately ahead of a tuple (start, end)
+    # with start < int_ <= end
+    if pos > 0:
+        left, right = ranges[pos-1]
+        if left <= int_ < right:
+            return True
+    # or we could be immediately behind a tuple (int_, end)
+    if pos < len(ranges):
+        left, _ = ranges[pos]
+        if left == int_:
+            return True
+    return False
diff --git a/lib/python3.4/site-packages/idna/uts46data.py b/lib/python3.4/site-packages/idna/uts46data.py
new file mode 100644
index 0000000..64e2c68
--- /dev/null
+++ b/lib/python3.4/site-packages/idna/uts46data.py
@@ -0,0 +1,7267 @@
+# This file is automatically generated by tools/build-uts46data.py
+# vim: set fileencoding=utf-8 :
+
+"""IDNA Mapping Table from UTS46."""
+
+uts46data = (
+    (0x0, '3'),
+    (0x1, '3'),
+    (0x2, '3'),
+    (0x3, '3'),
+    (0x4, '3'),
+    (0x5, '3'),
+    (0x6, '3'),
+    (0x7, '3'),
+    (0x8, '3'),
+    (0x9, '3'),
+    (0xA, '3'),
+    (0xB, '3'),
+    (0xC, '3'),
+    (0xD, '3'),
+    (0xE, '3'),
+    (0xF, '3'),
+    (0x10, '3'),
+    (0x11, '3'),
+    (0x12, '3'),
+    (0x13, '3'),
+    (0x14, '3'),
+    (0x15, '3'),
+    (0x16, '3'),
+    (0x17, '3'),
+    (0x18, '3'),
+    (0x19, '3'),
+    (0x1A, '3'),
+    (0x1B, '3'),
+    (0x1C, '3'),
+    (0x1D, '3'),
+    (0x1E, '3'),
+    (0x1F, '3'),
+    (0x20, '3'),
+    (0x21, '3'),
+    (0x22, '3'),
+    (0x23, '3'),
+    (0x24, '3'),
+    (0x25, '3'),
+    (0x26, '3'),
+    (0x27, '3'),
+    (0x28, '3'),
+    (0x29, '3'),
+    (0x2A, '3'),
+    (0x2B, '3'),
+    (0x2C, '3'),
+    (0x2D, 'V'),
+    (0x2E, 'V'),
+    (0x2F, '3'),
+    (0x30, 'V'),
+    (0x31, 'V'),
+    (0x32, 'V'),
+    (0x33, 'V'),
+    (0x34, 'V'),
+    (0x35, 'V'),
+    (0x36, 'V'),
+    (0x37, 'V'),
+    (0x38, 'V'),
+    (0x39, 'V'),
+    (0x3A, '3'),
+    (0x3B, '3'),
+    (0x3C, '3'),
+    (0x3D, '3'),
+    (0x3E, '3'),
+    (0x3F, '3'),
+    (0x40, '3'),
+    (0x41, 'M', u'a'),
+    (0x42, 'M', u'b'),
+    (0x43, 'M', u'c'),
+    (0x44, 'M', u'd'),
+    (0x45, 'M', u'e'),
+    (0x46, 'M', u'f'),
+    (0x47, 'M', u'g'),
+    (0x48, 'M', u'h'),
+    (0x49, 'M', u'i'),
+    (0x4A, 'M', u'j'),
+    (0x4B, 'M', u'k'),
+    (0x4C, 'M', u'l'),
+    (0x4D, 'M', u'm'),
+    (0x4E, 'M', u'n'),
+    (0x4F, 'M', u'o'),
+    (0x50, 'M', u'p'),
+    (0x51, 'M', u'q'),
+    (0x52, 'M', u'r'),
+    (0x53, 'M', u's'),
+    (0x54, 'M', u't'),
+    (0x55, 'M', u'u'),
+    (0x56, 'M', u'v'),
+    (0x57, 'M', u'w'),
+    (0x58, 'M', u'x'),
+    (0x59, 'M', u'y'),
+    (0x5A, 'M', u'z'),
+    (0x5B, '3'),
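
For orientation: the two helpers in intranges.py above are how the vendored idna package turns its large codepoint tables into something queryable — collapse the codepoints once into half-open (start, end) runs, then answer membership with a binary search in O(log(# runs)) per probe. A minimal round-trip sketch, assuming only that the vendored package is importable as `idna.intranges` (adjust the import path to your sys.path):

# Sketch: exercising intranges_from_list / intranges_contain as defined above.
from idna.intranges import intranges_from_list, intranges_contain

# 0x30-0x39 and 0x41-0x5A collapse to two half-open (start, end) runs.
codepoints = list(range(0x30, 0x3A)) + list(range(0x41, 0x5B))
runs = intranges_from_list(codepoints)
assert runs == ((0x30, 0x3A), (0x41, 0x5B))

assert intranges_contain(0x35, runs)        # interior of the first run
assert intranges_contain(0x41, runs)        # a run start, caught by the second branch
assert not intranges_contain(0x3A, runs)    # range ends are exclusive

The probe tuple (int_, int_) sorts just below any real run (int_, end) that starts at the same value, which is why intranges_contain needs the second check for run starts in addition to the look-behind at pos-1.
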
(0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), + (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + (0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', u' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', u' ̈'), + (0xA9, 'V'), + (0xAA, 'M', u'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', u' ̄'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', u'2'), + (0xB3, 'M', u'3'), + (0xB4, '3', u' ́'), + (0xB5, 'M', u'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', u' ̧'), + (0xB9, 'M', u'1'), + (0xBA, 'M', u'o'), + (0xBB, 'V'), + (0xBC, 'M', u'1⁄4'), + (0xBD, 'M', u'1⁄2'), + (0xBE, 'M', u'3⁄4'), + (0xBF, 'V'), + (0xC0, 'M', u'à'), + (0xC1, 'M', u'á'), + (0xC2, 'M', u'â'), + (0xC3, 'M', u'ã'), + (0xC4, 'M', u'ä'), + (0xC5, 'M', u'å'), + (0xC6, 'M', u'æ'), + (0xC7, 'M', u'ç'), + (0xC8, 'M', u'è'), + (0xC9, 'M', u'é'), + (0xCA, 'M', u'ê'), + (0xCB, 'M', u'ë'), + (0xCC, 'M', u'ì'), + (0xCD, 'M', u'í'), + (0xCE, 'M', u'î'), + (0xCF, 'M', u'ï'), + (0xD0, 'M', u'ð'), + (0xD1, 'M', u'ñ'), + (0xD2, 'M', u'ò'), + (0xD3, 'M', u'ó'), + (0xD4, 'M', u'ô'), + (0xD5, 'M', u'õ'), + (0xD6, 'M', u'ö'), + (0xD7, 'V'), + (0xD8, 'M', u'ø'), + (0xD9, 'M', u'ù'), + (0xDA, 'M', u'ú'), + (0xDB, 'M', u'û'), + (0xDC, 'M', u'ü'), + (0xDD, 'M', u'ý'), + (0xDE, 'M', u'þ'), + (0xDF, 'D', u'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + (0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', u'ā'), + (0x101, 'V'), + (0x102, 'M', u'ă'), + (0x103, 'V'), + (0x104, 'M', u'ą'), + (0x105, 'V'), + (0x106, 'M', u'ć'), + (0x107, 'V'), + (0x108, 'M', u'ĉ'), + (0x109, 'V'), + (0x10A, 'M', u'ċ'), + (0x10B, 'V'), + (0x10C, 'M', u'č'), + (0x10D, 'V'), + (0x10E, 'M', u'ď'), + (0x10F, 'V'), + (0x110, 'M', u'đ'), + (0x111, 'V'), + (0x112, 'M', u'ē'), + (0x113, 'V'), + (0x114, 'M', u'ĕ'), + (0x115, 'V'), + (0x116, 'M', u'ė'), + (0x117, 'V'), + (0x118, 'M', u'ę'), + (0x119, 'V'), + (0x11A, 'M', u'ě'), + (0x11B, 'V'), + (0x11C, 'M', u'ĝ'), + (0x11D, 'V'), + (0x11E, 'M', u'ğ'), + (0x11F, 'V'), + (0x120, 'M', u'ġ'), + (0x121, 'V'), + (0x122, 'M', u'ģ'), + (0x123, 'V'), + (0x124, 'M', u'ĥ'), + (0x125, 'V'), + (0x126, 'M', u'ħ'), + (0x127, 'V'), + (0x128, 'M', u'ĩ'), + (0x129, 'V'), + (0x12A, 'M', 
u'ī'), + (0x12B, 'V'), + (0x12C, 'M', u'ĭ'), + (0x12D, 'V'), + (0x12E, 'M', u'į'), + (0x12F, 'V'), + (0x130, 'M', u'i̇'), + (0x131, 'V'), + (0x132, 'M', u'ij'), + (0x134, 'M', u'ĵ'), + (0x135, 'V'), + (0x136, 'M', u'ķ'), + (0x137, 'V'), + (0x139, 'M', u'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', u'ļ'), + (0x13C, 'V'), + (0x13D, 'M', u'ľ'), + (0x13E, 'V'), + (0x13F, 'M', u'l·'), + (0x141, 'M', u'ł'), + (0x142, 'V'), + (0x143, 'M', u'ń'), + (0x144, 'V'), + (0x145, 'M', u'ņ'), + (0x146, 'V'), + (0x147, 'M', u'ň'), + (0x148, 'V'), + (0x149, 'M', u'ʼn'), + (0x14A, 'M', u'ŋ'), + (0x14B, 'V'), + (0x14C, 'M', u'ō'), + (0x14D, 'V'), + (0x14E, 'M', u'ŏ'), + (0x14F, 'V'), + (0x150, 'M', u'ő'), + (0x151, 'V'), + (0x152, 'M', u'œ'), + (0x153, 'V'), + (0x154, 'M', u'ŕ'), + (0x155, 'V'), + (0x156, 'M', u'ŗ'), + (0x157, 'V'), + (0x158, 'M', u'ř'), + (0x159, 'V'), + (0x15A, 'M', u'ś'), + (0x15B, 'V'), + (0x15C, 'M', u'ŝ'), + (0x15D, 'V'), + (0x15E, 'M', u'ş'), + (0x15F, 'V'), + (0x160, 'M', u'š'), + (0x161, 'V'), + (0x162, 'M', u'ţ'), + (0x163, 'V'), + (0x164, 'M', u'ť'), + (0x165, 'V'), + (0x166, 'M', u'ŧ'), + (0x167, 'V'), + (0x168, 'M', u'ũ'), + (0x169, 'V'), + (0x16A, 'M', u'ū'), + (0x16B, 'V'), + (0x16C, 'M', u'ŭ'), + (0x16D, 'V'), + (0x16E, 'M', u'ů'), + (0x16F, 'V'), + (0x170, 'M', u'ű'), + (0x171, 'V'), + (0x172, 'M', u'ų'), + (0x173, 'V'), + (0x174, 'M', u'ŵ'), + (0x175, 'V'), + (0x176, 'M', u'ŷ'), + (0x177, 'V'), + (0x178, 'M', u'ÿ'), + (0x179, 'M', u'ź'), + (0x17A, 'V'), + (0x17B, 'M', u'ż'), + (0x17C, 'V'), + (0x17D, 'M', u'ž'), + (0x17E, 'V'), + (0x17F, 'M', u's'), + (0x180, 'V'), + (0x181, 'M', u'ɓ'), + (0x182, 'M', u'ƃ'), + (0x183, 'V'), + (0x184, 'M', u'ƅ'), + (0x185, 'V'), + (0x186, 'M', u'ɔ'), + (0x187, 'M', u'ƈ'), + (0x188, 'V'), + (0x189, 'M', u'ɖ'), + (0x18A, 'M', u'ɗ'), + (0x18B, 'M', u'ƌ'), + (0x18C, 'V'), + (0x18E, 'M', u'ǝ'), + (0x18F, 'M', u'ə'), + (0x190, 'M', u'ɛ'), + (0x191, 'M', u'ƒ'), + (0x192, 'V'), + (0x193, 'M', u'ɠ'), + (0x194, 'M', u'ɣ'), + (0x195, 'V'), + (0x196, 'M', u'ɩ'), + (0x197, 'M', u'ɨ'), + (0x198, 'M', u'ƙ'), + (0x199, 'V'), + (0x19C, 'M', u'ɯ'), + (0x19D, 'M', u'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', u'ɵ'), + (0x1A0, 'M', u'ơ'), + (0x1A1, 'V'), + (0x1A2, 'M', u'ƣ'), + (0x1A3, 'V'), + (0x1A4, 'M', u'ƥ'), + (0x1A5, 'V'), + (0x1A6, 'M', u'ʀ'), + (0x1A7, 'M', u'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', u'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', u'ƭ'), + (0x1AD, 'V'), + (0x1AE, 'M', u'ʈ'), + (0x1AF, 'M', u'ư'), + (0x1B0, 'V'), + (0x1B1, 'M', u'ʊ'), + (0x1B2, 'M', u'ʋ'), + (0x1B3, 'M', u'ƴ'), + (0x1B4, 'V'), + (0x1B5, 'M', u'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', u'ʒ'), + (0x1B8, 'M', u'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', u'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', u'dž'), + (0x1C7, 'M', u'lj'), + (0x1CA, 'M', u'nj'), + (0x1CD, 'M', u'ǎ'), + (0x1CE, 'V'), + (0x1CF, 'M', u'ǐ'), + (0x1D0, 'V'), + (0x1D1, 'M', u'ǒ'), + (0x1D2, 'V'), + (0x1D3, 'M', u'ǔ'), + (0x1D4, 'V'), + (0x1D5, 'M', u'ǖ'), + (0x1D6, 'V'), + (0x1D7, 'M', u'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', u'ǚ'), + (0x1DA, 'V'), + (0x1DB, 'M', u'ǜ'), + (0x1DC, 'V'), + (0x1DE, 'M', u'ǟ'), + (0x1DF, 'V'), + (0x1E0, 'M', u'ǡ'), + (0x1E1, 'V'), + (0x1E2, 'M', u'ǣ'), + (0x1E3, 'V'), + (0x1E4, 'M', u'ǥ'), + (0x1E5, 'V'), + (0x1E6, 'M', u'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', u'ǩ'), + (0x1E9, 'V'), + (0x1EA, 'M', u'ǫ'), + (0x1EB, 'V'), + (0x1EC, 'M', u'ǭ'), + (0x1ED, 'V'), + (0x1EE, 'M', u'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', u'dz'), + (0x1F4, 'M', u'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', u'ƕ'), + (0x1F7, 'M', u'ƿ'), + (0x1F8, 'M', u'ǹ'), + (0x1F9, 'V'), + 
(0x1FA, 'M', u'ǻ'), + (0x1FB, 'V'), + (0x1FC, 'M', u'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', u'ǿ'), + (0x1FF, 'V'), + (0x200, 'M', u'ȁ'), + (0x201, 'V'), + (0x202, 'M', u'ȃ'), + (0x203, 'V'), + (0x204, 'M', u'ȅ'), + (0x205, 'V'), + (0x206, 'M', u'ȇ'), + (0x207, 'V'), + (0x208, 'M', u'ȉ'), + (0x209, 'V'), + (0x20A, 'M', u'ȋ'), + (0x20B, 'V'), + (0x20C, 'M', u'ȍ'), + (0x20D, 'V'), + (0x20E, 'M', u'ȏ'), + (0x20F, 'V'), + (0x210, 'M', u'ȑ'), + (0x211, 'V'), + (0x212, 'M', u'ȓ'), + (0x213, 'V'), + (0x214, 'M', u'ȕ'), + (0x215, 'V'), + (0x216, 'M', u'ȗ'), + (0x217, 'V'), + (0x218, 'M', u'ș'), + (0x219, 'V'), + (0x21A, 'M', u'ț'), + (0x21B, 'V'), + (0x21C, 'M', u'ȝ'), + (0x21D, 'V'), + (0x21E, 'M', u'ȟ'), + (0x21F, 'V'), + (0x220, 'M', u'ƞ'), + (0x221, 'V'), + (0x222, 'M', u'ȣ'), + (0x223, 'V'), + (0x224, 'M', u'ȥ'), + (0x225, 'V'), + (0x226, 'M', u'ȧ'), + (0x227, 'V'), + (0x228, 'M', u'ȩ'), + (0x229, 'V'), + (0x22A, 'M', u'ȫ'), + (0x22B, 'V'), + (0x22C, 'M', u'ȭ'), + (0x22D, 'V'), + (0x22E, 'M', u'ȯ'), + (0x22F, 'V'), + (0x230, 'M', u'ȱ'), + (0x231, 'V'), + (0x232, 'M', u'ȳ'), + (0x233, 'V'), + (0x23A, 'M', u'ⱥ'), + (0x23B, 'M', u'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', u'ƚ'), + (0x23E, 'M', u'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', u'ɂ'), + (0x242, 'V'), + (0x243, 'M', u'ƀ'), + (0x244, 'M', u'ʉ'), + (0x245, 'M', u'ʌ'), + (0x246, 'M', u'ɇ'), + (0x247, 'V'), + (0x248, 'M', u'ɉ'), + (0x249, 'V'), + (0x24A, 'M', u'ɋ'), + (0x24B, 'V'), + (0x24C, 'M', u'ɍ'), + (0x24D, 'V'), + (0x24E, 'M', u'ɏ'), + (0x24F, 'V'), + (0x2B0, 'M', u'h'), + (0x2B1, 'M', u'ɦ'), + (0x2B2, 'M', u'j'), + (0x2B3, 'M', u'r'), + (0x2B4, 'M', u'ɹ'), + (0x2B5, 'M', u'ɻ'), + (0x2B6, 'M', u'ʁ'), + (0x2B7, 'M', u'w'), + (0x2B8, 'M', u'y'), + (0x2B9, 'V'), + (0x2D8, '3', u' ̆'), + (0x2D9, '3', u' ̇'), + (0x2DA, '3', u' ̊'), + (0x2DB, '3', u' ̨'), + (0x2DC, '3', u' ̃'), + (0x2DD, '3', u' ̋'), + (0x2DE, 'V'), + (0x2E0, 'M', u'ɣ'), + (0x2E1, 'M', u'l'), + (0x2E2, 'M', u's'), + (0x2E3, 'M', u'x'), + (0x2E4, 'M', u'ʕ'), + (0x2E5, 'V'), + (0x340, 'M', u'̀'), + (0x341, 'M', u'́'), + (0x342, 'V'), + (0x343, 'M', u'̓'), + (0x344, 'M', u'̈́'), + (0x345, 'M', u'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', u'ͱ'), + (0x371, 'V'), + (0x372, 'M', u'ͳ'), + (0x373, 'V'), + (0x374, 'M', u'ʹ'), + (0x375, 'V'), + (0x376, 'M', u'ͷ'), + (0x377, 'V'), + (0x378, 'X'), + (0x37A, '3', u' ι'), + (0x37B, 'V'), + (0x37E, '3', u';'), + (0x37F, 'X'), + (0x384, '3', u' ́'), + (0x385, '3', u' ̈́'), + (0x386, 'M', u'ά'), + (0x387, 'M', u'·'), + (0x388, 'M', u'έ'), + (0x389, 'M', u'ή'), + (0x38A, 'M', u'ί'), + (0x38B, 'X'), + (0x38C, 'M', u'ό'), + (0x38D, 'X'), + (0x38E, 'M', u'ύ'), + (0x38F, 'M', u'ώ'), + (0x390, 'V'), + (0x391, 'M', u'α'), + (0x392, 'M', u'β'), + (0x393, 'M', u'γ'), + (0x394, 'M', u'δ'), + (0x395, 'M', u'ε'), + (0x396, 'M', u'ζ'), + (0x397, 'M', u'η'), + (0x398, 'M', u'θ'), + (0x399, 'M', u'ι'), + (0x39A, 'M', u'κ'), + (0x39B, 'M', u'λ'), + (0x39C, 'M', u'μ'), + (0x39D, 'M', u'ν'), + (0x39E, 'M', u'ξ'), + (0x39F, 'M', u'ο'), + (0x3A0, 'M', u'π'), + (0x3A1, 'M', u'ρ'), + (0x3A2, 'X'), + (0x3A3, 'M', u'σ'), + (0x3A4, 'M', u'τ'), + (0x3A5, 'M', u'υ'), + (0x3A6, 'M', u'φ'), + (0x3A7, 'M', u'χ'), + (0x3A8, 'M', u'ψ'), + (0x3A9, 'M', u'ω'), + (0x3AA, 'M', u'ϊ'), + (0x3AB, 'M', u'ϋ'), + (0x3AC, 'V'), + (0x3C2, 'D', u'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', u'ϗ'), + (0x3D0, 'M', u'β'), + (0x3D1, 'M', u'θ'), + (0x3D2, 'M', u'υ'), + (0x3D3, 'M', u'ύ'), + (0x3D4, 'M', u'ϋ'), + (0x3D5, 'M', u'φ'), + (0x3D6, 'M', u'π'), + (0x3D7, 'V'), + (0x3D8, 'M', u'ϙ'), 
+ (0x3D9, 'V'), + (0x3DA, 'M', u'ϛ'), + (0x3DB, 'V'), + (0x3DC, 'M', u'ϝ'), + (0x3DD, 'V'), + (0x3DE, 'M', u'ϟ'), + (0x3DF, 'V'), + (0x3E0, 'M', u'ϡ'), + (0x3E1, 'V'), + (0x3E2, 'M', u'ϣ'), + (0x3E3, 'V'), + (0x3E4, 'M', u'ϥ'), + (0x3E5, 'V'), + (0x3E6, 'M', u'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', u'ϩ'), + (0x3E9, 'V'), + (0x3EA, 'M', u'ϫ'), + (0x3EB, 'V'), + (0x3EC, 'M', u'ϭ'), + (0x3ED, 'V'), + (0x3EE, 'M', u'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', u'κ'), + (0x3F1, 'M', u'ρ'), + (0x3F2, 'M', u'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', u'θ'), + (0x3F5, 'M', u'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', u'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', u'σ'), + (0x3FA, 'M', u'ϻ'), + (0x3FB, 'V'), + (0x3FD, 'M', u'ͻ'), + (0x3FE, 'M', u'ͼ'), + (0x3FF, 'M', u'ͽ'), + (0x400, 'M', u'ѐ'), + (0x401, 'M', u'ё'), + (0x402, 'M', u'ђ'), + (0x403, 'M', u'ѓ'), + (0x404, 'M', u'є'), + (0x405, 'M', u'ѕ'), + (0x406, 'M', u'і'), + (0x407, 'M', u'ї'), + (0x408, 'M', u'ј'), + (0x409, 'M', u'љ'), + (0x40A, 'M', u'њ'), + (0x40B, 'M', u'ћ'), + (0x40C, 'M', u'ќ'), + (0x40D, 'M', u'ѝ'), + (0x40E, 'M', u'ў'), + (0x40F, 'M', u'џ'), + (0x410, 'M', u'а'), + (0x411, 'M', u'б'), + (0x412, 'M', u'в'), + (0x413, 'M', u'г'), + (0x414, 'M', u'д'), + (0x415, 'M', u'е'), + (0x416, 'M', u'ж'), + (0x417, 'M', u'з'), + (0x418, 'M', u'и'), + (0x419, 'M', u'й'), + (0x41A, 'M', u'к'), + (0x41B, 'M', u'л'), + (0x41C, 'M', u'м'), + (0x41D, 'M', u'н'), + (0x41E, 'M', u'о'), + (0x41F, 'M', u'п'), + (0x420, 'M', u'р'), + (0x421, 'M', u'с'), + (0x422, 'M', u'т'), + (0x423, 'M', u'у'), + (0x424, 'M', u'ф'), + (0x425, 'M', u'х'), + (0x426, 'M', u'ц'), + (0x427, 'M', u'ч'), + (0x428, 'M', u'ш'), + (0x429, 'M', u'щ'), + (0x42A, 'M', u'ъ'), + (0x42B, 'M', u'ы'), + (0x42C, 'M', u'ь'), + (0x42D, 'M', u'э'), + (0x42E, 'M', u'ю'), + (0x42F, 'M', u'я'), + (0x430, 'V'), + (0x460, 'M', u'ѡ'), + (0x461, 'V'), + (0x462, 'M', u'ѣ'), + (0x463, 'V'), + (0x464, 'M', u'ѥ'), + (0x465, 'V'), + (0x466, 'M', u'ѧ'), + (0x467, 'V'), + (0x468, 'M', u'ѩ'), + (0x469, 'V'), + (0x46A, 'M', u'ѫ'), + (0x46B, 'V'), + (0x46C, 'M', u'ѭ'), + (0x46D, 'V'), + (0x46E, 'M', u'ѯ'), + (0x46F, 'V'), + (0x470, 'M', u'ѱ'), + (0x471, 'V'), + (0x472, 'M', u'ѳ'), + (0x473, 'V'), + (0x474, 'M', u'ѵ'), + (0x475, 'V'), + (0x476, 'M', u'ѷ'), + (0x477, 'V'), + (0x478, 'M', u'ѹ'), + (0x479, 'V'), + (0x47A, 'M', u'ѻ'), + (0x47B, 'V'), + (0x47C, 'M', u'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', u'ѿ'), + (0x47F, 'V'), + (0x480, 'M', u'ҁ'), + (0x481, 'V'), + (0x48A, 'M', u'ҋ'), + (0x48B, 'V'), + (0x48C, 'M', u'ҍ'), + (0x48D, 'V'), + (0x48E, 'M', u'ҏ'), + (0x48F, 'V'), + (0x490, 'M', u'ґ'), + (0x491, 'V'), + (0x492, 'M', u'ғ'), + (0x493, 'V'), + (0x494, 'M', u'ҕ'), + (0x495, 'V'), + (0x496, 'M', u'җ'), + (0x497, 'V'), + (0x498, 'M', u'ҙ'), + (0x499, 'V'), + (0x49A, 'M', u'қ'), + (0x49B, 'V'), + (0x49C, 'M', u'ҝ'), + (0x49D, 'V'), + (0x49E, 'M', u'ҟ'), + (0x49F, 'V'), + (0x4A0, 'M', u'ҡ'), + (0x4A1, 'V'), + (0x4A2, 'M', u'ң'), + (0x4A3, 'V'), + (0x4A4, 'M', u'ҥ'), + (0x4A5, 'V'), + (0x4A6, 'M', u'ҧ'), + (0x4A7, 'V'), + (0x4A8, 'M', u'ҩ'), + (0x4A9, 'V'), + (0x4AA, 'M', u'ҫ'), + (0x4AB, 'V'), + (0x4AC, 'M', u'ҭ'), + (0x4AD, 'V'), + (0x4AE, 'M', u'ү'), + (0x4AF, 'V'), + (0x4B0, 'M', u'ұ'), + (0x4B1, 'V'), + (0x4B2, 'M', u'ҳ'), + (0x4B3, 'V'), + (0x4B4, 'M', u'ҵ'), + (0x4B5, 'V'), + (0x4B6, 'M', u'ҷ'), + (0x4B7, 'V'), + (0x4B8, 'M', u'ҹ'), + (0x4B9, 'V'), + (0x4BA, 'M', u'һ'), + (0x4BB, 'V'), + (0x4BC, 'M', u'ҽ'), + (0x4BD, 'V'), + (0x4BE, 'M', u'ҿ'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', u'ӂ'), + (0x4C2, 'V'), + (0x4C3, 'M', 
u'ӄ'), + (0x4C4, 'V'), + (0x4C5, 'M', u'ӆ'), + (0x4C6, 'V'), + (0x4C7, 'M', u'ӈ'), + (0x4C8, 'V'), + (0x4C9, 'M', u'ӊ'), + (0x4CA, 'V'), + (0x4CB, 'M', u'ӌ'), + (0x4CC, 'V'), + (0x4CD, 'M', u'ӎ'), + (0x4CE, 'V'), + (0x4D0, 'M', u'ӑ'), + (0x4D1, 'V'), + (0x4D2, 'M', u'ӓ'), + (0x4D3, 'V'), + (0x4D4, 'M', u'ӕ'), + (0x4D5, 'V'), + (0x4D6, 'M', u'ӗ'), + (0x4D7, 'V'), + (0x4D8, 'M', u'ә'), + (0x4D9, 'V'), + (0x4DA, 'M', u'ӛ'), + (0x4DB, 'V'), + (0x4DC, 'M', u'ӝ'), + (0x4DD, 'V'), + (0x4DE, 'M', u'ӟ'), + (0x4DF, 'V'), + (0x4E0, 'M', u'ӡ'), + (0x4E1, 'V'), + (0x4E2, 'M', u'ӣ'), + (0x4E3, 'V'), + (0x4E4, 'M', u'ӥ'), + (0x4E5, 'V'), + (0x4E6, 'M', u'ӧ'), + (0x4E7, 'V'), + (0x4E8, 'M', u'ө'), + (0x4E9, 'V'), + (0x4EA, 'M', u'ӫ'), + (0x4EB, 'V'), + (0x4EC, 'M', u'ӭ'), + (0x4ED, 'V'), + (0x4EE, 'M', u'ӯ'), + (0x4EF, 'V'), + (0x4F0, 'M', u'ӱ'), + (0x4F1, 'V'), + (0x4F2, 'M', u'ӳ'), + (0x4F3, 'V'), + (0x4F4, 'M', u'ӵ'), + (0x4F5, 'V'), + (0x4F6, 'M', u'ӷ'), + (0x4F7, 'V'), + (0x4F8, 'M', u'ӹ'), + (0x4F9, 'V'), + (0x4FA, 'M', u'ӻ'), + (0x4FB, 'V'), + (0x4FC, 'M', u'ӽ'), + (0x4FD, 'V'), + (0x4FE, 'M', u'ӿ'), + (0x4FF, 'V'), + (0x500, 'M', u'ԁ'), + (0x501, 'V'), + (0x502, 'M', u'ԃ'), + (0x503, 'V'), + (0x504, 'M', u'ԅ'), + (0x505, 'V'), + (0x506, 'M', u'ԇ'), + (0x507, 'V'), + (0x508, 'M', u'ԉ'), + (0x509, 'V'), + (0x50A, 'M', u'ԋ'), + (0x50B, 'V'), + (0x50C, 'M', u'ԍ'), + (0x50D, 'V'), + (0x50E, 'M', u'ԏ'), + (0x50F, 'V'), + (0x510, 'M', u'ԑ'), + (0x511, 'V'), + (0x512, 'M', u'ԓ'), + (0x513, 'V'), + (0x514, 'M', u'ԕ'), + (0x515, 'V'), + (0x516, 'M', u'ԗ'), + (0x517, 'V'), + (0x518, 'M', u'ԙ'), + (0x519, 'V'), + (0x51A, 'M', u'ԛ'), + (0x51B, 'V'), + (0x51C, 'M', u'ԝ'), + (0x51D, 'V'), + (0x51E, 'M', u'ԟ'), + (0x51F, 'V'), + (0x520, 'M', u'ԡ'), + (0x521, 'V'), + (0x522, 'M', u'ԣ'), + (0x523, 'V'), + (0x524, 'M', u'ԥ'), + (0x525, 'V'), + (0x526, 'M', u'ԧ'), + (0x527, 'V'), + (0x528, 'X'), + (0x531, 'M', u'ա'), + (0x532, 'M', u'բ'), + (0x533, 'M', u'գ'), + (0x534, 'M', u'դ'), + (0x535, 'M', u'ե'), + (0x536, 'M', u'զ'), + (0x537, 'M', u'է'), + (0x538, 'M', u'ը'), + (0x539, 'M', u'թ'), + (0x53A, 'M', u'ժ'), + (0x53B, 'M', u'ի'), + (0x53C, 'M', u'լ'), + (0x53D, 'M', u'խ'), + (0x53E, 'M', u'ծ'), + (0x53F, 'M', u'կ'), + (0x540, 'M', u'հ'), + (0x541, 'M', u'ձ'), + (0x542, 'M', u'ղ'), + (0x543, 'M', u'ճ'), + (0x544, 'M', u'մ'), + (0x545, 'M', u'յ'), + (0x546, 'M', u'ն'), + (0x547, 'M', u'շ'), + (0x548, 'M', u'ո'), + (0x549, 'M', u'չ'), + (0x54A, 'M', u'պ'), + (0x54B, 'M', u'ջ'), + (0x54C, 'M', u'ռ'), + (0x54D, 'M', u'ս'), + (0x54E, 'M', u'վ'), + (0x54F, 'M', u'տ'), + (0x550, 'M', u'ր'), + (0x551, 'M', u'ց'), + (0x552, 'M', u'ւ'), + (0x553, 'M', u'փ'), + (0x554, 'M', u'ք'), + (0x555, 'M', u'օ'), + (0x556, 'M', u'ֆ'), + (0x557, 'X'), + (0x559, 'V'), + (0x560, 'X'), + (0x561, 'V'), + (0x587, 'M', u'եւ'), + (0x588, 'X'), + (0x589, 'V'), + (0x58B, 'X'), + (0x58F, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 'V'), + (0x5EB, 'X'), + (0x5F0, 'V'), + (0x5F5, 'X'), + (0x606, 'V'), + (0x61C, 'X'), + (0x61E, 'V'), + (0x675, 'M', u'اٴ'), + (0x676, 'M', u'وٴ'), + (0x677, 'M', u'ۇٴ'), + (0x678, 'M', u'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x800, 'V'), + (0x82E, 'X'), + (0x830, 'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x8A0, 'V'), + (0x8A1, 'X'), + (0x8A2, 'V'), + (0x8AD, 'X'), + (0x8E4, 'V'), + (0x8FF, 'X'), + (0x900, 'V'), + (0x958, 'M', 
u'क़'), + (0x959, 'M', u'ख़'), + (0x95A, 'M', u'ग़'), + (0x95B, 'M', u'ज़'), + (0x95C, 'M', u'ड़'), + (0x95D, 'M', u'ढ़'), + (0x95E, 'M', u'फ़'), + (0x95F, 'M', u'य़'), + (0x960, 'V'), + (0x978, 'X'), + (0x979, 'V'), + (0x980, 'X'), + (0x981, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', u'ড়'), + (0x9DD, 'M', u'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', u'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FC, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', u'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + (0xA36, 'M', u'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + (0xA59, 'M', u'ਖ਼'), + (0xA5A, 'M', u'ਗ਼'), + (0xA5B, 'M', u'ਜ਼'), + (0xA5C, 'V'), + (0xA5D, 'X'), + (0xA5E, 'M', u'ਫ਼'), + (0xA5F, 'X'), + (0xA66, 'V'), + (0xA76, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + (0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + (0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB56, 'V'), + (0xB58, 'X'), + (0xB5C, 'M', u'ଡ଼'), + (0xB5D, 'M', u'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 'V'), + (0xB9B, 'X'), + (0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + (0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + (0xBFB, 'X'), + (0xC01, 'V'), + (0xC04, 'X'), + (0xC05, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 'V'), + (0xC29, 'X'), + (0xC2A, 'V'), + (0xC34, 'X'), + (0xC35, 'V'), + (0xC3A, 'X'), + (0xC3D, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5A, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC78, 'V'), + (0xC80, 'X'), + (0xC82, 'V'), + (0xC84, 'X'), + (0xC85, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDE, 
'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF3, 'X'), + (0xD02, 'V'), + (0xD04, 'X'), + (0xD05, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD3B, 'X'), + (0xD3D, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD4F, 'X'), + (0xD57, 'V'), + (0xD58, 'X'), + (0xD60, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD76, 'X'), + (0xD79, 'V'), + (0xD80, 'X'), + (0xD82, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', u'ํา'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + (0xE87, 'V'), + (0xE89, 'X'), + (0xE8A, 'V'), + (0xE8B, 'X'), + (0xE8D, 'V'), + (0xE8E, 'X'), + (0xE94, 'V'), + (0xE98, 'X'), + (0xE99, 'V'), + (0xEA0, 'X'), + (0xEA1, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEA8, 'X'), + (0xEAA, 'V'), + (0xEAC, 'X'), + (0xEAD, 'V'), + (0xEB3, 'M', u'ໍາ'), + (0xEB4, 'V'), + (0xEBA, 'X'), + (0xEBB, 'V'), + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECE, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', u'ຫນ'), + (0xEDD, 'M', u'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', u'་'), + (0xF0D, 'V'), + (0xF43, 'M', u'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', u'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', u'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', u'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', u'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', u'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', u'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', u'ཱུ'), + (0xF76, 'M', u'ྲྀ'), + (0xF77, 'M', u'ྲཱྀ'), + (0xF78, 'M', u'ླྀ'), + (0xF79, 'M', u'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', u'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', u'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', u'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', u'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', u'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', u'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', u'ྐྵ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', u'ⴧ'), + (0x10C8, 'X'), + (0x10CD, 'M', u'ⴭ'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', u'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + (0x1258, 'V'), + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F5, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F1, 'X'), + (0x1700, 'V'), + (0x170D, 'X'), + (0x170E, 'V'), + (0x1715, 'X'), + (0x1720, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + 
(0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1878, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191D, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1B00, 'V'), + (0x1B4C, 'X'), + (0x1B50, 'V'), + (0x1B7D, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + (0x1C80, 'X'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CF7, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', u'a'), + (0x1D2D, 'M', u'æ'), + (0x1D2E, 'M', u'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', u'd'), + (0x1D31, 'M', u'e'), + (0x1D32, 'M', u'ǝ'), + (0x1D33, 'M', u'g'), + (0x1D34, 'M', u'h'), + (0x1D35, 'M', u'i'), + (0x1D36, 'M', u'j'), + (0x1D37, 'M', u'k'), + (0x1D38, 'M', u'l'), + (0x1D39, 'M', u'm'), + (0x1D3A, 'M', u'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', u'o'), + (0x1D3D, 'M', u'ȣ'), + (0x1D3E, 'M', u'p'), + (0x1D3F, 'M', u'r'), + (0x1D40, 'M', u't'), + (0x1D41, 'M', u'u'), + (0x1D42, 'M', u'w'), + (0x1D43, 'M', u'a'), + (0x1D44, 'M', u'ɐ'), + (0x1D45, 'M', u'ɑ'), + (0x1D46, 'M', u'ᴂ'), + (0x1D47, 'M', u'b'), + (0x1D48, 'M', u'd'), + (0x1D49, 'M', u'e'), + (0x1D4A, 'M', u'ə'), + (0x1D4B, 'M', u'ɛ'), + (0x1D4C, 'M', u'ɜ'), + (0x1D4D, 'M', u'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', u'k'), + (0x1D50, 'M', u'm'), + (0x1D51, 'M', u'ŋ'), + (0x1D52, 'M', u'o'), + (0x1D53, 'M', u'ɔ'), + (0x1D54, 'M', u'ᴖ'), + (0x1D55, 'M', u'ᴗ'), + (0x1D56, 'M', u'p'), + (0x1D57, 'M', u't'), + (0x1D58, 'M', u'u'), + (0x1D59, 'M', u'ᴝ'), + (0x1D5A, 'M', u'ɯ'), + (0x1D5B, 'M', u'v'), + (0x1D5C, 'M', u'ᴥ'), + (0x1D5D, 'M', u'β'), + (0x1D5E, 'M', u'γ'), + (0x1D5F, 'M', u'δ'), + (0x1D60, 'M', u'φ'), + (0x1D61, 'M', u'χ'), + (0x1D62, 'M', u'i'), + (0x1D63, 'M', u'r'), + (0x1D64, 'M', u'u'), + (0x1D65, 'M', u'v'), + (0x1D66, 'M', u'β'), + (0x1D67, 'M', u'γ'), + (0x1D68, 'M', u'ρ'), + (0x1D69, 'M', u'φ'), + (0x1D6A, 'M', u'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', u'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', u'ɒ'), + (0x1D9C, 'M', u'c'), + (0x1D9D, 'M', u'ɕ'), + (0x1D9E, 'M', u'ð'), + (0x1D9F, 'M', u'ɜ'), + (0x1DA0, 'M', u'f'), + (0x1DA1, 'M', u'ɟ'), + (0x1DA2, 'M', u'ɡ'), + (0x1DA3, 'M', u'ɥ'), + (0x1DA4, 'M', u'ɨ'), + (0x1DA5, 'M', u'ɩ'), + (0x1DA6, 'M', u'ɪ'), + (0x1DA7, 'M', u'ᵻ'), + (0x1DA8, 'M', u'ʝ'), + (0x1DA9, 'M', u'ɭ'), + (0x1DAA, 'M', u'ᶅ'), + (0x1DAB, 'M', u'ʟ'), + (0x1DAC, 'M', u'ɱ'), + (0x1DAD, 'M', u'ɰ'), + (0x1DAE, 'M', u'ɲ'), + (0x1DAF, 'M', u'ɳ'), + (0x1DB0, 'M', u'ɴ'), + (0x1DB1, 'M', u'ɵ'), + (0x1DB2, 'M', u'ɸ'), + (0x1DB3, 'M', u'ʂ'), + (0x1DB4, 'M', u'ʃ'), + (0x1DB5, 'M', u'ƫ'), + (0x1DB6, 'M', u'ʉ'), + (0x1DB7, 'M', u'ʊ'), + (0x1DB8, 'M', u'ᴜ'), + (0x1DB9, 'M', u'ʋ'), + (0x1DBA, 'M', u'ʌ'), + (0x1DBB, 'M', u'z'), + (0x1DBC, 'M', u'ʐ'), + (0x1DBD, 
'M', u'ʑ'), + (0x1DBE, 'M', u'ʒ'), + (0x1DBF, 'M', u'θ'), + (0x1DC0, 'V'), + (0x1DE7, 'X'), + (0x1DFC, 'V'), + (0x1E00, 'M', u'ḁ'), + (0x1E01, 'V'), + (0x1E02, 'M', u'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', u'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', u'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', u'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', u'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', u'ḍ'), + (0x1E0D, 'V'), + (0x1E0E, 'M', u'ḏ'), + (0x1E0F, 'V'), + (0x1E10, 'M', u'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', u'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', u'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', u'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', u'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', u'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', u'ḝ'), + (0x1E1D, 'V'), + (0x1E1E, 'M', u'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', u'ḡ'), + (0x1E21, 'V'), + (0x1E22, 'M', u'ḣ'), + (0x1E23, 'V'), + (0x1E24, 'M', u'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', u'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', u'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', u'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', u'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', u'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', u'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', u'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', u'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', u'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', u'ḹ'), + (0x1E39, 'V'), + (0x1E3A, 'M', u'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', u'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', u'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', u'ṁ'), + (0x1E41, 'V'), + (0x1E42, 'M', u'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', u'ṅ'), + (0x1E45, 'V'), + (0x1E46, 'M', u'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', u'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', u'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', u'ṍ'), + (0x1E4D, 'V'), + (0x1E4E, 'M', u'ṏ'), + (0x1E4F, 'V'), + (0x1E50, 'M', u'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', u'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', u'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', u'ṗ'), + (0x1E57, 'V'), + (0x1E58, 'M', u'ṙ'), + (0x1E59, 'V'), + (0x1E5A, 'M', u'ṛ'), + (0x1E5B, 'V'), + (0x1E5C, 'M', u'ṝ'), + (0x1E5D, 'V'), + (0x1E5E, 'M', u'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', u'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', u'ṣ'), + (0x1E63, 'V'), + (0x1E64, 'M', u'ṥ'), + (0x1E65, 'V'), + (0x1E66, 'M', u'ṧ'), + (0x1E67, 'V'), + (0x1E68, 'M', u'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', u'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', u'ṭ'), + (0x1E6D, 'V'), + (0x1E6E, 'M', u'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', u'ṱ'), + (0x1E71, 'V'), + (0x1E72, 'M', u'ṳ'), + (0x1E73, 'V'), + (0x1E74, 'M', u'ṵ'), + (0x1E75, 'V'), + (0x1E76, 'M', u'ṷ'), + (0x1E77, 'V'), + (0x1E78, 'M', u'ṹ'), + (0x1E79, 'V'), + (0x1E7A, 'M', u'ṻ'), + (0x1E7B, 'V'), + (0x1E7C, 'M', u'ṽ'), + (0x1E7D, 'V'), + (0x1E7E, 'M', u'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', u'ẁ'), + (0x1E81, 'V'), + (0x1E82, 'M', u'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', u'ẅ'), + (0x1E85, 'V'), + (0x1E86, 'M', u'ẇ'), + (0x1E87, 'V'), + (0x1E88, 'M', u'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', u'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', u'ẍ'), + (0x1E8D, 'V'), + (0x1E8E, 'M', u'ẏ'), + (0x1E8F, 'V'), + (0x1E90, 'M', u'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', u'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', u'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', u'aʾ'), + (0x1E9B, 'M', u'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', u'ss'), + (0x1E9F, 'V'), + (0x1EA0, 'M', u'ạ'), + (0x1EA1, 'V'), + (0x1EA2, 'M', u'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', u'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', u'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', u'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', u'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', u'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', u'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', 
u'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', u'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', u'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', u'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', u'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', u'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', u'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', u'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', u'ề'), + (0x1EC1, 'V'), + (0x1EC2, 'M', u'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', u'ễ'), + (0x1EC5, 'V'), + (0x1EC6, 'M', u'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', u'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', u'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', u'ọ'), + (0x1ECD, 'V'), + (0x1ECE, 'M', u'ỏ'), + (0x1ECF, 'V'), + (0x1ED0, 'M', u'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', u'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', u'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', u'ỗ'), + (0x1ED7, 'V'), + (0x1ED8, 'M', u'ộ'), + (0x1ED9, 'V'), + (0x1EDA, 'M', u'ớ'), + (0x1EDB, 'V'), + (0x1EDC, 'M', u'ờ'), + (0x1EDD, 'V'), + (0x1EDE, 'M', u'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', u'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', u'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', u'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', u'ủ'), + (0x1EE7, 'V'), + (0x1EE8, 'M', u'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', u'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', u'ử'), + (0x1EED, 'V'), + (0x1EEE, 'M', u'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', u'ự'), + (0x1EF1, 'V'), + (0x1EF2, 'M', u'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', u'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', u'ỷ'), + (0x1EF7, 'V'), + (0x1EF8, 'M', u'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', u'ỻ'), + (0x1EFB, 'V'), + (0x1EFC, 'M', u'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', u'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', u'ἀ'), + (0x1F09, 'M', u'ἁ'), + (0x1F0A, 'M', u'ἂ'), + (0x1F0B, 'M', u'ἃ'), + (0x1F0C, 'M', u'ἄ'), + (0x1F0D, 'M', u'ἅ'), + (0x1F0E, 'M', u'ἆ'), + (0x1F0F, 'M', u'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', u'ἐ'), + (0x1F19, 'M', u'ἑ'), + (0x1F1A, 'M', u'ἒ'), + (0x1F1B, 'M', u'ἓ'), + (0x1F1C, 'M', u'ἔ'), + (0x1F1D, 'M', u'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', u'ἠ'), + (0x1F29, 'M', u'ἡ'), + (0x1F2A, 'M', u'ἢ'), + (0x1F2B, 'M', u'ἣ'), + (0x1F2C, 'M', u'ἤ'), + (0x1F2D, 'M', u'ἥ'), + (0x1F2E, 'M', u'ἦ'), + (0x1F2F, 'M', u'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', u'ἰ'), + (0x1F39, 'M', u'ἱ'), + (0x1F3A, 'M', u'ἲ'), + (0x1F3B, 'M', u'ἳ'), + (0x1F3C, 'M', u'ἴ'), + (0x1F3D, 'M', u'ἵ'), + (0x1F3E, 'M', u'ἶ'), + (0x1F3F, 'M', u'ἷ'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', u'ὀ'), + (0x1F49, 'M', u'ὁ'), + (0x1F4A, 'M', u'ὂ'), + (0x1F4B, 'M', u'ὃ'), + (0x1F4C, 'M', u'ὄ'), + (0x1F4D, 'M', u'ὅ'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + (0x1F58, 'X'), + (0x1F59, 'M', u'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', u'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', u'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', u'ὗ'), + (0x1F60, 'V'), + (0x1F68, 'M', u'ὠ'), + (0x1F69, 'M', u'ὡ'), + (0x1F6A, 'M', u'ὢ'), + (0x1F6B, 'M', u'ὣ'), + (0x1F6C, 'M', u'ὤ'), + (0x1F6D, 'M', u'ὥ'), + (0x1F6E, 'M', u'ὦ'), + (0x1F6F, 'M', u'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', u'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', u'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', u'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', u'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', u'ό'), + (0x1F7A, 'V'), + (0x1F7B, 'M', u'ύ'), + (0x1F7C, 'V'), + (0x1F7D, 'M', u'ώ'), + (0x1F7E, 'X'), + (0x1F80, 'M', u'ἀι'), + (0x1F81, 'M', u'ἁι'), + (0x1F82, 'M', u'ἂι'), + (0x1F83, 'M', u'ἃι'), + (0x1F84, 'M', u'ἄι'), + (0x1F85, 'M', u'ἅι'), + (0x1F86, 'M', u'ἆι'), + (0x1F87, 'M', u'ἇι'), + (0x1F88, 'M', u'ἀι'), + (0x1F89, 'M', u'ἁι'), + (0x1F8A, 'M', u'ἂι'), + (0x1F8B, 'M', u'ἃι'), + (0x1F8C, 'M', u'ἄι'), + (0x1F8D, 'M', 
u'ἅι'), + (0x1F8E, 'M', u'ἆι'), + (0x1F8F, 'M', u'ἇι'), + (0x1F90, 'M', u'ἠι'), + (0x1F91, 'M', u'ἡι'), + (0x1F92, 'M', u'ἢι'), + (0x1F93, 'M', u'ἣι'), + (0x1F94, 'M', u'ἤι'), + (0x1F95, 'M', u'ἥι'), + (0x1F96, 'M', u'ἦι'), + (0x1F97, 'M', u'ἧι'), + (0x1F98, 'M', u'ἠι'), + (0x1F99, 'M', u'ἡι'), + (0x1F9A, 'M', u'ἢι'), + (0x1F9B, 'M', u'ἣι'), + (0x1F9C, 'M', u'ἤι'), + (0x1F9D, 'M', u'ἥι'), + (0x1F9E, 'M', u'ἦι'), + (0x1F9F, 'M', u'ἧι'), + (0x1FA0, 'M', u'ὠι'), + (0x1FA1, 'M', u'ὡι'), + (0x1FA2, 'M', u'ὢι'), + (0x1FA3, 'M', u'ὣι'), + (0x1FA4, 'M', u'ὤι'), + (0x1FA5, 'M', u'ὥι'), + (0x1FA6, 'M', u'ὦι'), + (0x1FA7, 'M', u'ὧι'), + (0x1FA8, 'M', u'ὠι'), + (0x1FA9, 'M', u'ὡι'), + (0x1FAA, 'M', u'ὢι'), + (0x1FAB, 'M', u'ὣι'), + (0x1FAC, 'M', u'ὤι'), + (0x1FAD, 'M', u'ὥι'), + (0x1FAE, 'M', u'ὦι'), + (0x1FAF, 'M', u'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', u'ὰι'), + (0x1FB3, 'M', u'αι'), + (0x1FB4, 'M', u'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', u'ᾶι'), + (0x1FB8, 'M', u'ᾰ'), + (0x1FB9, 'M', u'ᾱ'), + (0x1FBA, 'M', u'ὰ'), + (0x1FBB, 'M', u'ά'), + (0x1FBC, 'M', u'αι'), + (0x1FBD, '3', u' ̓'), + (0x1FBE, 'M', u'ι'), + (0x1FBF, '3', u' ̓'), + (0x1FC0, '3', u' ͂'), + (0x1FC1, '3', u' ̈͂'), + (0x1FC2, 'M', u'ὴι'), + (0x1FC3, 'M', u'ηι'), + (0x1FC4, 'M', u'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', u'ῆι'), + (0x1FC8, 'M', u'ὲ'), + (0x1FC9, 'M', u'έ'), + (0x1FCA, 'M', u'ὴ'), + (0x1FCB, 'M', u'ή'), + (0x1FCC, 'M', u'ηι'), + (0x1FCD, '3', u' ̓̀'), + (0x1FCE, '3', u' ̓́'), + (0x1FCF, '3', u' ̓͂'), + (0x1FD0, 'V'), + (0x1FD3, 'M', u'ΐ'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', u'ῐ'), + (0x1FD9, 'M', u'ῑ'), + (0x1FDA, 'M', u'ὶ'), + (0x1FDB, 'M', u'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', u' ̔̀'), + (0x1FDE, '3', u' ̔́'), + (0x1FDF, '3', u' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', u'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', u'ῠ'), + (0x1FE9, 'M', u'ῡ'), + (0x1FEA, 'M', u'ὺ'), + (0x1FEB, 'M', u'ύ'), + (0x1FEC, 'M', u'ῥ'), + (0x1FED, '3', u' ̈̀'), + (0x1FEE, '3', u' ̈́'), + (0x1FEF, '3', u'`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', u'ὼι'), + (0x1FF3, 'M', u'ωι'), + (0x1FF4, 'M', u'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + (0x1FF7, 'M', u'ῶι'), + (0x1FF8, 'M', u'ὸ'), + (0x1FF9, 'M', u'ό'), + (0x1FFA, 'M', u'ὼ'), + (0x1FFB, 'M', u'ώ'), + (0x1FFC, 'M', u'ωι'), + (0x1FFD, '3', u' ́'), + (0x1FFE, '3', u' ̔'), + (0x1FFF, 'X'), + (0x2000, '3', u' '), + (0x200B, 'I'), + (0x200C, 'D', u''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', u'‐'), + (0x2012, 'V'), + (0x2017, '3', u' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + (0x202F, '3', u' '), + (0x2030, 'V'), + (0x2033, 'M', u'′′'), + (0x2034, 'M', u'′′′'), + (0x2035, 'V'), + (0x2036, 'M', u'‵‵'), + (0x2037, 'M', u'‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', u'!!'), + (0x203D, 'V'), + (0x203E, '3', u' ̅'), + (0x203F, 'V'), + (0x2047, '3', u'??'), + (0x2048, '3', u'?!'), + (0x2049, '3', u'!?'), + (0x204A, 'V'), + (0x2057, 'M', u'′′′′'), + (0x2058, 'V'), + (0x205F, '3', u' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', u'0'), + (0x2071, 'M', u'i'), + (0x2072, 'X'), + (0x2074, 'M', u'4'), + (0x2075, 'M', u'5'), + (0x2076, 'M', u'6'), + (0x2077, 'M', u'7'), + (0x2078, 'M', u'8'), + (0x2079, 'M', u'9'), + (0x207A, '3', u'+'), + (0x207B, 'M', u'−'), + (0x207C, '3', u'='), + (0x207D, '3', u'('), + (0x207E, '3', u')'), + (0x207F, 'M', u'n'), + (0x2080, 'M', u'0'), + (0x2081, 'M', u'1'), + (0x2082, 'M', u'2'), + (0x2083, 'M', u'3'), + (0x2084, 'M', u'4'), + (0x2085, 'M', u'5'), + (0x2086, 
'M', u'6'), + (0x2087, 'M', u'7'), + (0x2088, 'M', u'8'), + (0x2089, 'M', u'9'), + (0x208A, '3', u'+'), + (0x208B, 'M', u'−'), + (0x208C, '3', u'='), + (0x208D, '3', u'('), + (0x208E, '3', u')'), + (0x208F, 'X'), + (0x2090, 'M', u'a'), + (0x2091, 'M', u'e'), + (0x2092, 'M', u'o'), + (0x2093, 'M', u'x'), + (0x2094, 'M', u'ə'), + (0x2095, 'M', u'h'), + (0x2096, 'M', u'k'), + (0x2097, 'M', u'l'), + (0x2098, 'M', u'm'), + (0x2099, 'M', u'n'), + (0x209A, 'M', u'p'), + (0x209B, 'M', u's'), + (0x209C, 'M', u't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', u'rs'), + (0x20A9, 'V'), + (0x20BB, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', u'a/c'), + (0x2101, '3', u'a/s'), + (0x2102, 'M', u'c'), + (0x2103, 'M', u'°c'), + (0x2104, 'V'), + (0x2105, '3', u'c/o'), + (0x2106, '3', u'c/u'), + (0x2107, 'M', u'ɛ'), + (0x2108, 'V'), + (0x2109, 'M', u'°f'), + (0x210A, 'M', u'g'), + (0x210B, 'M', u'h'), + (0x210F, 'M', u'ħ'), + (0x2110, 'M', u'i'), + (0x2112, 'M', u'l'), + (0x2114, 'V'), + (0x2115, 'M', u'n'), + (0x2116, 'M', u'no'), + (0x2117, 'V'), + (0x2119, 'M', u'p'), + (0x211A, 'M', u'q'), + (0x211B, 'M', u'r'), + (0x211E, 'V'), + (0x2120, 'M', u'sm'), + (0x2121, 'M', u'tel'), + (0x2122, 'M', u'tm'), + (0x2123, 'V'), + (0x2124, 'M', u'z'), + (0x2125, 'V'), + (0x2126, 'M', u'ω'), + (0x2127, 'V'), + (0x2128, 'M', u'z'), + (0x2129, 'V'), + (0x212A, 'M', u'k'), + (0x212B, 'M', u'å'), + (0x212C, 'M', u'b'), + (0x212D, 'M', u'c'), + (0x212E, 'V'), + (0x212F, 'M', u'e'), + (0x2131, 'M', u'f'), + (0x2132, 'X'), + (0x2133, 'M', u'm'), + (0x2134, 'M', u'o'), + (0x2135, 'M', u'א'), + (0x2136, 'M', u'ב'), + (0x2137, 'M', u'ג'), + (0x2138, 'M', u'ד'), + (0x2139, 'M', u'i'), + (0x213A, 'V'), + (0x213B, 'M', u'fax'), + (0x213C, 'M', u'π'), + (0x213D, 'M', u'γ'), + (0x213F, 'M', u'π'), + (0x2140, 'M', u'∑'), + (0x2141, 'V'), + (0x2145, 'M', u'd'), + (0x2147, 'M', u'e'), + (0x2148, 'M', u'i'), + (0x2149, 'M', u'j'), + (0x214A, 'V'), + (0x2150, 'M', u'1⁄7'), + (0x2151, 'M', u'1⁄9'), + (0x2152, 'M', u'1⁄10'), + (0x2153, 'M', u'1⁄3'), + (0x2154, 'M', u'2⁄3'), + (0x2155, 'M', u'1⁄5'), + (0x2156, 'M', u'2⁄5'), + (0x2157, 'M', u'3⁄5'), + (0x2158, 'M', u'4⁄5'), + (0x2159, 'M', u'1⁄6'), + (0x215A, 'M', u'5⁄6'), + (0x215B, 'M', u'1⁄8'), + (0x215C, 'M', u'3⁄8'), + (0x215D, 'M', u'5⁄8'), + (0x215E, 'M', u'7⁄8'), + (0x215F, 'M', u'1⁄'), + (0x2160, 'M', u'i'), + (0x2161, 'M', u'ii'), + (0x2162, 'M', u'iii'), + (0x2163, 'M', u'iv'), + (0x2164, 'M', u'v'), + (0x2165, 'M', u'vi'), + (0x2166, 'M', u'vii'), + (0x2167, 'M', u'viii'), + (0x2168, 'M', u'ix'), + (0x2169, 'M', u'x'), + (0x216A, 'M', u'xi'), + (0x216B, 'M', u'xii'), + (0x216C, 'M', u'l'), + (0x216D, 'M', u'c'), + (0x216E, 'M', u'd'), + (0x216F, 'M', u'm'), + (0x2170, 'M', u'i'), + (0x2171, 'M', u'ii'), + (0x2172, 'M', u'iii'), + (0x2173, 'M', u'iv'), + (0x2174, 'M', u'v'), + (0x2175, 'M', u'vi'), + (0x2176, 'M', u'vii'), + (0x2177, 'M', u'viii'), + (0x2178, 'M', u'ix'), + (0x2179, 'M', u'x'), + (0x217A, 'M', u'xi'), + (0x217B, 'M', u'xii'), + (0x217C, 'M', u'l'), + (0x217D, 'M', u'c'), + (0x217E, 'M', u'd'), + (0x217F, 'M', u'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', u'0⁄3'), + (0x218A, 'X'), + (0x2190, 'V'), + (0x222C, 'M', u'∫∫'), + (0x222D, 'M', u'∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', u'∮∮'), + (0x2230, 'M', u'∮∮∮'), + (0x2231, 'V'), + (0x2260, '3'), + (0x2261, 'V'), + (0x226E, '3'), + (0x2270, 'V'), + (0x2329, 'M', u'〈'), + (0x232A, 'M', u'〉'), + (0x232B, 'V'), + (0x23F4, 'X'), + (0x2400, 'V'), + (0x2427, 'X'), + (0x2440, 
'V'), + (0x244B, 'X'), + (0x2460, 'M', u'1'), + (0x2461, 'M', u'2'), + (0x2462, 'M', u'3'), + (0x2463, 'M', u'4'), + (0x2464, 'M', u'5'), + (0x2465, 'M', u'6'), + (0x2466, 'M', u'7'), + (0x2467, 'M', u'8'), + (0x2468, 'M', u'9'), + (0x2469, 'M', u'10'), + (0x246A, 'M', u'11'), + (0x246B, 'M', u'12'), + (0x246C, 'M', u'13'), + (0x246D, 'M', u'14'), + (0x246E, 'M', u'15'), + (0x246F, 'M', u'16'), + (0x2470, 'M', u'17'), + (0x2471, 'M', u'18'), + (0x2472, 'M', u'19'), + (0x2473, 'M', u'20'), + (0x2474, '3', u'(1)'), + (0x2475, '3', u'(2)'), + (0x2476, '3', u'(3)'), + (0x2477, '3', u'(4)'), + (0x2478, '3', u'(5)'), + (0x2479, '3', u'(6)'), + (0x247A, '3', u'(7)'), + (0x247B, '3', u'(8)'), + (0x247C, '3', u'(9)'), + (0x247D, '3', u'(10)'), + (0x247E, '3', u'(11)'), + (0x247F, '3', u'(12)'), + (0x2480, '3', u'(13)'), + (0x2481, '3', u'(14)'), + (0x2482, '3', u'(15)'), + (0x2483, '3', u'(16)'), + (0x2484, '3', u'(17)'), + (0x2485, '3', u'(18)'), + (0x2486, '3', u'(19)'), + (0x2487, '3', u'(20)'), + (0x2488, 'X'), + (0x249C, '3', u'(a)'), + (0x249D, '3', u'(b)'), + (0x249E, '3', u'(c)'), + (0x249F, '3', u'(d)'), + (0x24A0, '3', u'(e)'), + (0x24A1, '3', u'(f)'), + (0x24A2, '3', u'(g)'), + (0x24A3, '3', u'(h)'), + (0x24A4, '3', u'(i)'), + (0x24A5, '3', u'(j)'), + (0x24A6, '3', u'(k)'), + (0x24A7, '3', u'(l)'), + (0x24A8, '3', u'(m)'), + (0x24A9, '3', u'(n)'), + (0x24AA, '3', u'(o)'), + (0x24AB, '3', u'(p)'), + (0x24AC, '3', u'(q)'), + (0x24AD, '3', u'(r)'), + (0x24AE, '3', u'(s)'), + (0x24AF, '3', u'(t)'), + (0x24B0, '3', u'(u)'), + (0x24B1, '3', u'(v)'), + (0x24B2, '3', u'(w)'), + (0x24B3, '3', u'(x)'), + (0x24B4, '3', u'(y)'), + (0x24B5, '3', u'(z)'), + (0x24B6, 'M', u'a'), + (0x24B7, 'M', u'b'), + (0x24B8, 'M', u'c'), + (0x24B9, 'M', u'd'), + (0x24BA, 'M', u'e'), + (0x24BB, 'M', u'f'), + (0x24BC, 'M', u'g'), + (0x24BD, 'M', u'h'), + (0x24BE, 'M', u'i'), + (0x24BF, 'M', u'j'), + (0x24C0, 'M', u'k'), + (0x24C1, 'M', u'l'), + (0x24C2, 'M', u'm'), + (0x24C3, 'M', u'n'), + (0x24C4, 'M', u'o'), + (0x24C5, 'M', u'p'), + (0x24C6, 'M', u'q'), + (0x24C7, 'M', u'r'), + (0x24C8, 'M', u's'), + (0x24C9, 'M', u't'), + (0x24CA, 'M', u'u'), + (0x24CB, 'M', u'v'), + (0x24CC, 'M', u'w'), + (0x24CD, 'M', u'x'), + (0x24CE, 'M', u'y'), + (0x24CF, 'M', u'z'), + (0x24D0, 'M', u'a'), + (0x24D1, 'M', u'b'), + (0x24D2, 'M', u'c'), + (0x24D3, 'M', u'd'), + (0x24D4, 'M', u'e'), + (0x24D5, 'M', u'f'), + (0x24D6, 'M', u'g'), + (0x24D7, 'M', u'h'), + (0x24D8, 'M', u'i'), + (0x24D9, 'M', u'j'), + (0x24DA, 'M', u'k'), + (0x24DB, 'M', u'l'), + (0x24DC, 'M', u'm'), + (0x24DD, 'M', u'n'), + (0x24DE, 'M', u'o'), + (0x24DF, 'M', u'p'), + (0x24E0, 'M', u'q'), + (0x24E1, 'M', u'r'), + (0x24E2, 'M', u's'), + (0x24E3, 'M', u't'), + (0x24E4, 'M', u'u'), + (0x24E5, 'M', u'v'), + (0x24E6, 'M', u'w'), + (0x24E7, 'M', u'x'), + (0x24E8, 'M', u'y'), + (0x24E9, 'M', u'z'), + (0x24EA, 'M', u'0'), + (0x24EB, 'V'), + (0x2700, 'X'), + (0x2701, 'V'), + (0x2A0C, 'M', u'∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', u'::='), + (0x2A75, '3', u'=='), + (0x2A76, '3', u'==='), + (0x2A77, 'V'), + (0x2ADC, 'M', u'⫝̸'), + (0x2ADD, 'V'), + (0x2B4D, 'X'), + (0x2B50, 'V'), + (0x2B5A, 'X'), + (0x2C00, 'M', u'ⰰ'), + (0x2C01, 'M', u'ⰱ'), + (0x2C02, 'M', u'ⰲ'), + (0x2C03, 'M', u'ⰳ'), + (0x2C04, 'M', u'ⰴ'), + (0x2C05, 'M', u'ⰵ'), + (0x2C06, 'M', u'ⰶ'), + (0x2C07, 'M', u'ⰷ'), + (0x2C08, 'M', u'ⰸ'), + (0x2C09, 'M', u'ⰹ'), + (0x2C0A, 'M', u'ⰺ'), + (0x2C0B, 'M', u'ⰻ'), + (0x2C0C, 'M', u'ⰼ'), + (0x2C0D, 'M', u'ⰽ'), + (0x2C0E, 'M', u'ⰾ'), + (0x2C0F, 'M', u'ⰿ'), + (0x2C10, 'M', 
u'ⱀ'), + (0x2C11, 'M', u'ⱁ'), + (0x2C12, 'M', u'ⱂ'), + (0x2C13, 'M', u'ⱃ'), + (0x2C14, 'M', u'ⱄ'), + (0x2C15, 'M', u'ⱅ'), + (0x2C16, 'M', u'ⱆ'), + (0x2C17, 'M', u'ⱇ'), + (0x2C18, 'M', u'ⱈ'), + (0x2C19, 'M', u'ⱉ'), + (0x2C1A, 'M', u'ⱊ'), + (0x2C1B, 'M', u'ⱋ'), + (0x2C1C, 'M', u'ⱌ'), + (0x2C1D, 'M', u'ⱍ'), + (0x2C1E, 'M', u'ⱎ'), + (0x2C1F, 'M', u'ⱏ'), + (0x2C20, 'M', u'ⱐ'), + (0x2C21, 'M', u'ⱑ'), + (0x2C22, 'M', u'ⱒ'), + (0x2C23, 'M', u'ⱓ'), + (0x2C24, 'M', u'ⱔ'), + (0x2C25, 'M', u'ⱕ'), + (0x2C26, 'M', u'ⱖ'), + (0x2C27, 'M', u'ⱗ'), + (0x2C28, 'M', u'ⱘ'), + (0x2C29, 'M', u'ⱙ'), + (0x2C2A, 'M', u'ⱚ'), + (0x2C2B, 'M', u'ⱛ'), + (0x2C2C, 'M', u'ⱜ'), + (0x2C2D, 'M', u'ⱝ'), + (0x2C2E, 'M', u'ⱞ'), + (0x2C2F, 'X'), + (0x2C30, 'V'), + (0x2C5F, 'X'), + (0x2C60, 'M', u'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', u'ɫ'), + (0x2C63, 'M', u'ᵽ'), + (0x2C64, 'M', u'ɽ'), + (0x2C65, 'V'), + (0x2C67, 'M', u'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', u'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', u'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', u'ɑ'), + (0x2C6E, 'M', u'ɱ'), + (0x2C6F, 'M', u'ɐ'), + (0x2C70, 'M', u'ɒ'), + (0x2C71, 'V'), + (0x2C72, 'M', u'ⱳ'), + (0x2C73, 'V'), + (0x2C75, 'M', u'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', u'j'), + (0x2C7D, 'M', u'v'), + (0x2C7E, 'M', u'ȿ'), + (0x2C7F, 'M', u'ɀ'), + (0x2C80, 'M', u'ⲁ'), + (0x2C81, 'V'), + (0x2C82, 'M', u'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', u'ⲅ'), + (0x2C85, 'V'), + (0x2C86, 'M', u'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', u'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', u'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', u'ⲍ'), + (0x2C8D, 'V'), + (0x2C8E, 'M', u'ⲏ'), + (0x2C8F, 'V'), + (0x2C90, 'M', u'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', u'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', u'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', u'ⲗ'), + (0x2C97, 'V'), + (0x2C98, 'M', u'ⲙ'), + (0x2C99, 'V'), + (0x2C9A, 'M', u'ⲛ'), + (0x2C9B, 'V'), + (0x2C9C, 'M', u'ⲝ'), + (0x2C9D, 'V'), + (0x2C9E, 'M', u'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', u'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', u'ⲣ'), + (0x2CA3, 'V'), + (0x2CA4, 'M', u'ⲥ'), + (0x2CA5, 'V'), + (0x2CA6, 'M', u'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', u'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', u'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', u'ⲭ'), + (0x2CAD, 'V'), + (0x2CAE, 'M', u'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', u'ⲱ'), + (0x2CB1, 'V'), + (0x2CB2, 'M', u'ⲳ'), + (0x2CB3, 'V'), + (0x2CB4, 'M', u'ⲵ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', u'ⲷ'), + (0x2CB7, 'V'), + (0x2CB8, 'M', u'ⲹ'), + (0x2CB9, 'V'), + (0x2CBA, 'M', u'ⲻ'), + (0x2CBB, 'V'), + (0x2CBC, 'M', u'ⲽ'), + (0x2CBD, 'V'), + (0x2CBE, 'M', u'ⲿ'), + (0x2CBF, 'V'), + (0x2CC0, 'M', u'ⳁ'), + (0x2CC1, 'V'), + (0x2CC2, 'M', u'ⳃ'), + (0x2CC3, 'V'), + (0x2CC4, 'M', u'ⳅ'), + (0x2CC5, 'V'), + (0x2CC6, 'M', u'ⳇ'), + (0x2CC7, 'V'), + (0x2CC8, 'M', u'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', u'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', u'ⳍ'), + (0x2CCD, 'V'), + (0x2CCE, 'M', u'ⳏ'), + (0x2CCF, 'V'), + (0x2CD0, 'M', u'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', u'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', u'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', u'ⳗ'), + (0x2CD7, 'V'), + (0x2CD8, 'M', u'ⳙ'), + (0x2CD9, 'V'), + (0x2CDA, 'M', u'ⳛ'), + (0x2CDB, 'V'), + (0x2CDC, 'M', u'ⳝ'), + (0x2CDD, 'V'), + (0x2CDE, 'M', u'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', u'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', u'ⳣ'), + (0x2CE3, 'V'), + (0x2CEB, 'M', u'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', u'ⳮ'), + (0x2CEE, 'V'), + (0x2CF2, 'M', u'ⳳ'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + 
(0x2D6F, 'M', u'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E3C, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', u'母'), + (0x2EA0, 'V'), + (0x2EF3, 'M', u'龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', u'一'), + (0x2F01, 'M', u'丨'), + (0x2F02, 'M', u'丶'), + (0x2F03, 'M', u'丿'), + (0x2F04, 'M', u'乙'), + (0x2F05, 'M', u'亅'), + (0x2F06, 'M', u'二'), + (0x2F07, 'M', u'亠'), + (0x2F08, 'M', u'人'), + (0x2F09, 'M', u'儿'), + (0x2F0A, 'M', u'入'), + (0x2F0B, 'M', u'八'), + (0x2F0C, 'M', u'冂'), + (0x2F0D, 'M', u'冖'), + (0x2F0E, 'M', u'冫'), + (0x2F0F, 'M', u'几'), + (0x2F10, 'M', u'凵'), + (0x2F11, 'M', u'刀'), + (0x2F12, 'M', u'力'), + (0x2F13, 'M', u'勹'), + (0x2F14, 'M', u'匕'), + (0x2F15, 'M', u'匚'), + (0x2F16, 'M', u'匸'), + (0x2F17, 'M', u'十'), + (0x2F18, 'M', u'卜'), + (0x2F19, 'M', u'卩'), + (0x2F1A, 'M', u'厂'), + (0x2F1B, 'M', u'厶'), + (0x2F1C, 'M', u'又'), + (0x2F1D, 'M', u'口'), + (0x2F1E, 'M', u'囗'), + (0x2F1F, 'M', u'土'), + (0x2F20, 'M', u'士'), + (0x2F21, 'M', u'夂'), + (0x2F22, 'M', u'夊'), + (0x2F23, 'M', u'夕'), + (0x2F24, 'M', u'大'), + (0x2F25, 'M', u'女'), + (0x2F26, 'M', u'子'), + (0x2F27, 'M', u'宀'), + (0x2F28, 'M', u'寸'), + (0x2F29, 'M', u'小'), + (0x2F2A, 'M', u'尢'), + (0x2F2B, 'M', u'尸'), + (0x2F2C, 'M', u'屮'), + (0x2F2D, 'M', u'山'), + (0x2F2E, 'M', u'巛'), + (0x2F2F, 'M', u'工'), + (0x2F30, 'M', u'己'), + (0x2F31, 'M', u'巾'), + (0x2F32, 'M', u'干'), + (0x2F33, 'M', u'幺'), + (0x2F34, 'M', u'广'), + (0x2F35, 'M', u'廴'), + (0x2F36, 'M', u'廾'), + (0x2F37, 'M', u'弋'), + (0x2F38, 'M', u'弓'), + (0x2F39, 'M', u'彐'), + (0x2F3A, 'M', u'彡'), + (0x2F3B, 'M', u'彳'), + (0x2F3C, 'M', u'心'), + (0x2F3D, 'M', u'戈'), + (0x2F3E, 'M', u'戶'), + (0x2F3F, 'M', u'手'), + (0x2F40, 'M', u'支'), + (0x2F41, 'M', u'攴'), + (0x2F42, 'M', u'文'), + (0x2F43, 'M', u'斗'), + (0x2F44, 'M', u'斤'), + (0x2F45, 'M', u'方'), + (0x2F46, 'M', u'无'), + (0x2F47, 'M', u'日'), + (0x2F48, 'M', u'曰'), + (0x2F49, 'M', u'月'), + (0x2F4A, 'M', u'木'), + (0x2F4B, 'M', u'欠'), + (0x2F4C, 'M', u'止'), + (0x2F4D, 'M', u'歹'), + (0x2F4E, 'M', u'殳'), + (0x2F4F, 'M', u'毋'), + (0x2F50, 'M', u'比'), + (0x2F51, 'M', u'毛'), + (0x2F52, 'M', u'氏'), + (0x2F53, 'M', u'气'), + (0x2F54, 'M', u'水'), + (0x2F55, 'M', u'火'), + (0x2F56, 'M', u'爪'), + (0x2F57, 'M', u'父'), + (0x2F58, 'M', u'爻'), + (0x2F59, 'M', u'爿'), + (0x2F5A, 'M', u'片'), + (0x2F5B, 'M', u'牙'), + (0x2F5C, 'M', u'牛'), + (0x2F5D, 'M', u'犬'), + (0x2F5E, 'M', u'玄'), + (0x2F5F, 'M', u'玉'), + (0x2F60, 'M', u'瓜'), + (0x2F61, 'M', u'瓦'), + (0x2F62, 'M', u'甘'), + (0x2F63, 'M', u'生'), + (0x2F64, 'M', u'用'), + (0x2F65, 'M', u'田'), + (0x2F66, 'M', u'疋'), + (0x2F67, 'M', u'疒'), + (0x2F68, 'M', u'癶'), + (0x2F69, 'M', u'白'), + (0x2F6A, 'M', u'皮'), + (0x2F6B, 'M', u'皿'), + (0x2F6C, 'M', u'目'), + (0x2F6D, 'M', u'矛'), + (0x2F6E, 'M', u'矢'), + (0x2F6F, 'M', u'石'), + (0x2F70, 'M', u'示'), + (0x2F71, 'M', u'禸'), + (0x2F72, 'M', u'禾'), + (0x2F73, 'M', u'穴'), + (0x2F74, 'M', u'立'), + (0x2F75, 'M', u'竹'), + (0x2F76, 'M', u'米'), + (0x2F77, 'M', u'糸'), + (0x2F78, 'M', u'缶'), + (0x2F79, 'M', u'网'), + (0x2F7A, 'M', u'羊'), + (0x2F7B, 'M', u'羽'), + (0x2F7C, 'M', u'老'), + (0x2F7D, 'M', u'而'), + (0x2F7E, 'M', u'耒'), + (0x2F7F, 'M', u'耳'), + (0x2F80, 'M', u'聿'), + (0x2F81, 'M', u'肉'), + (0x2F82, 'M', u'臣'), + (0x2F83, 'M', 
u'自'), + (0x2F84, 'M', u'至'), + (0x2F85, 'M', u'臼'), + (0x2F86, 'M', u'舌'), + (0x2F87, 'M', u'舛'), + (0x2F88, 'M', u'舟'), + (0x2F89, 'M', u'艮'), + (0x2F8A, 'M', u'色'), + (0x2F8B, 'M', u'艸'), + (0x2F8C, 'M', u'虍'), + (0x2F8D, 'M', u'虫'), + (0x2F8E, 'M', u'血'), + (0x2F8F, 'M', u'行'), + (0x2F90, 'M', u'衣'), + (0x2F91, 'M', u'襾'), + (0x2F92, 'M', u'見'), + (0x2F93, 'M', u'角'), + (0x2F94, 'M', u'言'), + (0x2F95, 'M', u'谷'), + (0x2F96, 'M', u'豆'), + (0x2F97, 'M', u'豕'), + (0x2F98, 'M', u'豸'), + (0x2F99, 'M', u'貝'), + (0x2F9A, 'M', u'赤'), + (0x2F9B, 'M', u'走'), + (0x2F9C, 'M', u'足'), + (0x2F9D, 'M', u'身'), + (0x2F9E, 'M', u'車'), + (0x2F9F, 'M', u'辛'), + (0x2FA0, 'M', u'辰'), + (0x2FA1, 'M', u'辵'), + (0x2FA2, 'M', u'邑'), + (0x2FA3, 'M', u'酉'), + (0x2FA4, 'M', u'釆'), + (0x2FA5, 'M', u'里'), + (0x2FA6, 'M', u'金'), + (0x2FA7, 'M', u'長'), + (0x2FA8, 'M', u'門'), + (0x2FA9, 'M', u'阜'), + (0x2FAA, 'M', u'隶'), + (0x2FAB, 'M', u'隹'), + (0x2FAC, 'M', u'雨'), + (0x2FAD, 'M', u'靑'), + (0x2FAE, 'M', u'非'), + (0x2FAF, 'M', u'面'), + (0x2FB0, 'M', u'革'), + (0x2FB1, 'M', u'韋'), + (0x2FB2, 'M', u'韭'), + (0x2FB3, 'M', u'音'), + (0x2FB4, 'M', u'頁'), + (0x2FB5, 'M', u'風'), + (0x2FB6, 'M', u'飛'), + (0x2FB7, 'M', u'食'), + (0x2FB8, 'M', u'首'), + (0x2FB9, 'M', u'香'), + (0x2FBA, 'M', u'馬'), + (0x2FBB, 'M', u'骨'), + (0x2FBC, 'M', u'高'), + (0x2FBD, 'M', u'髟'), + (0x2FBE, 'M', u'鬥'), + (0x2FBF, 'M', u'鬯'), + (0x2FC0, 'M', u'鬲'), + (0x2FC1, 'M', u'鬼'), + (0x2FC2, 'M', u'魚'), + (0x2FC3, 'M', u'鳥'), + (0x2FC4, 'M', u'鹵'), + (0x2FC5, 'M', u'鹿'), + (0x2FC6, 'M', u'麥'), + (0x2FC7, 'M', u'麻'), + (0x2FC8, 'M', u'黃'), + (0x2FC9, 'M', u'黍'), + (0x2FCA, 'M', u'黑'), + (0x2FCB, 'M', u'黹'), + (0x2FCC, 'M', u'黽'), + (0x2FCD, 'M', u'鼎'), + (0x2FCE, 'M', u'鼓'), + (0x2FCF, 'M', u'鼠'), + (0x2FD0, 'M', u'鼻'), + (0x2FD1, 'M', u'齊'), + (0x2FD2, 'M', u'齒'), + (0x2FD3, 'M', u'龍'), + (0x2FD4, 'M', u'龜'), + (0x2FD5, 'M', u'龠'), + (0x2FD6, 'X'), + (0x3000, '3', u' '), + (0x3001, 'V'), + (0x3002, 'M', u'.'), + (0x3003, 'V'), + (0x3036, 'M', u'〒'), + (0x3037, 'V'), + (0x3038, 'M', u'十'), + (0x3039, 'M', u'卄'), + (0x303A, 'M', u'卅'), + (0x303B, 'V'), + (0x3040, 'X'), + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', u' ゙'), + (0x309C, '3', u' ゚'), + (0x309D, 'V'), + (0x309F, 'M', u'より'), + (0x30A0, 'V'), + (0x30FF, 'M', u'コト'), + (0x3100, 'X'), + (0x3105, 'V'), + (0x312E, 'X'), + (0x3131, 'M', u'ᄀ'), + (0x3132, 'M', u'ᄁ'), + (0x3133, 'M', u'ᆪ'), + (0x3134, 'M', u'ᄂ'), + (0x3135, 'M', u'ᆬ'), + (0x3136, 'M', u'ᆭ'), + (0x3137, 'M', u'ᄃ'), + (0x3138, 'M', u'ᄄ'), + (0x3139, 'M', u'ᄅ'), + (0x313A, 'M', u'ᆰ'), + (0x313B, 'M', u'ᆱ'), + (0x313C, 'M', u'ᆲ'), + (0x313D, 'M', u'ᆳ'), + (0x313E, 'M', u'ᆴ'), + (0x313F, 'M', u'ᆵ'), + (0x3140, 'M', u'ᄚ'), + (0x3141, 'M', u'ᄆ'), + (0x3142, 'M', u'ᄇ'), + (0x3143, 'M', u'ᄈ'), + (0x3144, 'M', u'ᄡ'), + (0x3145, 'M', u'ᄉ'), + (0x3146, 'M', u'ᄊ'), + (0x3147, 'M', u'ᄋ'), + (0x3148, 'M', u'ᄌ'), + (0x3149, 'M', u'ᄍ'), + (0x314A, 'M', u'ᄎ'), + (0x314B, 'M', u'ᄏ'), + (0x314C, 'M', u'ᄐ'), + (0x314D, 'M', u'ᄑ'), + (0x314E, 'M', u'ᄒ'), + (0x314F, 'M', u'ᅡ'), + (0x3150, 'M', u'ᅢ'), + (0x3151, 'M', u'ᅣ'), + (0x3152, 'M', u'ᅤ'), + (0x3153, 'M', u'ᅥ'), + (0x3154, 'M', u'ᅦ'), + (0x3155, 'M', u'ᅧ'), + (0x3156, 'M', u'ᅨ'), + (0x3157, 'M', u'ᅩ'), + (0x3158, 'M', u'ᅪ'), + (0x3159, 'M', u'ᅫ'), + (0x315A, 'M', u'ᅬ'), + (0x315B, 'M', u'ᅭ'), + (0x315C, 'M', u'ᅮ'), + (0x315D, 'M', u'ᅯ'), + (0x315E, 'M', u'ᅰ'), + (0x315F, 'M', u'ᅱ'), + (0x3160, 'M', u'ᅲ'), + (0x3161, 'M', u'ᅳ'), + (0x3162, 'M', u'ᅴ'), + (0x3163, 'M', u'ᅵ'), + (0x3164, 'X'), 
+ (0x3165, 'M', u'ᄔ'), + (0x3166, 'M', u'ᄕ'), + (0x3167, 'M', u'ᇇ'), + (0x3168, 'M', u'ᇈ'), + (0x3169, 'M', u'ᇌ'), + (0x316A, 'M', u'ᇎ'), + (0x316B, 'M', u'ᇓ'), + (0x316C, 'M', u'ᇗ'), + (0x316D, 'M', u'ᇙ'), + (0x316E, 'M', u'ᄜ'), + (0x316F, 'M', u'ᇝ'), + (0x3170, 'M', u'ᇟ'), + (0x3171, 'M', u'ᄝ'), + (0x3172, 'M', u'ᄞ'), + (0x3173, 'M', u'ᄠ'), + (0x3174, 'M', u'ᄢ'), + (0x3175, 'M', u'ᄣ'), + (0x3176, 'M', u'ᄧ'), + (0x3177, 'M', u'ᄩ'), + (0x3178, 'M', u'ᄫ'), + (0x3179, 'M', u'ᄬ'), + (0x317A, 'M', u'ᄭ'), + (0x317B, 'M', u'ᄮ'), + (0x317C, 'M', u'ᄯ'), + (0x317D, 'M', u'ᄲ'), + (0x317E, 'M', u'ᄶ'), + (0x317F, 'M', u'ᅀ'), + (0x3180, 'M', u'ᅇ'), + (0x3181, 'M', u'ᅌ'), + (0x3182, 'M', u'ᇱ'), + (0x3183, 'M', u'ᇲ'), + (0x3184, 'M', u'ᅗ'), + (0x3185, 'M', u'ᅘ'), + (0x3186, 'M', u'ᅙ'), + (0x3187, 'M', u'ᆄ'), + (0x3188, 'M', u'ᆅ'), + (0x3189, 'M', u'ᆈ'), + (0x318A, 'M', u'ᆑ'), + (0x318B, 'M', u'ᆒ'), + (0x318C, 'M', u'ᆔ'), + (0x318D, 'M', u'ᆞ'), + (0x318E, 'M', u'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', u'一'), + (0x3193, 'M', u'二'), + (0x3194, 'M', u'三'), + (0x3195, 'M', u'四'), + (0x3196, 'M', u'上'), + (0x3197, 'M', u'中'), + (0x3198, 'M', u'下'), + (0x3199, 'M', u'甲'), + (0x319A, 'M', u'乙'), + (0x319B, 'M', u'丙'), + (0x319C, 'M', u'丁'), + (0x319D, 'M', u'天'), + (0x319E, 'M', u'地'), + (0x319F, 'M', u'人'), + (0x31A0, 'V'), + (0x31BB, 'X'), + (0x31C0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', u'(ᄀ)'), + (0x3201, '3', u'(ᄂ)'), + (0x3202, '3', u'(ᄃ)'), + (0x3203, '3', u'(ᄅ)'), + (0x3204, '3', u'(ᄆ)'), + (0x3205, '3', u'(ᄇ)'), + (0x3206, '3', u'(ᄉ)'), + (0x3207, '3', u'(ᄋ)'), + (0x3208, '3', u'(ᄌ)'), + (0x3209, '3', u'(ᄎ)'), + (0x320A, '3', u'(ᄏ)'), + (0x320B, '3', u'(ᄐ)'), + (0x320C, '3', u'(ᄑ)'), + (0x320D, '3', u'(ᄒ)'), + (0x320E, '3', u'(가)'), + (0x320F, '3', u'(나)'), + (0x3210, '3', u'(다)'), + (0x3211, '3', u'(라)'), + (0x3212, '3', u'(마)'), + (0x3213, '3', u'(바)'), + (0x3214, '3', u'(사)'), + (0x3215, '3', u'(아)'), + (0x3216, '3', u'(자)'), + (0x3217, '3', u'(차)'), + (0x3218, '3', u'(카)'), + (0x3219, '3', u'(타)'), + (0x321A, '3', u'(파)'), + (0x321B, '3', u'(하)'), + (0x321C, '3', u'(주)'), + (0x321D, '3', u'(오전)'), + (0x321E, '3', u'(오후)'), + (0x321F, 'X'), + (0x3220, '3', u'(一)'), + (0x3221, '3', u'(二)'), + (0x3222, '3', u'(三)'), + (0x3223, '3', u'(四)'), + (0x3224, '3', u'(五)'), + (0x3225, '3', u'(六)'), + (0x3226, '3', u'(七)'), + (0x3227, '3', u'(八)'), + (0x3228, '3', u'(九)'), + (0x3229, '3', u'(十)'), + (0x322A, '3', u'(月)'), + (0x322B, '3', u'(火)'), + (0x322C, '3', u'(水)'), + (0x322D, '3', u'(木)'), + (0x322E, '3', u'(金)'), + (0x322F, '3', u'(土)'), + (0x3230, '3', u'(日)'), + (0x3231, '3', u'(株)'), + (0x3232, '3', u'(有)'), + (0x3233, '3', u'(社)'), + (0x3234, '3', u'(名)'), + (0x3235, '3', u'(特)'), + (0x3236, '3', u'(財)'), + (0x3237, '3', u'(祝)'), + (0x3238, '3', u'(労)'), + (0x3239, '3', u'(代)'), + (0x323A, '3', u'(呼)'), + (0x323B, '3', u'(学)'), + (0x323C, '3', u'(監)'), + (0x323D, '3', u'(企)'), + (0x323E, '3', u'(資)'), + (0x323F, '3', u'(協)'), + (0x3240, '3', u'(祭)'), + (0x3241, '3', u'(休)'), + (0x3242, '3', u'(自)'), + (0x3243, '3', u'(至)'), + (0x3244, 'M', u'問'), + (0x3245, 'M', u'幼'), + (0x3246, 'M', u'文'), + (0x3247, 'M', u'箏'), + (0x3248, 'V'), + (0x3250, 'M', u'pte'), + (0x3251, 'M', u'21'), + (0x3252, 'M', u'22'), + (0x3253, 'M', u'23'), + (0x3254, 'M', u'24'), + (0x3255, 'M', u'25'), + (0x3256, 'M', u'26'), + (0x3257, 'M', u'27'), + (0x3258, 'M', u'28'), + (0x3259, 'M', u'29'), + (0x325A, 'M', u'30'), + (0x325B, 'M', u'31'), + (0x325C, 'M', u'32'), + (0x325D, 'M', u'33'), + 
(0x325E, 'M', u'34'), + (0x325F, 'M', u'35'), + (0x3260, 'M', u'ᄀ'), + (0x3261, 'M', u'ᄂ'), + (0x3262, 'M', u'ᄃ'), + (0x3263, 'M', u'ᄅ'), + (0x3264, 'M', u'ᄆ'), + (0x3265, 'M', u'ᄇ'), + (0x3266, 'M', u'ᄉ'), + (0x3267, 'M', u'ᄋ'), + (0x3268, 'M', u'ᄌ'), + (0x3269, 'M', u'ᄎ'), + (0x326A, 'M', u'ᄏ'), + (0x326B, 'M', u'ᄐ'), + (0x326C, 'M', u'ᄑ'), + (0x326D, 'M', u'ᄒ'), + (0x326E, 'M', u'가'), + (0x326F, 'M', u'나'), + (0x3270, 'M', u'다'), + (0x3271, 'M', u'라'), + (0x3272, 'M', u'마'), + (0x3273, 'M', u'바'), + (0x3274, 'M', u'사'), + (0x3275, 'M', u'아'), + (0x3276, 'M', u'자'), + (0x3277, 'M', u'차'), + (0x3278, 'M', u'카'), + (0x3279, 'M', u'타'), + (0x327A, 'M', u'파'), + (0x327B, 'M', u'하'), + (0x327C, 'M', u'참고'), + (0x327D, 'M', u'주의'), + (0x327E, 'M', u'우'), + (0x327F, 'V'), + (0x3280, 'M', u'一'), + (0x3281, 'M', u'二'), + (0x3282, 'M', u'三'), + (0x3283, 'M', u'四'), + (0x3284, 'M', u'五'), + (0x3285, 'M', u'六'), + (0x3286, 'M', u'七'), + (0x3287, 'M', u'八'), + (0x3288, 'M', u'九'), + (0x3289, 'M', u'十'), + (0x328A, 'M', u'月'), + (0x328B, 'M', u'火'), + (0x328C, 'M', u'水'), + (0x328D, 'M', u'木'), + (0x328E, 'M', u'金'), + (0x328F, 'M', u'土'), + (0x3290, 'M', u'日'), + (0x3291, 'M', u'株'), + (0x3292, 'M', u'有'), + (0x3293, 'M', u'社'), + (0x3294, 'M', u'名'), + (0x3295, 'M', u'特'), + (0x3296, 'M', u'財'), + (0x3297, 'M', u'祝'), + (0x3298, 'M', u'労'), + (0x3299, 'M', u'秘'), + (0x329A, 'M', u'男'), + (0x329B, 'M', u'女'), + (0x329C, 'M', u'適'), + (0x329D, 'M', u'優'), + (0x329E, 'M', u'印'), + (0x329F, 'M', u'注'), + (0x32A0, 'M', u'項'), + (0x32A1, 'M', u'休'), + (0x32A2, 'M', u'写'), + (0x32A3, 'M', u'正'), + (0x32A4, 'M', u'上'), + (0x32A5, 'M', u'中'), + (0x32A6, 'M', u'下'), + (0x32A7, 'M', u'左'), + (0x32A8, 'M', u'右'), + (0x32A9, 'M', u'医'), + (0x32AA, 'M', u'宗'), + (0x32AB, 'M', u'学'), + (0x32AC, 'M', u'監'), + (0x32AD, 'M', u'企'), + (0x32AE, 'M', u'資'), + (0x32AF, 'M', u'協'), + (0x32B0, 'M', u'夜'), + (0x32B1, 'M', u'36'), + (0x32B2, 'M', u'37'), + (0x32B3, 'M', u'38'), + (0x32B4, 'M', u'39'), + (0x32B5, 'M', u'40'), + (0x32B6, 'M', u'41'), + (0x32B7, 'M', u'42'), + (0x32B8, 'M', u'43'), + (0x32B9, 'M', u'44'), + (0x32BA, 'M', u'45'), + (0x32BB, 'M', u'46'), + (0x32BC, 'M', u'47'), + (0x32BD, 'M', u'48'), + (0x32BE, 'M', u'49'), + (0x32BF, 'M', u'50'), + (0x32C0, 'M', u'1月'), + (0x32C1, 'M', u'2月'), + (0x32C2, 'M', u'3月'), + (0x32C3, 'M', u'4月'), + (0x32C4, 'M', u'5月'), + (0x32C5, 'M', u'6月'), + (0x32C6, 'M', u'7月'), + (0x32C7, 'M', u'8月'), + (0x32C8, 'M', u'9月'), + (0x32C9, 'M', u'10月'), + (0x32CA, 'M', u'11月'), + (0x32CB, 'M', u'12月'), + (0x32CC, 'M', u'hg'), + (0x32CD, 'M', u'erg'), + (0x32CE, 'M', u'ev'), + (0x32CF, 'M', u'ltd'), + (0x32D0, 'M', u'ア'), + (0x32D1, 'M', u'イ'), + (0x32D2, 'M', u'ウ'), + (0x32D3, 'M', u'エ'), + (0x32D4, 'M', u'オ'), + (0x32D5, 'M', u'カ'), + (0x32D6, 'M', u'キ'), + (0x32D7, 'M', u'ク'), + (0x32D8, 'M', u'ケ'), + (0x32D9, 'M', u'コ'), + (0x32DA, 'M', u'サ'), + (0x32DB, 'M', u'シ'), + (0x32DC, 'M', u'ス'), + (0x32DD, 'M', u'セ'), + (0x32DE, 'M', u'ソ'), + (0x32DF, 'M', u'タ'), + (0x32E0, 'M', u'チ'), + (0x32E1, 'M', u'ツ'), + (0x32E2, 'M', u'テ'), + (0x32E3, 'M', u'ト'), + (0x32E4, 'M', u'ナ'), + (0x32E5, 'M', u'ニ'), + (0x32E6, 'M', u'ヌ'), + (0x32E7, 'M', u'ネ'), + (0x32E8, 'M', u'ノ'), + (0x32E9, 'M', u'ハ'), + (0x32EA, 'M', u'ヒ'), + (0x32EB, 'M', u'フ'), + (0x32EC, 'M', u'ヘ'), + (0x32ED, 'M', u'ホ'), + (0x32EE, 'M', u'マ'), + (0x32EF, 'M', u'ミ'), + (0x32F0, 'M', u'ム'), + (0x32F1, 'M', u'メ'), + (0x32F2, 'M', u'モ'), + (0x32F3, 'M', u'ヤ'), + (0x32F4, 'M', u'ユ'), + (0x32F5, 'M', u'ヨ'), + (0x32F6, 'M', u'ラ'), + 
(0x32F7, 'M', u'リ'), + (0x32F8, 'M', u'ル'), + (0x32F9, 'M', u'レ'), + (0x32FA, 'M', u'ロ'), + (0x32FB, 'M', u'ワ'), + (0x32FC, 'M', u'ヰ'), + (0x32FD, 'M', u'ヱ'), + (0x32FE, 'M', u'ヲ'), + (0x32FF, 'X'), + (0x3300, 'M', u'アパート'), + (0x3301, 'M', u'アルファ'), + (0x3302, 'M', u'アンペア'), + (0x3303, 'M', u'アール'), + (0x3304, 'M', u'イニング'), + (0x3305, 'M', u'インチ'), + (0x3306, 'M', u'ウォン'), + (0x3307, 'M', u'エスクード'), + (0x3308, 'M', u'エーカー'), + (0x3309, 'M', u'オンス'), + (0x330A, 'M', u'オーム'), + (0x330B, 'M', u'カイリ'), + (0x330C, 'M', u'カラット'), + (0x330D, 'M', u'カロリー'), + (0x330E, 'M', u'ガロン'), + (0x330F, 'M', u'ガンマ'), + (0x3310, 'M', u'ギガ'), + (0x3311, 'M', u'ギニー'), + (0x3312, 'M', u'キュリー'), + (0x3313, 'M', u'ギルダー'), + (0x3314, 'M', u'キロ'), + (0x3315, 'M', u'キログラム'), + (0x3316, 'M', u'キロメートル'), + (0x3317, 'M', u'キロワット'), + (0x3318, 'M', u'グラム'), + (0x3319, 'M', u'グラムトン'), + (0x331A, 'M', u'クルゼイロ'), + (0x331B, 'M', u'クローネ'), + (0x331C, 'M', u'ケース'), + (0x331D, 'M', u'コルナ'), + (0x331E, 'M', u'コーポ'), + (0x331F, 'M', u'サイクル'), + (0x3320, 'M', u'サンチーム'), + (0x3321, 'M', u'シリング'), + (0x3322, 'M', u'センチ'), + (0x3323, 'M', u'セント'), + (0x3324, 'M', u'ダース'), + (0x3325, 'M', u'デシ'), + (0x3326, 'M', u'ドル'), + (0x3327, 'M', u'トン'), + (0x3328, 'M', u'ナノ'), + (0x3329, 'M', u'ノット'), + (0x332A, 'M', u'ハイツ'), + (0x332B, 'M', u'パーセント'), + (0x332C, 'M', u'パーツ'), + (0x332D, 'M', u'バーレル'), + (0x332E, 'M', u'ピアストル'), + (0x332F, 'M', u'ピクル'), + (0x3330, 'M', u'ピコ'), + (0x3331, 'M', u'ビル'), + (0x3332, 'M', u'ファラッド'), + (0x3333, 'M', u'フィート'), + (0x3334, 'M', u'ブッシェル'), + (0x3335, 'M', u'フラン'), + (0x3336, 'M', u'ヘクタール'), + (0x3337, 'M', u'ペソ'), + (0x3338, 'M', u'ペニヒ'), + (0x3339, 'M', u'ヘルツ'), + (0x333A, 'M', u'ペンス'), + (0x333B, 'M', u'ページ'), + (0x333C, 'M', u'ベータ'), + (0x333D, 'M', u'ポイント'), + (0x333E, 'M', u'ボルト'), + (0x333F, 'M', u'ホン'), + (0x3340, 'M', u'ポンド'), + (0x3341, 'M', u'ホール'), + (0x3342, 'M', u'ホーン'), + (0x3343, 'M', u'マイクロ'), + (0x3344, 'M', u'マイル'), + (0x3345, 'M', u'マッハ'), + (0x3346, 'M', u'マルク'), + (0x3347, 'M', u'マンション'), + (0x3348, 'M', u'ミクロン'), + (0x3349, 'M', u'ミリ'), + (0x334A, 'M', u'ミリバール'), + (0x334B, 'M', u'メガ'), + (0x334C, 'M', u'メガトン'), + (0x334D, 'M', u'メートル'), + (0x334E, 'M', u'ヤード'), + (0x334F, 'M', u'ヤール'), + (0x3350, 'M', u'ユアン'), + (0x3351, 'M', u'リットル'), + (0x3352, 'M', u'リラ'), + (0x3353, 'M', u'ルピー'), + (0x3354, 'M', u'ルーブル'), + (0x3355, 'M', u'レム'), + (0x3356, 'M', u'レントゲン'), + (0x3357, 'M', u'ワット'), + (0x3358, 'M', u'0点'), + (0x3359, 'M', u'1点'), + (0x335A, 'M', u'2点'), + (0x335B, 'M', u'3点'), + (0x335C, 'M', u'4点'), + (0x335D, 'M', u'5点'), + (0x335E, 'M', u'6点'), + (0x335F, 'M', u'7点'), + (0x3360, 'M', u'8点'), + (0x3361, 'M', u'9点'), + (0x3362, 'M', u'10点'), + (0x3363, 'M', u'11点'), + (0x3364, 'M', u'12点'), + (0x3365, 'M', u'13点'), + (0x3366, 'M', u'14点'), + (0x3367, 'M', u'15点'), + (0x3368, 'M', u'16点'), + (0x3369, 'M', u'17点'), + (0x336A, 'M', u'18点'), + (0x336B, 'M', u'19点'), + (0x336C, 'M', u'20点'), + (0x336D, 'M', u'21点'), + (0x336E, 'M', u'22点'), + (0x336F, 'M', u'23点'), + (0x3370, 'M', u'24点'), + (0x3371, 'M', u'hpa'), + (0x3372, 'M', u'da'), + (0x3373, 'M', u'au'), + (0x3374, 'M', u'bar'), + (0x3375, 'M', u'ov'), + (0x3376, 'M', u'pc'), + (0x3377, 'M', u'dm'), + (0x3378, 'M', u'dm2'), + (0x3379, 'M', u'dm3'), + (0x337A, 'M', u'iu'), + (0x337B, 'M', u'平成'), + (0x337C, 'M', u'昭和'), + (0x337D, 'M', u'大正'), + (0x337E, 'M', u'明治'), + (0x337F, 'M', u'株式会社'), + (0x3380, 'M', u'pa'), + (0x3381, 'M', u'na'), + (0x3382, 'M', u'μa'), + (0x3383, 'M', u'ma'), + (0x3384, 'M', u'ka'), + (0x3385, 'M', 
u'kb'), + (0x3386, 'M', u'mb'), + (0x3387, 'M', u'gb'), + (0x3388, 'M', u'cal'), + (0x3389, 'M', u'kcal'), + (0x338A, 'M', u'pf'), + (0x338B, 'M', u'nf'), + (0x338C, 'M', u'μf'), + (0x338D, 'M', u'μg'), + (0x338E, 'M', u'mg'), + (0x338F, 'M', u'kg'), + (0x3390, 'M', u'hz'), + (0x3391, 'M', u'khz'), + (0x3392, 'M', u'mhz'), + (0x3393, 'M', u'ghz'), + (0x3394, 'M', u'thz'), + (0x3395, 'M', u'μl'), + (0x3396, 'M', u'ml'), + (0x3397, 'M', u'dl'), + (0x3398, 'M', u'kl'), + (0x3399, 'M', u'fm'), + (0x339A, 'M', u'nm'), + (0x339B, 'M', u'μm'), + (0x339C, 'M', u'mm'), + (0x339D, 'M', u'cm'), + (0x339E, 'M', u'km'), + (0x339F, 'M', u'mm2'), + (0x33A0, 'M', u'cm2'), + (0x33A1, 'M', u'm2'), + (0x33A2, 'M', u'km2'), + (0x33A3, 'M', u'mm3'), + (0x33A4, 'M', u'cm3'), + (0x33A5, 'M', u'm3'), + (0x33A6, 'M', u'km3'), + (0x33A7, 'M', u'm∕s'), + (0x33A8, 'M', u'm∕s2'), + (0x33A9, 'M', u'pa'), + (0x33AA, 'M', u'kpa'), + (0x33AB, 'M', u'mpa'), + (0x33AC, 'M', u'gpa'), + (0x33AD, 'M', u'rad'), + (0x33AE, 'M', u'rad∕s'), + (0x33AF, 'M', u'rad∕s2'), + (0x33B0, 'M', u'ps'), + (0x33B1, 'M', u'ns'), + (0x33B2, 'M', u'μs'), + (0x33B3, 'M', u'ms'), + (0x33B4, 'M', u'pv'), + (0x33B5, 'M', u'nv'), + (0x33B6, 'M', u'μv'), + (0x33B7, 'M', u'mv'), + (0x33B8, 'M', u'kv'), + (0x33B9, 'M', u'mv'), + (0x33BA, 'M', u'pw'), + (0x33BB, 'M', u'nw'), + (0x33BC, 'M', u'μw'), + (0x33BD, 'M', u'mw'), + (0x33BE, 'M', u'kw'), + (0x33BF, 'M', u'mw'), + (0x33C0, 'M', u'kω'), + (0x33C1, 'M', u'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', u'bq'), + (0x33C4, 'M', u'cc'), + (0x33C5, 'M', u'cd'), + (0x33C6, 'M', u'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', u'db'), + (0x33C9, 'M', u'gy'), + (0x33CA, 'M', u'ha'), + (0x33CB, 'M', u'hp'), + (0x33CC, 'M', u'in'), + (0x33CD, 'M', u'kk'), + (0x33CE, 'M', u'km'), + (0x33CF, 'M', u'kt'), + (0x33D0, 'M', u'lm'), + (0x33D1, 'M', u'ln'), + (0x33D2, 'M', u'log'), + (0x33D3, 'M', u'lx'), + (0x33D4, 'M', u'mb'), + (0x33D5, 'M', u'mil'), + (0x33D6, 'M', u'mol'), + (0x33D7, 'M', u'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', u'ppm'), + (0x33DA, 'M', u'pr'), + (0x33DB, 'M', u'sr'), + (0x33DC, 'M', u'sv'), + (0x33DD, 'M', u'wb'), + (0x33DE, 'M', u'v∕m'), + (0x33DF, 'M', u'a∕m'), + (0x33E0, 'M', u'1日'), + (0x33E1, 'M', u'2日'), + (0x33E2, 'M', u'3日'), + (0x33E3, 'M', u'4日'), + (0x33E4, 'M', u'5日'), + (0x33E5, 'M', u'6日'), + (0x33E6, 'M', u'7日'), + (0x33E7, 'M', u'8日'), + (0x33E8, 'M', u'9日'), + (0x33E9, 'M', u'10日'), + (0x33EA, 'M', u'11日'), + (0x33EB, 'M', u'12日'), + (0x33EC, 'M', u'13日'), + (0x33ED, 'M', u'14日'), + (0x33EE, 'M', u'15日'), + (0x33EF, 'M', u'16日'), + (0x33F0, 'M', u'17日'), + (0x33F1, 'M', u'18日'), + (0x33F2, 'M', u'19日'), + (0x33F3, 'M', u'20日'), + (0x33F4, 'M', u'21日'), + (0x33F5, 'M', u'22日'), + (0x33F6, 'M', u'23日'), + (0x33F7, 'M', u'24日'), + (0x33F8, 'M', u'25日'), + (0x33F9, 'M', u'26日'), + (0x33FA, 'M', u'27日'), + (0x33FB, 'M', u'28日'), + (0x33FC, 'M', u'29日'), + (0x33FD, 'M', u'30日'), + (0x33FE, 'M', u'31日'), + (0x33FF, 'M', u'gal'), + (0x3400, 'V'), + (0x4DB6, 'X'), + (0x4DC0, 'V'), + (0x9FCD, 'X'), + (0xA000, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', u'ꙁ'), + (0xA641, 'V'), + (0xA642, 'M', u'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', u'ꙅ'), + (0xA645, 'V'), + (0xA646, 'M', u'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', u'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', u'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', u'ꙍ'), + (0xA64D, 'V'), + (0xA64E, 'M', u'ꙏ'), + (0xA64F, 'V'), + (0xA650, 'M', u'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', u'ꙓ'), + (0xA653, 'V'), + (0xA654, 
'M', u'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', u'ꙗ'), + (0xA657, 'V'), + (0xA658, 'M', u'ꙙ'), + (0xA659, 'V'), + (0xA65A, 'M', u'ꙛ'), + (0xA65B, 'V'), + (0xA65C, 'M', u'ꙝ'), + (0xA65D, 'V'), + (0xA65E, 'M', u'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', u'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', u'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', u'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', u'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', u'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', u'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 'M', u'ꙭ'), + (0xA66D, 'V'), + (0xA680, 'M', u'ꚁ'), + (0xA681, 'V'), + (0xA682, 'M', u'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', u'ꚅ'), + (0xA685, 'V'), + (0xA686, 'M', u'ꚇ'), + (0xA687, 'V'), + (0xA688, 'M', u'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', u'ꚋ'), + (0xA68B, 'V'), + (0xA68C, 'M', u'ꚍ'), + (0xA68D, 'V'), + (0xA68E, 'M', u'ꚏ'), + (0xA68F, 'V'), + (0xA690, 'M', u'ꚑ'), + (0xA691, 'V'), + (0xA692, 'M', u'ꚓ'), + (0xA693, 'V'), + (0xA694, 'M', u'ꚕ'), + (0xA695, 'V'), + (0xA696, 'M', u'ꚗ'), + (0xA697, 'V'), + (0xA698, 'X'), + (0xA69F, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', u'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', u'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', u'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', u'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', u'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', u'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', u'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', u'ꜳ'), + (0xA733, 'V'), + (0xA734, 'M', u'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', u'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', u'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', u'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', u'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', u'ꜿ'), + (0xA73F, 'V'), + (0xA740, 'M', u'ꝁ'), + (0xA741, 'V'), + (0xA742, 'M', u'ꝃ'), + (0xA743, 'V'), + (0xA744, 'M', u'ꝅ'), + (0xA745, 'V'), + (0xA746, 'M', u'ꝇ'), + (0xA747, 'V'), + (0xA748, 'M', u'ꝉ'), + (0xA749, 'V'), + (0xA74A, 'M', u'ꝋ'), + (0xA74B, 'V'), + (0xA74C, 'M', u'ꝍ'), + (0xA74D, 'V'), + (0xA74E, 'M', u'ꝏ'), + (0xA74F, 'V'), + (0xA750, 'M', u'ꝑ'), + (0xA751, 'V'), + (0xA752, 'M', u'ꝓ'), + (0xA753, 'V'), + (0xA754, 'M', u'ꝕ'), + (0xA755, 'V'), + (0xA756, 'M', u'ꝗ'), + (0xA757, 'V'), + (0xA758, 'M', u'ꝙ'), + (0xA759, 'V'), + (0xA75A, 'M', u'ꝛ'), + (0xA75B, 'V'), + (0xA75C, 'M', u'ꝝ'), + (0xA75D, 'V'), + (0xA75E, 'M', u'ꝟ'), + (0xA75F, 'V'), + (0xA760, 'M', u'ꝡ'), + (0xA761, 'V'), + (0xA762, 'M', u'ꝣ'), + (0xA763, 'V'), + (0xA764, 'M', u'ꝥ'), + (0xA765, 'V'), + (0xA766, 'M', u'ꝧ'), + (0xA767, 'V'), + (0xA768, 'M', u'ꝩ'), + (0xA769, 'V'), + (0xA76A, 'M', u'ꝫ'), + (0xA76B, 'V'), + (0xA76C, 'M', u'ꝭ'), + (0xA76D, 'V'), + (0xA76E, 'M', u'ꝯ'), + (0xA76F, 'V'), + (0xA770, 'M', u'ꝯ'), + (0xA771, 'V'), + (0xA779, 'M', u'ꝺ'), + (0xA77A, 'V'), + (0xA77B, 'M', u'ꝼ'), + (0xA77C, 'V'), + (0xA77D, 'M', u'ᵹ'), + (0xA77E, 'M', u'ꝿ'), + (0xA77F, 'V'), + (0xA780, 'M', u'ꞁ'), + (0xA781, 'V'), + (0xA782, 'M', u'ꞃ'), + (0xA783, 'V'), + (0xA784, 'M', u'ꞅ'), + (0xA785, 'V'), + (0xA786, 'M', u'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', u'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', u'ɥ'), + (0xA78E, 'V'), + (0xA78F, 'X'), + (0xA790, 'M', u'ꞑ'), + (0xA791, 'V'), + (0xA792, 'M', u'ꞓ'), + (0xA793, 'V'), + (0xA794, 'X'), + (0xA7A0, 'M', u'ꞡ'), + (0xA7A1, 'V'), + (0xA7A2, 'M', u'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', u'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', u'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', u'ꞩ'), + (0xA7A9, 'V'), + (0xA7AA, 'M', u'ɦ'), + (0xA7AB, 'X'), + (0xA7F8, 'M', u'ħ'), + (0xA7F9, 'M', u'œ'), + (0xA7FA, 'V'), + (0xA82C, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C5, 'X'), + (0xA8CE, 'V'), + 
(0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA8FC, 'X'), + (0xA900, 'V'), + (0xA954, 'X'), + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9E0, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAA7C, 'X'), + (0xAA80, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', u'豈'), + (0xF901, 'M', u'更'), + (0xF902, 'M', u'車'), + (0xF903, 'M', u'賈'), + (0xF904, 'M', u'滑'), + (0xF905, 'M', u'串'), + (0xF906, 'M', u'句'), + (0xF907, 'M', u'龜'), + (0xF909, 'M', u'契'), + (0xF90A, 'M', u'金'), + (0xF90B, 'M', u'喇'), + (0xF90C, 'M', u'奈'), + (0xF90D, 'M', u'懶'), + (0xF90E, 'M', u'癩'), + (0xF90F, 'M', u'羅'), + (0xF910, 'M', u'蘿'), + (0xF911, 'M', u'螺'), + (0xF912, 'M', u'裸'), + (0xF913, 'M', u'邏'), + (0xF914, 'M', u'樂'), + (0xF915, 'M', u'洛'), + (0xF916, 'M', u'烙'), + (0xF917, 'M', u'珞'), + (0xF918, 'M', u'落'), + (0xF919, 'M', u'酪'), + (0xF91A, 'M', u'駱'), + (0xF91B, 'M', u'亂'), + (0xF91C, 'M', u'卵'), + (0xF91D, 'M', u'欄'), + (0xF91E, 'M', u'爛'), + (0xF91F, 'M', u'蘭'), + (0xF920, 'M', u'鸞'), + (0xF921, 'M', u'嵐'), + (0xF922, 'M', u'濫'), + (0xF923, 'M', u'藍'), + (0xF924, 'M', u'襤'), + (0xF925, 'M', u'拉'), + (0xF926, 'M', u'臘'), + (0xF927, 'M', u'蠟'), + (0xF928, 'M', u'廊'), + (0xF929, 'M', u'朗'), + (0xF92A, 'M', u'浪'), + (0xF92B, 'M', u'狼'), + (0xF92C, 'M', u'郎'), + (0xF92D, 'M', u'來'), + (0xF92E, 'M', u'冷'), + (0xF92F, 'M', u'勞'), + (0xF930, 'M', u'擄'), + (0xF931, 'M', u'櫓'), + (0xF932, 'M', u'爐'), + (0xF933, 'M', u'盧'), + (0xF934, 'M', u'老'), + (0xF935, 'M', u'蘆'), + (0xF936, 'M', u'虜'), + (0xF937, 'M', u'路'), + (0xF938, 'M', u'露'), + (0xF939, 'M', u'魯'), + (0xF93A, 'M', u'鷺'), + (0xF93B, 'M', u'碌'), + (0xF93C, 'M', u'祿'), + (0xF93D, 'M', u'綠'), + (0xF93E, 'M', u'菉'), + (0xF93F, 'M', u'錄'), + (0xF940, 'M', u'鹿'), + (0xF941, 'M', u'論'), + (0xF942, 'M', u'壟'), + (0xF943, 'M', u'弄'), + (0xF944, 'M', u'籠'), + (0xF945, 'M', u'聾'), + (0xF946, 'M', u'牢'), + (0xF947, 'M', u'磊'), + (0xF948, 'M', u'賂'), + (0xF949, 'M', u'雷'), + (0xF94A, 'M', u'壘'), + (0xF94B, 'M', u'屢'), + (0xF94C, 'M', u'樓'), + (0xF94D, 'M', u'淚'), + (0xF94E, 'M', u'漏'), + (0xF94F, 'M', u'累'), + (0xF950, 'M', u'縷'), + (0xF951, 'M', u'陋'), + (0xF952, 'M', u'勒'), + (0xF953, 'M', u'肋'), + (0xF954, 'M', u'凜'), + (0xF955, 'M', u'凌'), + (0xF956, 'M', u'稜'), + (0xF957, 'M', u'綾'), + (0xF958, 'M', u'菱'), + (0xF959, 'M', u'陵'), + (0xF95A, 'M', u'讀'), + (0xF95B, 'M', u'拏'), + (0xF95C, 'M', u'樂'), + (0xF95D, 'M', u'諾'), + (0xF95E, 'M', u'丹'), + (0xF95F, 'M', u'寧'), + (0xF960, 'M', u'怒'), + (0xF961, 'M', u'率'), + (0xF962, 'M', u'異'), + (0xF963, 'M', u'北'), + (0xF964, 'M', u'磻'), + (0xF965, 'M', u'便'), + (0xF966, 'M', u'復'), + (0xF967, 'M', u'不'), + (0xF968, 'M', u'泌'), + (0xF969, 'M', u'數'), + (0xF96A, 'M', u'索'), + (0xF96B, 'M', u'參'), + (0xF96C, 'M', u'塞'), + (0xF96D, 'M', u'省'), + (0xF96E, 'M', u'葉'), + (0xF96F, 'M', u'說'), + (0xF970, 'M', u'殺'), + (0xF971, 'M', u'辰'), + (0xF972, 'M', u'沈'), + (0xF973, 'M', u'拾'), + (0xF974, 'M', u'若'), + (0xF975, 'M', u'掠'), + (0xF976, 'M', u'略'), + (0xF977, 'M', u'亮'), + (0xF978, 'M', u'兩'), + (0xF979, 'M', u'凉'), + 
(0xF97A, 'M', u'梁'), + (0xF97B, 'M', u'糧'), + (0xF97C, 'M', u'良'), + (0xF97D, 'M', u'諒'), + (0xF97E, 'M', u'量'), + (0xF97F, 'M', u'勵'), + (0xF980, 'M', u'呂'), + (0xF981, 'M', u'女'), + (0xF982, 'M', u'廬'), + (0xF983, 'M', u'旅'), + (0xF984, 'M', u'濾'), + (0xF985, 'M', u'礪'), + (0xF986, 'M', u'閭'), + (0xF987, 'M', u'驪'), + (0xF988, 'M', u'麗'), + (0xF989, 'M', u'黎'), + (0xF98A, 'M', u'力'), + (0xF98B, 'M', u'曆'), + (0xF98C, 'M', u'歷'), + (0xF98D, 'M', u'轢'), + (0xF98E, 'M', u'年'), + (0xF98F, 'M', u'憐'), + (0xF990, 'M', u'戀'), + (0xF991, 'M', u'撚'), + (0xF992, 'M', u'漣'), + (0xF993, 'M', u'煉'), + (0xF994, 'M', u'璉'), + (0xF995, 'M', u'秊'), + (0xF996, 'M', u'練'), + (0xF997, 'M', u'聯'), + (0xF998, 'M', u'輦'), + (0xF999, 'M', u'蓮'), + (0xF99A, 'M', u'連'), + (0xF99B, 'M', u'鍊'), + (0xF99C, 'M', u'列'), + (0xF99D, 'M', u'劣'), + (0xF99E, 'M', u'咽'), + (0xF99F, 'M', u'烈'), + (0xF9A0, 'M', u'裂'), + (0xF9A1, 'M', u'說'), + (0xF9A2, 'M', u'廉'), + (0xF9A3, 'M', u'念'), + (0xF9A4, 'M', u'捻'), + (0xF9A5, 'M', u'殮'), + (0xF9A6, 'M', u'簾'), + (0xF9A7, 'M', u'獵'), + (0xF9A8, 'M', u'令'), + (0xF9A9, 'M', u'囹'), + (0xF9AA, 'M', u'寧'), + (0xF9AB, 'M', u'嶺'), + (0xF9AC, 'M', u'怜'), + (0xF9AD, 'M', u'玲'), + (0xF9AE, 'M', u'瑩'), + (0xF9AF, 'M', u'羚'), + (0xF9B0, 'M', u'聆'), + (0xF9B1, 'M', u'鈴'), + (0xF9B2, 'M', u'零'), + (0xF9B3, 'M', u'靈'), + (0xF9B4, 'M', u'領'), + (0xF9B5, 'M', u'例'), + (0xF9B6, 'M', u'禮'), + (0xF9B7, 'M', u'醴'), + (0xF9B8, 'M', u'隸'), + (0xF9B9, 'M', u'惡'), + (0xF9BA, 'M', u'了'), + (0xF9BB, 'M', u'僚'), + (0xF9BC, 'M', u'寮'), + (0xF9BD, 'M', u'尿'), + (0xF9BE, 'M', u'料'), + (0xF9BF, 'M', u'樂'), + (0xF9C0, 'M', u'燎'), + (0xF9C1, 'M', u'療'), + (0xF9C2, 'M', u'蓼'), + (0xF9C3, 'M', u'遼'), + (0xF9C4, 'M', u'龍'), + (0xF9C5, 'M', u'暈'), + (0xF9C6, 'M', u'阮'), + (0xF9C7, 'M', u'劉'), + (0xF9C8, 'M', u'杻'), + (0xF9C9, 'M', u'柳'), + (0xF9CA, 'M', u'流'), + (0xF9CB, 'M', u'溜'), + (0xF9CC, 'M', u'琉'), + (0xF9CD, 'M', u'留'), + (0xF9CE, 'M', u'硫'), + (0xF9CF, 'M', u'紐'), + (0xF9D0, 'M', u'類'), + (0xF9D1, 'M', u'六'), + (0xF9D2, 'M', u'戮'), + (0xF9D3, 'M', u'陸'), + (0xF9D4, 'M', u'倫'), + (0xF9D5, 'M', u'崙'), + (0xF9D6, 'M', u'淪'), + (0xF9D7, 'M', u'輪'), + (0xF9D8, 'M', u'律'), + (0xF9D9, 'M', u'慄'), + (0xF9DA, 'M', u'栗'), + (0xF9DB, 'M', u'率'), + (0xF9DC, 'M', u'隆'), + (0xF9DD, 'M', u'利'), + (0xF9DE, 'M', u'吏'), + (0xF9DF, 'M', u'履'), + (0xF9E0, 'M', u'易'), + (0xF9E1, 'M', u'李'), + (0xF9E2, 'M', u'梨'), + (0xF9E3, 'M', u'泥'), + (0xF9E4, 'M', u'理'), + (0xF9E5, 'M', u'痢'), + (0xF9E6, 'M', u'罹'), + (0xF9E7, 'M', u'裏'), + (0xF9E8, 'M', u'裡'), + (0xF9E9, 'M', u'里'), + (0xF9EA, 'M', u'離'), + (0xF9EB, 'M', u'匿'), + (0xF9EC, 'M', u'溺'), + (0xF9ED, 'M', u'吝'), + (0xF9EE, 'M', u'燐'), + (0xF9EF, 'M', u'璘'), + (0xF9F0, 'M', u'藺'), + (0xF9F1, 'M', u'隣'), + (0xF9F2, 'M', u'鱗'), + (0xF9F3, 'M', u'麟'), + (0xF9F4, 'M', u'林'), + (0xF9F5, 'M', u'淋'), + (0xF9F6, 'M', u'臨'), + (0xF9F7, 'M', u'立'), + (0xF9F8, 'M', u'笠'), + (0xF9F9, 'M', u'粒'), + (0xF9FA, 'M', u'狀'), + (0xF9FB, 'M', u'炙'), + (0xF9FC, 'M', u'識'), + (0xF9FD, 'M', u'什'), + (0xF9FE, 'M', u'茶'), + (0xF9FF, 'M', u'刺'), + (0xFA00, 'M', u'切'), + (0xFA01, 'M', u'度'), + (0xFA02, 'M', u'拓'), + (0xFA03, 'M', u'糖'), + (0xFA04, 'M', u'宅'), + (0xFA05, 'M', u'洞'), + (0xFA06, 'M', u'暴'), + (0xFA07, 'M', u'輻'), + (0xFA08, 'M', u'行'), + (0xFA09, 'M', u'降'), + (0xFA0A, 'M', u'見'), + (0xFA0B, 'M', u'廓'), + (0xFA0C, 'M', u'兀'), + (0xFA0D, 'M', u'嗀'), + (0xFA0E, 'V'), + (0xFA10, 'M', u'塚'), + (0xFA11, 'V'), + (0xFA12, 'M', u'晴'), + (0xFA13, 'V'), + (0xFA15, 'M', u'凞'), + (0xFA16, 'M', u'猪'), + 
(0xFA17, 'M', u'益'), + (0xFA18, 'M', u'礼'), + (0xFA19, 'M', u'神'), + (0xFA1A, 'M', u'祥'), + (0xFA1B, 'M', u'福'), + (0xFA1C, 'M', u'靖'), + (0xFA1D, 'M', u'精'), + (0xFA1E, 'M', u'羽'), + (0xFA1F, 'V'), + (0xFA20, 'M', u'蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', u'諸'), + (0xFA23, 'V'), + (0xFA25, 'M', u'逸'), + (0xFA26, 'M', u'都'), + (0xFA27, 'V'), + (0xFA2A, 'M', u'飯'), + (0xFA2B, 'M', u'飼'), + (0xFA2C, 'M', u'館'), + (0xFA2D, 'M', u'鶴'), + (0xFA2E, 'M', u'郞'), + (0xFA2F, 'M', u'隷'), + (0xFA30, 'M', u'侮'), + (0xFA31, 'M', u'僧'), + (0xFA32, 'M', u'免'), + (0xFA33, 'M', u'勉'), + (0xFA34, 'M', u'勤'), + (0xFA35, 'M', u'卑'), + (0xFA36, 'M', u'喝'), + (0xFA37, 'M', u'嘆'), + (0xFA38, 'M', u'器'), + (0xFA39, 'M', u'塀'), + (0xFA3A, 'M', u'墨'), + (0xFA3B, 'M', u'層'), + (0xFA3C, 'M', u'屮'), + (0xFA3D, 'M', u'悔'), + (0xFA3E, 'M', u'慨'), + (0xFA3F, 'M', u'憎'), + (0xFA40, 'M', u'懲'), + (0xFA41, 'M', u'敏'), + (0xFA42, 'M', u'既'), + (0xFA43, 'M', u'暑'), + (0xFA44, 'M', u'梅'), + (0xFA45, 'M', u'海'), + (0xFA46, 'M', u'渚'), + (0xFA47, 'M', u'漢'), + (0xFA48, 'M', u'煮'), + (0xFA49, 'M', u'爫'), + (0xFA4A, 'M', u'琢'), + (0xFA4B, 'M', u'碑'), + (0xFA4C, 'M', u'社'), + (0xFA4D, 'M', u'祉'), + (0xFA4E, 'M', u'祈'), + (0xFA4F, 'M', u'祐'), + (0xFA50, 'M', u'祖'), + (0xFA51, 'M', u'祝'), + (0xFA52, 'M', u'禍'), + (0xFA53, 'M', u'禎'), + (0xFA54, 'M', u'穀'), + (0xFA55, 'M', u'突'), + (0xFA56, 'M', u'節'), + (0xFA57, 'M', u'練'), + (0xFA58, 'M', u'縉'), + (0xFA59, 'M', u'繁'), + (0xFA5A, 'M', u'署'), + (0xFA5B, 'M', u'者'), + (0xFA5C, 'M', u'臭'), + (0xFA5D, 'M', u'艹'), + (0xFA5F, 'M', u'著'), + (0xFA60, 'M', u'褐'), + (0xFA61, 'M', u'視'), + (0xFA62, 'M', u'謁'), + (0xFA63, 'M', u'謹'), + (0xFA64, 'M', u'賓'), + (0xFA65, 'M', u'贈'), + (0xFA66, 'M', u'辶'), + (0xFA67, 'M', u'逸'), + (0xFA68, 'M', u'難'), + (0xFA69, 'M', u'響'), + (0xFA6A, 'M', u'頻'), + (0xFA6B, 'M', u'恵'), + (0xFA6C, 'M', u'𤋮'), + (0xFA6D, 'M', u'舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', u'並'), + (0xFA71, 'M', u'况'), + (0xFA72, 'M', u'全'), + (0xFA73, 'M', u'侀'), + (0xFA74, 'M', u'充'), + (0xFA75, 'M', u'冀'), + (0xFA76, 'M', u'勇'), + (0xFA77, 'M', u'勺'), + (0xFA78, 'M', u'喝'), + (0xFA79, 'M', u'啕'), + (0xFA7A, 'M', u'喙'), + (0xFA7B, 'M', u'嗢'), + (0xFA7C, 'M', u'塚'), + (0xFA7D, 'M', u'墳'), + (0xFA7E, 'M', u'奄'), + (0xFA7F, 'M', u'奔'), + (0xFA80, 'M', u'婢'), + (0xFA81, 'M', u'嬨'), + (0xFA82, 'M', u'廒'), + (0xFA83, 'M', u'廙'), + (0xFA84, 'M', u'彩'), + (0xFA85, 'M', u'徭'), + (0xFA86, 'M', u'惘'), + (0xFA87, 'M', u'慎'), + (0xFA88, 'M', u'愈'), + (0xFA89, 'M', u'憎'), + (0xFA8A, 'M', u'慠'), + (0xFA8B, 'M', u'懲'), + (0xFA8C, 'M', u'戴'), + (0xFA8D, 'M', u'揄'), + (0xFA8E, 'M', u'搜'), + (0xFA8F, 'M', u'摒'), + (0xFA90, 'M', u'敖'), + (0xFA91, 'M', u'晴'), + (0xFA92, 'M', u'朗'), + (0xFA93, 'M', u'望'), + (0xFA94, 'M', u'杖'), + (0xFA95, 'M', u'歹'), + (0xFA96, 'M', u'殺'), + (0xFA97, 'M', u'流'), + (0xFA98, 'M', u'滛'), + (0xFA99, 'M', u'滋'), + (0xFA9A, 'M', u'漢'), + (0xFA9B, 'M', u'瀞'), + (0xFA9C, 'M', u'煮'), + (0xFA9D, 'M', u'瞧'), + (0xFA9E, 'M', u'爵'), + (0xFA9F, 'M', u'犯'), + (0xFAA0, 'M', u'猪'), + (0xFAA1, 'M', u'瑱'), + (0xFAA2, 'M', u'甆'), + (0xFAA3, 'M', u'画'), + (0xFAA4, 'M', u'瘝'), + (0xFAA5, 'M', u'瘟'), + (0xFAA6, 'M', u'益'), + (0xFAA7, 'M', u'盛'), + (0xFAA8, 'M', u'直'), + (0xFAA9, 'M', u'睊'), + (0xFAAA, 'M', u'着'), + (0xFAAB, 'M', u'磌'), + (0xFAAC, 'M', u'窱'), + (0xFAAD, 'M', u'節'), + (0xFAAE, 'M', u'类'), + (0xFAAF, 'M', u'絛'), + (0xFAB0, 'M', u'練'), + (0xFAB1, 'M', u'缾'), + (0xFAB2, 'M', u'者'), + (0xFAB3, 'M', u'荒'), + (0xFAB4, 'M', u'華'), + (0xFAB5, 'M', u'蝹'), + (0xFAB6, 'M', u'襁'), + (0xFAB7, 'M', 
u'覆'), + (0xFAB8, 'M', u'視'), + (0xFAB9, 'M', u'調'), + (0xFABA, 'M', u'諸'), + (0xFABB, 'M', u'請'), + (0xFABC, 'M', u'謁'), + (0xFABD, 'M', u'諾'), + (0xFABE, 'M', u'諭'), + (0xFABF, 'M', u'謹'), + (0xFAC0, 'M', u'變'), + (0xFAC1, 'M', u'贈'), + (0xFAC2, 'M', u'輸'), + (0xFAC3, 'M', u'遲'), + (0xFAC4, 'M', u'醙'), + (0xFAC5, 'M', u'鉶'), + (0xFAC6, 'M', u'陼'), + (0xFAC7, 'M', u'難'), + (0xFAC8, 'M', u'靖'), + (0xFAC9, 'M', u'韛'), + (0xFACA, 'M', u'響'), + (0xFACB, 'M', u'頋'), + (0xFACC, 'M', u'頻'), + (0xFACD, 'M', u'鬒'), + (0xFACE, 'M', u'龜'), + (0xFACF, 'M', u'𢡊'), + (0xFAD0, 'M', u'𢡄'), + (0xFAD1, 'M', u'𣏕'), + (0xFAD2, 'M', u'㮝'), + (0xFAD3, 'M', u'䀘'), + (0xFAD4, 'M', u'䀹'), + (0xFAD5, 'M', u'𥉉'), + (0xFAD6, 'M', u'𥳐'), + (0xFAD7, 'M', u'𧻓'), + (0xFAD8, 'M', u'齃'), + (0xFAD9, 'M', u'龎'), + (0xFADA, 'X'), + (0xFB00, 'M', u'ff'), + (0xFB01, 'M', u'fi'), + (0xFB02, 'M', u'fl'), + (0xFB03, 'M', u'ffi'), + (0xFB04, 'M', u'ffl'), + (0xFB05, 'M', u'st'), + (0xFB07, 'X'), + (0xFB13, 'M', u'մն'), + (0xFB14, 'M', u'մե'), + (0xFB15, 'M', u'մի'), + (0xFB16, 'M', u'վն'), + (0xFB17, 'M', u'մխ'), + (0xFB18, 'X'), + (0xFB1D, 'M', u'יִ'), + (0xFB1E, 'V'), + (0xFB1F, 'M', u'ײַ'), + (0xFB20, 'M', u'ע'), + (0xFB21, 'M', u'א'), + (0xFB22, 'M', u'ד'), + (0xFB23, 'M', u'ה'), + (0xFB24, 'M', u'כ'), + (0xFB25, 'M', u'ל'), + (0xFB26, 'M', u'ם'), + (0xFB27, 'M', u'ר'), + (0xFB28, 'M', u'ת'), + (0xFB29, '3', u'+'), + (0xFB2A, 'M', u'שׁ'), + (0xFB2B, 'M', u'שׂ'), + (0xFB2C, 'M', u'שּׁ'), + (0xFB2D, 'M', u'שּׂ'), + (0xFB2E, 'M', u'אַ'), + (0xFB2F, 'M', u'אָ'), + (0xFB30, 'M', u'אּ'), + (0xFB31, 'M', u'בּ'), + (0xFB32, 'M', u'גּ'), + (0xFB33, 'M', u'דּ'), + (0xFB34, 'M', u'הּ'), + (0xFB35, 'M', u'וּ'), + (0xFB36, 'M', u'זּ'), + (0xFB37, 'X'), + (0xFB38, 'M', u'טּ'), + (0xFB39, 'M', u'יּ'), + (0xFB3A, 'M', u'ךּ'), + (0xFB3B, 'M', u'כּ'), + (0xFB3C, 'M', u'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', u'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', u'נּ'), + (0xFB41, 'M', u'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', u'ףּ'), + (0xFB44, 'M', u'פּ'), + (0xFB45, 'X'), + (0xFB46, 'M', u'צּ'), + (0xFB47, 'M', u'קּ'), + (0xFB48, 'M', u'רּ'), + (0xFB49, 'M', u'שּ'), + (0xFB4A, 'M', u'תּ'), + (0xFB4B, 'M', u'וֹ'), + (0xFB4C, 'M', u'בֿ'), + (0xFB4D, 'M', u'כֿ'), + (0xFB4E, 'M', u'פֿ'), + (0xFB4F, 'M', u'אל'), + (0xFB50, 'M', u'ٱ'), + (0xFB52, 'M', u'ٻ'), + (0xFB56, 'M', u'پ'), + (0xFB5A, 'M', u'ڀ'), + (0xFB5E, 'M', u'ٺ'), + (0xFB62, 'M', u'ٿ'), + (0xFB66, 'M', u'ٹ'), + (0xFB6A, 'M', u'ڤ'), + (0xFB6E, 'M', u'ڦ'), + (0xFB72, 'M', u'ڄ'), + (0xFB76, 'M', u'ڃ'), + (0xFB7A, 'M', u'چ'), + (0xFB7E, 'M', u'ڇ'), + (0xFB82, 'M', u'ڍ'), + (0xFB84, 'M', u'ڌ'), + (0xFB86, 'M', u'ڎ'), + (0xFB88, 'M', u'ڈ'), + (0xFB8A, 'M', u'ژ'), + (0xFB8C, 'M', u'ڑ'), + (0xFB8E, 'M', u'ک'), + (0xFB92, 'M', u'گ'), + (0xFB96, 'M', u'ڳ'), + (0xFB9A, 'M', u'ڱ'), + (0xFB9E, 'M', u'ں'), + (0xFBA0, 'M', u'ڻ'), + (0xFBA4, 'M', u'ۀ'), + (0xFBA6, 'M', u'ہ'), + (0xFBAA, 'M', u'ھ'), + (0xFBAE, 'M', u'ے'), + (0xFBB0, 'M', u'ۓ'), + (0xFBB2, 'V'), + (0xFBC2, 'X'), + (0xFBD3, 'M', u'ڭ'), + (0xFBD7, 'M', u'ۇ'), + (0xFBD9, 'M', u'ۆ'), + (0xFBDB, 'M', u'ۈ'), + (0xFBDD, 'M', u'ۇٴ'), + (0xFBDE, 'M', u'ۋ'), + (0xFBE0, 'M', u'ۅ'), + (0xFBE2, 'M', u'ۉ'), + (0xFBE4, 'M', u'ې'), + (0xFBE8, 'M', u'ى'), + (0xFBEA, 'M', u'ئا'), + (0xFBEC, 'M', u'ئە'), + (0xFBEE, 'M', u'ئو'), + (0xFBF0, 'M', u'ئۇ'), + (0xFBF2, 'M', u'ئۆ'), + (0xFBF4, 'M', u'ئۈ'), + (0xFBF6, 'M', u'ئې'), + (0xFBF9, 'M', u'ئى'), + (0xFBFC, 'M', u'ی'), + (0xFC00, 'M', u'ئج'), + (0xFC01, 'M', u'ئح'), + (0xFC02, 'M', u'ئم'), + (0xFC03, 'M', u'ئى'), + 
(0xFC04, 'M', u'ئي'), + (0xFC05, 'M', u'بج'), + (0xFC06, 'M', u'بح'), + (0xFC07, 'M', u'بخ'), + (0xFC08, 'M', u'بم'), + (0xFC09, 'M', u'بى'), + (0xFC0A, 'M', u'بي'), + (0xFC0B, 'M', u'تج'), + (0xFC0C, 'M', u'تح'), + (0xFC0D, 'M', u'تخ'), + (0xFC0E, 'M', u'تم'), + (0xFC0F, 'M', u'تى'), + (0xFC10, 'M', u'تي'), + (0xFC11, 'M', u'ثج'), + (0xFC12, 'M', u'ثم'), + (0xFC13, 'M', u'ثى'), + (0xFC14, 'M', u'ثي'), + (0xFC15, 'M', u'جح'), + (0xFC16, 'M', u'جم'), + (0xFC17, 'M', u'حج'), + (0xFC18, 'M', u'حم'), + (0xFC19, 'M', u'خج'), + (0xFC1A, 'M', u'خح'), + (0xFC1B, 'M', u'خم'), + (0xFC1C, 'M', u'سج'), + (0xFC1D, 'M', u'سح'), + (0xFC1E, 'M', u'سخ'), + (0xFC1F, 'M', u'سم'), + (0xFC20, 'M', u'صح'), + (0xFC21, 'M', u'صم'), + (0xFC22, 'M', u'ضج'), + (0xFC23, 'M', u'ضح'), + (0xFC24, 'M', u'ضخ'), + (0xFC25, 'M', u'ضم'), + (0xFC26, 'M', u'طح'), + (0xFC27, 'M', u'طم'), + (0xFC28, 'M', u'ظم'), + (0xFC29, 'M', u'عج'), + (0xFC2A, 'M', u'عم'), + (0xFC2B, 'M', u'غج'), + (0xFC2C, 'M', u'غم'), + (0xFC2D, 'M', u'فج'), + (0xFC2E, 'M', u'فح'), + (0xFC2F, 'M', u'فخ'), + (0xFC30, 'M', u'فم'), + (0xFC31, 'M', u'فى'), + (0xFC32, 'M', u'في'), + (0xFC33, 'M', u'قح'), + (0xFC34, 'M', u'قم'), + (0xFC35, 'M', u'قى'), + (0xFC36, 'M', u'قي'), + (0xFC37, 'M', u'كا'), + (0xFC38, 'M', u'كج'), + (0xFC39, 'M', u'كح'), + (0xFC3A, 'M', u'كخ'), + (0xFC3B, 'M', u'كل'), + (0xFC3C, 'M', u'كم'), + (0xFC3D, 'M', u'كى'), + (0xFC3E, 'M', u'كي'), + (0xFC3F, 'M', u'لج'), + (0xFC40, 'M', u'لح'), + (0xFC41, 'M', u'لخ'), + (0xFC42, 'M', u'لم'), + (0xFC43, 'M', u'لى'), + (0xFC44, 'M', u'لي'), + (0xFC45, 'M', u'مج'), + (0xFC46, 'M', u'مح'), + (0xFC47, 'M', u'مخ'), + (0xFC48, 'M', u'مم'), + (0xFC49, 'M', u'مى'), + (0xFC4A, 'M', u'مي'), + (0xFC4B, 'M', u'نج'), + (0xFC4C, 'M', u'نح'), + (0xFC4D, 'M', u'نخ'), + (0xFC4E, 'M', u'نم'), + (0xFC4F, 'M', u'نى'), + (0xFC50, 'M', u'ني'), + (0xFC51, 'M', u'هج'), + (0xFC52, 'M', u'هم'), + (0xFC53, 'M', u'هى'), + (0xFC54, 'M', u'هي'), + (0xFC55, 'M', u'يج'), + (0xFC56, 'M', u'يح'), + (0xFC57, 'M', u'يخ'), + (0xFC58, 'M', u'يم'), + (0xFC59, 'M', u'يى'), + (0xFC5A, 'M', u'يي'), + (0xFC5B, 'M', u'ذٰ'), + (0xFC5C, 'M', u'رٰ'), + (0xFC5D, 'M', u'ىٰ'), + (0xFC5E, '3', u' ٌّ'), + (0xFC5F, '3', u' ٍّ'), + (0xFC60, '3', u' َّ'), + (0xFC61, '3', u' ُّ'), + (0xFC62, '3', u' ِّ'), + (0xFC63, '3', u' ّٰ'), + (0xFC64, 'M', u'ئر'), + (0xFC65, 'M', u'ئز'), + (0xFC66, 'M', u'ئم'), + (0xFC67, 'M', u'ئن'), + (0xFC68, 'M', u'ئى'), + (0xFC69, 'M', u'ئي'), + (0xFC6A, 'M', u'بر'), + (0xFC6B, 'M', u'بز'), + (0xFC6C, 'M', u'بم'), + (0xFC6D, 'M', u'بن'), + (0xFC6E, 'M', u'بى'), + (0xFC6F, 'M', u'بي'), + (0xFC70, 'M', u'تر'), + (0xFC71, 'M', u'تز'), + (0xFC72, 'M', u'تم'), + (0xFC73, 'M', u'تن'), + (0xFC74, 'M', u'تى'), + (0xFC75, 'M', u'تي'), + (0xFC76, 'M', u'ثر'), + (0xFC77, 'M', u'ثز'), + (0xFC78, 'M', u'ثم'), + (0xFC79, 'M', u'ثن'), + (0xFC7A, 'M', u'ثى'), + (0xFC7B, 'M', u'ثي'), + (0xFC7C, 'M', u'فى'), + (0xFC7D, 'M', u'في'), + (0xFC7E, 'M', u'قى'), + (0xFC7F, 'M', u'قي'), + (0xFC80, 'M', u'كا'), + (0xFC81, 'M', u'كل'), + (0xFC82, 'M', u'كم'), + (0xFC83, 'M', u'كى'), + (0xFC84, 'M', u'كي'), + (0xFC85, 'M', u'لم'), + (0xFC86, 'M', u'لى'), + (0xFC87, 'M', u'لي'), + (0xFC88, 'M', u'ما'), + (0xFC89, 'M', u'مم'), + (0xFC8A, 'M', u'نر'), + (0xFC8B, 'M', u'نز'), + (0xFC8C, 'M', u'نم'), + (0xFC8D, 'M', u'نن'), + (0xFC8E, 'M', u'نى'), + (0xFC8F, 'M', u'ني'), + (0xFC90, 'M', u'ىٰ'), + (0xFC91, 'M', u'ير'), + (0xFC92, 'M', u'يز'), + (0xFC93, 'M', u'يم'), + (0xFC94, 'M', u'ين'), + (0xFC95, 'M', u'يى'), + (0xFC96, 'M', u'يي'), + (0xFC97, 'M', 
u'ئج'), + (0xFC98, 'M', u'ئح'), + (0xFC99, 'M', u'ئخ'), + (0xFC9A, 'M', u'ئم'), + (0xFC9B, 'M', u'ئه'), + (0xFC9C, 'M', u'بج'), + (0xFC9D, 'M', u'بح'), + (0xFC9E, 'M', u'بخ'), + (0xFC9F, 'M', u'بم'), + (0xFCA0, 'M', u'به'), + (0xFCA1, 'M', u'تج'), + (0xFCA2, 'M', u'تح'), + (0xFCA3, 'M', u'تخ'), + (0xFCA4, 'M', u'تم'), + (0xFCA5, 'M', u'ته'), + (0xFCA6, 'M', u'ثم'), + (0xFCA7, 'M', u'جح'), + (0xFCA8, 'M', u'جم'), + (0xFCA9, 'M', u'حج'), + (0xFCAA, 'M', u'حم'), + (0xFCAB, 'M', u'خج'), + (0xFCAC, 'M', u'خم'), + (0xFCAD, 'M', u'سج'), + (0xFCAE, 'M', u'سح'), + (0xFCAF, 'M', u'سخ'), + (0xFCB0, 'M', u'سم'), + (0xFCB1, 'M', u'صح'), + (0xFCB2, 'M', u'صخ'), + (0xFCB3, 'M', u'صم'), + (0xFCB4, 'M', u'ضج'), + (0xFCB5, 'M', u'ضح'), + (0xFCB6, 'M', u'ضخ'), + (0xFCB7, 'M', u'ضم'), + (0xFCB8, 'M', u'طح'), + (0xFCB9, 'M', u'ظم'), + (0xFCBA, 'M', u'عج'), + (0xFCBB, 'M', u'عم'), + (0xFCBC, 'M', u'غج'), + (0xFCBD, 'M', u'غم'), + (0xFCBE, 'M', u'فج'), + (0xFCBF, 'M', u'فح'), + (0xFCC0, 'M', u'فخ'), + (0xFCC1, 'M', u'فم'), + (0xFCC2, 'M', u'قح'), + (0xFCC3, 'M', u'قم'), + (0xFCC4, 'M', u'كج'), + (0xFCC5, 'M', u'كح'), + (0xFCC6, 'M', u'كخ'), + (0xFCC7, 'M', u'كل'), + (0xFCC8, 'M', u'كم'), + (0xFCC9, 'M', u'لج'), + (0xFCCA, 'M', u'لح'), + (0xFCCB, 'M', u'لخ'), + (0xFCCC, 'M', u'لم'), + (0xFCCD, 'M', u'له'), + (0xFCCE, 'M', u'مج'), + (0xFCCF, 'M', u'مح'), + (0xFCD0, 'M', u'مخ'), + (0xFCD1, 'M', u'مم'), + (0xFCD2, 'M', u'نج'), + (0xFCD3, 'M', u'نح'), + (0xFCD4, 'M', u'نخ'), + (0xFCD5, 'M', u'نم'), + (0xFCD6, 'M', u'نه'), + (0xFCD7, 'M', u'هج'), + (0xFCD8, 'M', u'هم'), + (0xFCD9, 'M', u'هٰ'), + (0xFCDA, 'M', u'يج'), + (0xFCDB, 'M', u'يح'), + (0xFCDC, 'M', u'يخ'), + (0xFCDD, 'M', u'يم'), + (0xFCDE, 'M', u'يه'), + (0xFCDF, 'M', u'ئم'), + (0xFCE0, 'M', u'ئه'), + (0xFCE1, 'M', u'بم'), + (0xFCE2, 'M', u'به'), + (0xFCE3, 'M', u'تم'), + (0xFCE4, 'M', u'ته'), + (0xFCE5, 'M', u'ثم'), + (0xFCE6, 'M', u'ثه'), + (0xFCE7, 'M', u'سم'), + (0xFCE8, 'M', u'سه'), + (0xFCE9, 'M', u'شم'), + (0xFCEA, 'M', u'شه'), + (0xFCEB, 'M', u'كل'), + (0xFCEC, 'M', u'كم'), + (0xFCED, 'M', u'لم'), + (0xFCEE, 'M', u'نم'), + (0xFCEF, 'M', u'نه'), + (0xFCF0, 'M', u'يم'), + (0xFCF1, 'M', u'يه'), + (0xFCF2, 'M', u'ـَّ'), + (0xFCF3, 'M', u'ـُّ'), + (0xFCF4, 'M', u'ـِّ'), + (0xFCF5, 'M', u'طى'), + (0xFCF6, 'M', u'طي'), + (0xFCF7, 'M', u'عى'), + (0xFCF8, 'M', u'عي'), + (0xFCF9, 'M', u'غى'), + (0xFCFA, 'M', u'غي'), + (0xFCFB, 'M', u'سى'), + (0xFCFC, 'M', u'سي'), + (0xFCFD, 'M', u'شى'), + (0xFCFE, 'M', u'شي'), + (0xFCFF, 'M', u'حى'), + (0xFD00, 'M', u'حي'), + (0xFD01, 'M', u'جى'), + (0xFD02, 'M', u'جي'), + (0xFD03, 'M', u'خى'), + (0xFD04, 'M', u'خي'), + (0xFD05, 'M', u'صى'), + (0xFD06, 'M', u'صي'), + (0xFD07, 'M', u'ضى'), + (0xFD08, 'M', u'ضي'), + (0xFD09, 'M', u'شج'), + (0xFD0A, 'M', u'شح'), + (0xFD0B, 'M', u'شخ'), + (0xFD0C, 'M', u'شم'), + (0xFD0D, 'M', u'شر'), + (0xFD0E, 'M', u'سر'), + (0xFD0F, 'M', u'صر'), + (0xFD10, 'M', u'ضر'), + (0xFD11, 'M', u'طى'), + (0xFD12, 'M', u'طي'), + (0xFD13, 'M', u'عى'), + (0xFD14, 'M', u'عي'), + (0xFD15, 'M', u'غى'), + (0xFD16, 'M', u'غي'), + (0xFD17, 'M', u'سى'), + (0xFD18, 'M', u'سي'), + (0xFD19, 'M', u'شى'), + (0xFD1A, 'M', u'شي'), + (0xFD1B, 'M', u'حى'), + (0xFD1C, 'M', u'حي'), + (0xFD1D, 'M', u'جى'), + (0xFD1E, 'M', u'جي'), + (0xFD1F, 'M', u'خى'), + (0xFD20, 'M', u'خي'), + (0xFD21, 'M', u'صى'), + (0xFD22, 'M', u'صي'), + (0xFD23, 'M', u'ضى'), + (0xFD24, 'M', u'ضي'), + (0xFD25, 'M', u'شج'), + (0xFD26, 'M', u'شح'), + (0xFD27, 'M', u'شخ'), + (0xFD28, 'M', u'شم'), + (0xFD29, 'M', u'شر'), + (0xFD2A, 'M', u'سر'), + (0xFD2B, 'M', 
u'صر'), + (0xFD2C, 'M', u'ضر'), + (0xFD2D, 'M', u'شج'), + (0xFD2E, 'M', u'شح'), + (0xFD2F, 'M', u'شخ'), + (0xFD30, 'M', u'شم'), + (0xFD31, 'M', u'سه'), + (0xFD32, 'M', u'شه'), + (0xFD33, 'M', u'طم'), + (0xFD34, 'M', u'سج'), + (0xFD35, 'M', u'سح'), + (0xFD36, 'M', u'سخ'), + (0xFD37, 'M', u'شج'), + (0xFD38, 'M', u'شح'), + (0xFD39, 'M', u'شخ'), + (0xFD3A, 'M', u'طم'), + (0xFD3B, 'M', u'ظم'), + (0xFD3C, 'M', u'اً'), + (0xFD3E, 'V'), + (0xFD40, 'X'), + (0xFD50, 'M', u'تجم'), + (0xFD51, 'M', u'تحج'), + (0xFD53, 'M', u'تحم'), + (0xFD54, 'M', u'تخم'), + (0xFD55, 'M', u'تمج'), + (0xFD56, 'M', u'تمح'), + (0xFD57, 'M', u'تمخ'), + (0xFD58, 'M', u'جمح'), + (0xFD5A, 'M', u'حمي'), + (0xFD5B, 'M', u'حمى'), + (0xFD5C, 'M', u'سحج'), + (0xFD5D, 'M', u'سجح'), + (0xFD5E, 'M', u'سجى'), + (0xFD5F, 'M', u'سمح'), + (0xFD61, 'M', u'سمج'), + (0xFD62, 'M', u'سمم'), + (0xFD64, 'M', u'صحح'), + (0xFD66, 'M', u'صمم'), + (0xFD67, 'M', u'شحم'), + (0xFD69, 'M', u'شجي'), + (0xFD6A, 'M', u'شمخ'), + (0xFD6C, 'M', u'شمم'), + (0xFD6E, 'M', u'ضحى'), + (0xFD6F, 'M', u'ضخم'), + (0xFD71, 'M', u'طمح'), + (0xFD73, 'M', u'طمم'), + (0xFD74, 'M', u'طمي'), + (0xFD75, 'M', u'عجم'), + (0xFD76, 'M', u'عمم'), + (0xFD78, 'M', u'عمى'), + (0xFD79, 'M', u'غمم'), + (0xFD7A, 'M', u'غمي'), + (0xFD7B, 'M', u'غمى'), + (0xFD7C, 'M', u'فخم'), + (0xFD7E, 'M', u'قمح'), + (0xFD7F, 'M', u'قمم'), + (0xFD80, 'M', u'لحم'), + (0xFD81, 'M', u'لحي'), + (0xFD82, 'M', u'لحى'), + (0xFD83, 'M', u'لجج'), + (0xFD85, 'M', u'لخم'), + (0xFD87, 'M', u'لمح'), + (0xFD89, 'M', u'محج'), + (0xFD8A, 'M', u'محم'), + (0xFD8B, 'M', u'محي'), + (0xFD8C, 'M', u'مجح'), + (0xFD8D, 'M', u'مجم'), + (0xFD8E, 'M', u'مخج'), + (0xFD8F, 'M', u'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', u'مجخ'), + (0xFD93, 'M', u'همج'), + (0xFD94, 'M', u'همم'), + (0xFD95, 'M', u'نحم'), + (0xFD96, 'M', u'نحى'), + (0xFD97, 'M', u'نجم'), + (0xFD99, 'M', u'نجى'), + (0xFD9A, 'M', u'نمي'), + (0xFD9B, 'M', u'نمى'), + (0xFD9C, 'M', u'يمم'), + (0xFD9E, 'M', u'بخي'), + (0xFD9F, 'M', u'تجي'), + (0xFDA0, 'M', u'تجى'), + (0xFDA1, 'M', u'تخي'), + (0xFDA2, 'M', u'تخى'), + (0xFDA3, 'M', u'تمي'), + (0xFDA4, 'M', u'تمى'), + (0xFDA5, 'M', u'جمي'), + (0xFDA6, 'M', u'جحى'), + (0xFDA7, 'M', u'جمى'), + (0xFDA8, 'M', u'سخى'), + (0xFDA9, 'M', u'صحي'), + (0xFDAA, 'M', u'شحي'), + (0xFDAB, 'M', u'ضحي'), + (0xFDAC, 'M', u'لجي'), + (0xFDAD, 'M', u'لمي'), + (0xFDAE, 'M', u'يحي'), + (0xFDAF, 'M', u'يجي'), + (0xFDB0, 'M', u'يمي'), + (0xFDB1, 'M', u'ممي'), + (0xFDB2, 'M', u'قمي'), + (0xFDB3, 'M', u'نحي'), + (0xFDB4, 'M', u'قمح'), + (0xFDB5, 'M', u'لحم'), + (0xFDB6, 'M', u'عمي'), + (0xFDB7, 'M', u'كمي'), + (0xFDB8, 'M', u'نجح'), + (0xFDB9, 'M', u'مخي'), + (0xFDBA, 'M', u'لجم'), + (0xFDBB, 'M', u'كمم'), + (0xFDBC, 'M', u'لجم'), + (0xFDBD, 'M', u'نجح'), + (0xFDBE, 'M', u'جحي'), + (0xFDBF, 'M', u'حجي'), + (0xFDC0, 'M', u'مجي'), + (0xFDC1, 'M', u'فمي'), + (0xFDC2, 'M', u'بحي'), + (0xFDC3, 'M', u'كمم'), + (0xFDC4, 'M', u'عجم'), + (0xFDC5, 'M', u'صمم'), + (0xFDC6, 'M', u'سخي'), + (0xFDC7, 'M', u'نجي'), + (0xFDC8, 'X'), + (0xFDF0, 'M', u'صلے'), + (0xFDF1, 'M', u'قلے'), + (0xFDF2, 'M', u'الله'), + (0xFDF3, 'M', u'اكبر'), + (0xFDF4, 'M', u'محمد'), + (0xFDF5, 'M', u'صلعم'), + (0xFDF6, 'M', u'رسول'), + (0xFDF7, 'M', u'عليه'), + (0xFDF8, 'M', u'وسلم'), + (0xFDF9, 'M', u'صلى'), + (0xFDFA, '3', u'صلى الله عليه وسلم'), + (0xFDFB, '3', u'جل جلاله'), + (0xFDFC, 'M', u'ریال'), + (0xFDFD, 'V'), + (0xFDFE, 'X'), + (0xFE00, 'I'), + (0xFE10, '3', u','), + (0xFE11, 'M', u'、'), + (0xFE12, 'X'), + (0xFE13, '3', u':'), + (0xFE14, '3', u';'), + (0xFE15, '3', u'!'), + 
(0xFE16, '3', u'?'), + (0xFE17, 'M', u'〖'), + (0xFE18, 'M', u'〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE27, 'X'), + (0xFE31, 'M', u'—'), + (0xFE32, 'M', u'–'), + (0xFE33, '3', u'_'), + (0xFE35, '3', u'('), + (0xFE36, '3', u')'), + (0xFE37, '3', u'{'), + (0xFE38, '3', u'}'), + (0xFE39, 'M', u'〔'), + (0xFE3A, 'M', u'〕'), + (0xFE3B, 'M', u'【'), + (0xFE3C, 'M', u'】'), + (0xFE3D, 'M', u'《'), + (0xFE3E, 'M', u'》'), + (0xFE3F, 'M', u'〈'), + (0xFE40, 'M', u'〉'), + (0xFE41, 'M', u'「'), + (0xFE42, 'M', u'」'), + (0xFE43, 'M', u'『'), + (0xFE44, 'M', u'』'), + (0xFE45, 'V'), + (0xFE47, '3', u'['), + (0xFE48, '3', u']'), + (0xFE49, '3', u' ̅'), + (0xFE4D, '3', u'_'), + (0xFE50, '3', u','), + (0xFE51, 'M', u'、'), + (0xFE52, 'X'), + (0xFE54, '3', u';'), + (0xFE55, '3', u':'), + (0xFE56, '3', u'?'), + (0xFE57, '3', u'!'), + (0xFE58, 'M', u'—'), + (0xFE59, '3', u'('), + (0xFE5A, '3', u')'), + (0xFE5B, '3', u'{'), + (0xFE5C, '3', u'}'), + (0xFE5D, 'M', u'〔'), + (0xFE5E, 'M', u'〕'), + (0xFE5F, '3', u'#'), + (0xFE60, '3', u'&'), + (0xFE61, '3', u'*'), + (0xFE62, '3', u'+'), + (0xFE63, 'M', u'-'), + (0xFE64, '3', u'<'), + (0xFE65, '3', u'>'), + (0xFE66, '3', u'='), + (0xFE67, 'X'), + (0xFE68, '3', u'\\'), + (0xFE69, '3', u'$'), + (0xFE6A, '3', u'%'), + (0xFE6B, '3', u'@'), + (0xFE6C, 'X'), + (0xFE70, '3', u' ً'), + (0xFE71, 'M', u'ـً'), + (0xFE72, '3', u' ٌ'), + (0xFE73, 'V'), + (0xFE74, '3', u' ٍ'), + (0xFE75, 'X'), + (0xFE76, '3', u' َ'), + (0xFE77, 'M', u'ـَ'), + (0xFE78, '3', u' ُ'), + (0xFE79, 'M', u'ـُ'), + (0xFE7A, '3', u' ِ'), + (0xFE7B, 'M', u'ـِ'), + (0xFE7C, '3', u' ّ'), + (0xFE7D, 'M', u'ـّ'), + (0xFE7E, '3', u' ْ'), + (0xFE7F, 'M', u'ـْ'), + (0xFE80, 'M', u'ء'), + (0xFE81, 'M', u'آ'), + (0xFE83, 'M', u'أ'), + (0xFE85, 'M', u'ؤ'), + (0xFE87, 'M', u'إ'), + (0xFE89, 'M', u'ئ'), + (0xFE8D, 'M', u'ا'), + (0xFE8F, 'M', u'ب'), + (0xFE93, 'M', u'ة'), + (0xFE95, 'M', u'ت'), + (0xFE99, 'M', u'ث'), + (0xFE9D, 'M', u'ج'), + (0xFEA1, 'M', u'ح'), + (0xFEA5, 'M', u'خ'), + (0xFEA9, 'M', u'د'), + (0xFEAB, 'M', u'ذ'), + (0xFEAD, 'M', u'ر'), + (0xFEAF, 'M', u'ز'), + (0xFEB1, 'M', u'س'), + (0xFEB5, 'M', u'ش'), + (0xFEB9, 'M', u'ص'), + (0xFEBD, 'M', u'ض'), + (0xFEC1, 'M', u'ط'), + (0xFEC5, 'M', u'ظ'), + (0xFEC9, 'M', u'ع'), + (0xFECD, 'M', u'غ'), + (0xFED1, 'M', u'ف'), + (0xFED5, 'M', u'ق'), + (0xFED9, 'M', u'ك'), + (0xFEDD, 'M', u'ل'), + (0xFEE1, 'M', u'م'), + (0xFEE5, 'M', u'ن'), + (0xFEE9, 'M', u'ه'), + (0xFEED, 'M', u'و'), + (0xFEEF, 'M', u'ى'), + (0xFEF1, 'M', u'ي'), + (0xFEF5, 'M', u'لآ'), + (0xFEF7, 'M', u'لأ'), + (0xFEF9, 'M', u'لإ'), + (0xFEFB, 'M', u'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', u'!'), + (0xFF02, '3', u'"'), + (0xFF03, '3', u'#'), + (0xFF04, '3', u'$'), + (0xFF05, '3', u'%'), + (0xFF06, '3', u'&'), + (0xFF07, '3', u'\''), + (0xFF08, '3', u'('), + (0xFF09, '3', u')'), + (0xFF0A, '3', u'*'), + (0xFF0B, '3', u'+'), + (0xFF0C, '3', u','), + (0xFF0D, 'M', u'-'), + (0xFF0E, 'M', u'.'), + (0xFF0F, '3', u'/'), + (0xFF10, 'M', u'0'), + (0xFF11, 'M', u'1'), + (0xFF12, 'M', u'2'), + (0xFF13, 'M', u'3'), + (0xFF14, 'M', u'4'), + (0xFF15, 'M', u'5'), + (0xFF16, 'M', u'6'), + (0xFF17, 'M', u'7'), + (0xFF18, 'M', u'8'), + (0xFF19, 'M', u'9'), + (0xFF1A, '3', u':'), + (0xFF1B, '3', u';'), + (0xFF1C, '3', u'<'), + (0xFF1D, '3', u'='), + (0xFF1E, '3', u'>'), + (0xFF1F, '3', u'?'), + (0xFF20, '3', u'@'), + (0xFF21, 'M', u'a'), + (0xFF22, 'M', u'b'), + (0xFF23, 'M', u'c'), + (0xFF24, 'M', u'd'), + (0xFF25, 'M', u'e'), + (0xFF26, 'M', u'f'), + (0xFF27, 'M', u'g'), + (0xFF28, 'M', 
u'h'), + (0xFF29, 'M', u'i'), + (0xFF2A, 'M', u'j'), + (0xFF2B, 'M', u'k'), + (0xFF2C, 'M', u'l'), + (0xFF2D, 'M', u'm'), + (0xFF2E, 'M', u'n'), + (0xFF2F, 'M', u'o'), + (0xFF30, 'M', u'p'), + (0xFF31, 'M', u'q'), + (0xFF32, 'M', u'r'), + (0xFF33, 'M', u's'), + (0xFF34, 'M', u't'), + (0xFF35, 'M', u'u'), + (0xFF36, 'M', u'v'), + (0xFF37, 'M', u'w'), + (0xFF38, 'M', u'x'), + (0xFF39, 'M', u'y'), + (0xFF3A, 'M', u'z'), + (0xFF3B, '3', u'['), + (0xFF3C, '3', u'\\'), + (0xFF3D, '3', u']'), + (0xFF3E, '3', u'^'), + (0xFF3F, '3', u'_'), + (0xFF40, '3', u'`'), + (0xFF41, 'M', u'a'), + (0xFF42, 'M', u'b'), + (0xFF43, 'M', u'c'), + (0xFF44, 'M', u'd'), + (0xFF45, 'M', u'e'), + (0xFF46, 'M', u'f'), + (0xFF47, 'M', u'g'), + (0xFF48, 'M', u'h'), + (0xFF49, 'M', u'i'), + (0xFF4A, 'M', u'j'), + (0xFF4B, 'M', u'k'), + (0xFF4C, 'M', u'l'), + (0xFF4D, 'M', u'm'), + (0xFF4E, 'M', u'n'), + (0xFF4F, 'M', u'o'), + (0xFF50, 'M', u'p'), + (0xFF51, 'M', u'q'), + (0xFF52, 'M', u'r'), + (0xFF53, 'M', u's'), + (0xFF54, 'M', u't'), + (0xFF55, 'M', u'u'), + (0xFF56, 'M', u'v'), + (0xFF57, 'M', u'w'), + (0xFF58, 'M', u'x'), + (0xFF59, 'M', u'y'), + (0xFF5A, 'M', u'z'), + (0xFF5B, '3', u'{'), + (0xFF5C, '3', u'|'), + (0xFF5D, '3', u'}'), + (0xFF5E, '3', u'~'), + (0xFF5F, 'M', u'⦅'), + (0xFF60, 'M', u'⦆'), + (0xFF61, 'M', u'.'), + (0xFF62, 'M', u'「'), + (0xFF63, 'M', u'」'), + (0xFF64, 'M', u'、'), + (0xFF65, 'M', u'・'), + (0xFF66, 'M', u'ヲ'), + (0xFF67, 'M', u'ァ'), + (0xFF68, 'M', u'ィ'), + (0xFF69, 'M', u'ゥ'), + (0xFF6A, 'M', u'ェ'), + (0xFF6B, 'M', u'ォ'), + (0xFF6C, 'M', u'ャ'), + (0xFF6D, 'M', u'ュ'), + (0xFF6E, 'M', u'ョ'), + (0xFF6F, 'M', u'ッ'), + (0xFF70, 'M', u'ー'), + (0xFF71, 'M', u'ア'), + (0xFF72, 'M', u'イ'), + (0xFF73, 'M', u'ウ'), + (0xFF74, 'M', u'エ'), + (0xFF75, 'M', u'オ'), + (0xFF76, 'M', u'カ'), + (0xFF77, 'M', u'キ'), + (0xFF78, 'M', u'ク'), + (0xFF79, 'M', u'ケ'), + (0xFF7A, 'M', u'コ'), + (0xFF7B, 'M', u'サ'), + (0xFF7C, 'M', u'シ'), + (0xFF7D, 'M', u'ス'), + (0xFF7E, 'M', u'セ'), + (0xFF7F, 'M', u'ソ'), + (0xFF80, 'M', u'タ'), + (0xFF81, 'M', u'チ'), + (0xFF82, 'M', u'ツ'), + (0xFF83, 'M', u'テ'), + (0xFF84, 'M', u'ト'), + (0xFF85, 'M', u'ナ'), + (0xFF86, 'M', u'ニ'), + (0xFF87, 'M', u'ヌ'), + (0xFF88, 'M', u'ネ'), + (0xFF89, 'M', u'ノ'), + (0xFF8A, 'M', u'ハ'), + (0xFF8B, 'M', u'ヒ'), + (0xFF8C, 'M', u'フ'), + (0xFF8D, 'M', u'ヘ'), + (0xFF8E, 'M', u'ホ'), + (0xFF8F, 'M', u'マ'), + (0xFF90, 'M', u'ミ'), + (0xFF91, 'M', u'ム'), + (0xFF92, 'M', u'メ'), + (0xFF93, 'M', u'モ'), + (0xFF94, 'M', u'ヤ'), + (0xFF95, 'M', u'ユ'), + (0xFF96, 'M', u'ヨ'), + (0xFF97, 'M', u'ラ'), + (0xFF98, 'M', u'リ'), + (0xFF99, 'M', u'ル'), + (0xFF9A, 'M', u'レ'), + (0xFF9B, 'M', u'ロ'), + (0xFF9C, 'M', u'ワ'), + (0xFF9D, 'M', u'ン'), + (0xFF9E, 'M', u'゙'), + (0xFF9F, 'M', u'゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', u'ᄀ'), + (0xFFA2, 'M', u'ᄁ'), + (0xFFA3, 'M', u'ᆪ'), + (0xFFA4, 'M', u'ᄂ'), + (0xFFA5, 'M', u'ᆬ'), + (0xFFA6, 'M', u'ᆭ'), + (0xFFA7, 'M', u'ᄃ'), + (0xFFA8, 'M', u'ᄄ'), + (0xFFA9, 'M', u'ᄅ'), + (0xFFAA, 'M', u'ᆰ'), + (0xFFAB, 'M', u'ᆱ'), + (0xFFAC, 'M', u'ᆲ'), + (0xFFAD, 'M', u'ᆳ'), + (0xFFAE, 'M', u'ᆴ'), + (0xFFAF, 'M', u'ᆵ'), + (0xFFB0, 'M', u'ᄚ'), + (0xFFB1, 'M', u'ᄆ'), + (0xFFB2, 'M', u'ᄇ'), + (0xFFB3, 'M', u'ᄈ'), + (0xFFB4, 'M', u'ᄡ'), + (0xFFB5, 'M', u'ᄉ'), + (0xFFB6, 'M', u'ᄊ'), + (0xFFB7, 'M', u'ᄋ'), + (0xFFB8, 'M', u'ᄌ'), + (0xFFB9, 'M', u'ᄍ'), + (0xFFBA, 'M', u'ᄎ'), + (0xFFBB, 'M', u'ᄏ'), + (0xFFBC, 'M', u'ᄐ'), + (0xFFBD, 'M', u'ᄑ'), + (0xFFBE, 'M', u'ᄒ'), + (0xFFBF, 'X'), + (0xFFC2, 'M', u'ᅡ'), + (0xFFC3, 'M', u'ᅢ'), + (0xFFC4, 'M', u'ᅣ'), + (0xFFC5, 'M', 
u'ᅤ'), + (0xFFC6, 'M', u'ᅥ'), + (0xFFC7, 'M', u'ᅦ'), + (0xFFC8, 'X'), + (0xFFCA, 'M', u'ᅧ'), + (0xFFCB, 'M', u'ᅨ'), + (0xFFCC, 'M', u'ᅩ'), + (0xFFCD, 'M', u'ᅪ'), + (0xFFCE, 'M', u'ᅫ'), + (0xFFCF, 'M', u'ᅬ'), + (0xFFD0, 'X'), + (0xFFD2, 'M', u'ᅭ'), + (0xFFD3, 'M', u'ᅮ'), + (0xFFD4, 'M', u'ᅯ'), + (0xFFD5, 'M', u'ᅰ'), + (0xFFD6, 'M', u'ᅱ'), + (0xFFD7, 'M', u'ᅲ'), + (0xFFD8, 'X'), + (0xFFDA, 'M', u'ᅳ'), + (0xFFDB, 'M', u'ᅴ'), + (0xFFDC, 'M', u'ᅵ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', u'¢'), + (0xFFE1, 'M', u'£'), + (0xFFE2, 'M', u'¬'), + (0xFFE3, '3', u' ̄'), + (0xFFE4, 'M', u'¦'), + (0xFFE5, 'M', u'¥'), + (0xFFE6, 'M', u'₩'), + (0xFFE7, 'X'), + (0xFFE8, 'M', u'│'), + (0xFFE9, 'M', u'←'), + (0xFFEA, 'M', u'↑'), + (0xFFEB, 'M', u'→'), + (0xFFEC, 'M', u'↓'), + (0xFFED, 'M', u'■'), + (0xFFEE, 'M', u'○'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018B, 'X'), + (0x10190, 'V'), + (0x1019C, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x10300, 'V'), + (0x1031F, 'X'), + (0x10320, 'V'), + (0x10324, 'X'), + (0x10330, 'V'), + (0x1034B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', u'𐐨'), + (0x10401, 'M', u'𐐩'), + (0x10402, 'M', u'𐐪'), + (0x10403, 'M', u'𐐫'), + (0x10404, 'M', u'𐐬'), + (0x10405, 'M', u'𐐭'), + (0x10406, 'M', u'𐐮'), + (0x10407, 'M', u'𐐯'), + (0x10408, 'M', u'𐐰'), + (0x10409, 'M', u'𐐱'), + (0x1040A, 'M', u'𐐲'), + (0x1040B, 'M', u'𐐳'), + (0x1040C, 'M', u'𐐴'), + (0x1040D, 'M', u'𐐵'), + (0x1040E, 'M', u'𐐶'), + (0x1040F, 'M', u'𐐷'), + (0x10410, 'M', u'𐐸'), + (0x10411, 'M', u'𐐹'), + (0x10412, 'M', u'𐐺'), + (0x10413, 'M', u'𐐻'), + (0x10414, 'M', u'𐐼'), + (0x10415, 'M', u'𐐽'), + (0x10416, 'M', u'𐐾'), + (0x10417, 'M', u'𐐿'), + (0x10418, 'M', u'𐑀'), + (0x10419, 'M', u'𐑁'), + (0x1041A, 'M', u'𐑂'), + (0x1041B, 'M', u'𐑃'), + (0x1041C, 'M', u'𐑄'), + (0x1041D, 'M', u'𐑅'), + (0x1041E, 'M', u'𐑆'), + (0x1041F, 'M', u'𐑇'), + (0x10420, 'M', u'𐑈'), + (0x10421, 'M', u'𐑉'), + (0x10422, 'M', u'𐑊'), + (0x10423, 'M', u'𐑋'), + (0x10424, 'M', u'𐑌'), + (0x10425, 'M', u'𐑍'), + (0x10426, 'M', u'𐑎'), + (0x10427, 'M', u'𐑏'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x10860, 'X'), + (0x10900, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BE, 'V'), + (0x109C0, 'X'), + (0x10A00, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A34, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A48, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10A80, 'X'), + (0x10B00, 'V'), + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B80, 'X'), + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + 
(0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11070, 'X'), + (0x11080, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + (0x110C2, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11144, 'X'), + (0x11180, 'V'), + (0x111C9, 'X'), + (0x111D0, 'V'), + (0x111DA, 'X'), + (0x11680, 'V'), + (0x116B8, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x12000, 'V'), + (0x1236F, 'X'), + (0x12400, 'V'), + (0x12463, 'X'), + (0x12470, 'V'), + (0x12474, 'X'), + (0x13000, 'V'), + (0x1342F, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16F00, 'V'), + (0x16F45, 'X'), + (0x16F50, 'V'), + (0x16F7F, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x1B000, 'V'), + (0x1B002, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', u'𝅗𝅥'), + (0x1D15F, 'M', u'𝅘𝅥'), + (0x1D160, 'M', u'𝅘𝅥𝅮'), + (0x1D161, 'M', u'𝅘𝅥𝅯'), + (0x1D162, 'M', u'𝅘𝅥𝅰'), + (0x1D163, 'M', u'𝅘𝅥𝅱'), + (0x1D164, 'M', u'𝅘𝅥𝅲'), + (0x1D165, 'V'), + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', u'𝆹𝅥'), + (0x1D1BC, 'M', u'𝆺𝅥'), + (0x1D1BD, 'M', u'𝆹𝅥𝅮'), + (0x1D1BE, 'M', u'𝆺𝅥𝅮'), + (0x1D1BF, 'M', u'𝆹𝅥𝅯'), + (0x1D1C0, 'M', u'𝆺𝅥𝅯'), + (0x1D1C1, 'V'), + (0x1D1DE, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D372, 'X'), + (0x1D400, 'M', u'a'), + (0x1D401, 'M', u'b'), + (0x1D402, 'M', u'c'), + (0x1D403, 'M', u'd'), + (0x1D404, 'M', u'e'), + (0x1D405, 'M', u'f'), + (0x1D406, 'M', u'g'), + (0x1D407, 'M', u'h'), + (0x1D408, 'M', u'i'), + (0x1D409, 'M', u'j'), + (0x1D40A, 'M', u'k'), + (0x1D40B, 'M', u'l'), + (0x1D40C, 'M', u'm'), + (0x1D40D, 'M', u'n'), + (0x1D40E, 'M', u'o'), + (0x1D40F, 'M', u'p'), + (0x1D410, 'M', u'q'), + (0x1D411, 'M', u'r'), + (0x1D412, 'M', u's'), + (0x1D413, 'M', u't'), + (0x1D414, 'M', u'u'), + (0x1D415, 'M', u'v'), + (0x1D416, 'M', u'w'), + (0x1D417, 'M', u'x'), + (0x1D418, 'M', u'y'), + (0x1D419, 'M', u'z'), + (0x1D41A, 'M', u'a'), + (0x1D41B, 'M', u'b'), + (0x1D41C, 'M', u'c'), + (0x1D41D, 'M', u'd'), + (0x1D41E, 'M', u'e'), + (0x1D41F, 'M', u'f'), + (0x1D420, 'M', u'g'), + (0x1D421, 'M', u'h'), + (0x1D422, 'M', u'i'), + (0x1D423, 'M', u'j'), + (0x1D424, 'M', u'k'), + (0x1D425, 'M', u'l'), + (0x1D426, 'M', u'm'), + (0x1D427, 'M', u'n'), + (0x1D428, 'M', u'o'), + (0x1D429, 'M', u'p'), + (0x1D42A, 'M', u'q'), + (0x1D42B, 'M', u'r'), + (0x1D42C, 'M', u's'), + (0x1D42D, 'M', u't'), + (0x1D42E, 'M', u'u'), + (0x1D42F, 'M', u'v'), + (0x1D430, 'M', u'w'), + (0x1D431, 'M', u'x'), + (0x1D432, 'M', u'y'), + (0x1D433, 'M', u'z'), + (0x1D434, 'M', u'a'), + (0x1D435, 'M', u'b'), + (0x1D436, 'M', u'c'), + (0x1D437, 'M', u'd'), + (0x1D438, 'M', u'e'), + (0x1D439, 'M', u'f'), + (0x1D43A, 'M', u'g'), + (0x1D43B, 'M', u'h'), + (0x1D43C, 'M', u'i'), + (0x1D43D, 'M', u'j'), + (0x1D43E, 'M', u'k'), + (0x1D43F, 'M', u'l'), + (0x1D440, 'M', u'm'), + (0x1D441, 'M', u'n'), + (0x1D442, 'M', u'o'), + (0x1D443, 'M', u'p'), + (0x1D444, 'M', u'q'), + (0x1D445, 'M', u'r'), + (0x1D446, 'M', u's'), + (0x1D447, 'M', u't'), + (0x1D448, 'M', u'u'), + (0x1D449, 'M', u'v'), + (0x1D44A, 'M', u'w'), + (0x1D44B, 'M', u'x'), + (0x1D44C, 'M', u'y'), + (0x1D44D, 'M', u'z'), + (0x1D44E, 'M', u'a'), + (0x1D44F, 'M', u'b'), + (0x1D450, 'M', u'c'), + (0x1D451, 'M', u'd'), + (0x1D452, 'M', u'e'), + (0x1D453, 'M', u'f'), + (0x1D454, 'M', u'g'), + (0x1D455, 'X'), + (0x1D456, 'M', u'i'), + (0x1D457, 'M', u'j'), + (0x1D458, 'M', u'k'), + (0x1D459, 'M', u'l'), + (0x1D45A, 'M', u'm'), 
+ (0x1D45B, 'M', u'n'), + (0x1D45C, 'M', u'o'), + (0x1D45D, 'M', u'p'), + (0x1D45E, 'M', u'q'), + (0x1D45F, 'M', u'r'), + (0x1D460, 'M', u's'), + (0x1D461, 'M', u't'), + (0x1D462, 'M', u'u'), + (0x1D463, 'M', u'v'), + (0x1D464, 'M', u'w'), + (0x1D465, 'M', u'x'), + (0x1D466, 'M', u'y'), + (0x1D467, 'M', u'z'), + (0x1D468, 'M', u'a'), + (0x1D469, 'M', u'b'), + (0x1D46A, 'M', u'c'), + (0x1D46B, 'M', u'd'), + (0x1D46C, 'M', u'e'), + (0x1D46D, 'M', u'f'), + (0x1D46E, 'M', u'g'), + (0x1D46F, 'M', u'h'), + (0x1D470, 'M', u'i'), + (0x1D471, 'M', u'j'), + (0x1D472, 'M', u'k'), + (0x1D473, 'M', u'l'), + (0x1D474, 'M', u'm'), + (0x1D475, 'M', u'n'), + (0x1D476, 'M', u'o'), + (0x1D477, 'M', u'p'), + (0x1D478, 'M', u'q'), + (0x1D479, 'M', u'r'), + (0x1D47A, 'M', u's'), + (0x1D47B, 'M', u't'), + (0x1D47C, 'M', u'u'), + (0x1D47D, 'M', u'v'), + (0x1D47E, 'M', u'w'), + (0x1D47F, 'M', u'x'), + (0x1D480, 'M', u'y'), + (0x1D481, 'M', u'z'), + (0x1D482, 'M', u'a'), + (0x1D483, 'M', u'b'), + (0x1D484, 'M', u'c'), + (0x1D485, 'M', u'd'), + (0x1D486, 'M', u'e'), + (0x1D487, 'M', u'f'), + (0x1D488, 'M', u'g'), + (0x1D489, 'M', u'h'), + (0x1D48A, 'M', u'i'), + (0x1D48B, 'M', u'j'), + (0x1D48C, 'M', u'k'), + (0x1D48D, 'M', u'l'), + (0x1D48E, 'M', u'm'), + (0x1D48F, 'M', u'n'), + (0x1D490, 'M', u'o'), + (0x1D491, 'M', u'p'), + (0x1D492, 'M', u'q'), + (0x1D493, 'M', u'r'), + (0x1D494, 'M', u's'), + (0x1D495, 'M', u't'), + (0x1D496, 'M', u'u'), + (0x1D497, 'M', u'v'), + (0x1D498, 'M', u'w'), + (0x1D499, 'M', u'x'), + (0x1D49A, 'M', u'y'), + (0x1D49B, 'M', u'z'), + (0x1D49C, 'M', u'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', u'c'), + (0x1D49F, 'M', u'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', u'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', u'j'), + (0x1D4A6, 'M', u'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', u'n'), + (0x1D4AA, 'M', u'o'), + (0x1D4AB, 'M', u'p'), + (0x1D4AC, 'M', u'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', u's'), + (0x1D4AF, 'M', u't'), + (0x1D4B0, 'M', u'u'), + (0x1D4B1, 'M', u'v'), + (0x1D4B2, 'M', u'w'), + (0x1D4B3, 'M', u'x'), + (0x1D4B4, 'M', u'y'), + (0x1D4B5, 'M', u'z'), + (0x1D4B6, 'M', u'a'), + (0x1D4B7, 'M', u'b'), + (0x1D4B8, 'M', u'c'), + (0x1D4B9, 'M', u'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 'M', u'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', u'h'), + (0x1D4BE, 'M', u'i'), + (0x1D4BF, 'M', u'j'), + (0x1D4C0, 'M', u'k'), + (0x1D4C1, 'M', u'l'), + (0x1D4C2, 'M', u'm'), + (0x1D4C3, 'M', u'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', u'p'), + (0x1D4C6, 'M', u'q'), + (0x1D4C7, 'M', u'r'), + (0x1D4C8, 'M', u's'), + (0x1D4C9, 'M', u't'), + (0x1D4CA, 'M', u'u'), + (0x1D4CB, 'M', u'v'), + (0x1D4CC, 'M', u'w'), + (0x1D4CD, 'M', u'x'), + (0x1D4CE, 'M', u'y'), + (0x1D4CF, 'M', u'z'), + (0x1D4D0, 'M', u'a'), + (0x1D4D1, 'M', u'b'), + (0x1D4D2, 'M', u'c'), + (0x1D4D3, 'M', u'd'), + (0x1D4D4, 'M', u'e'), + (0x1D4D5, 'M', u'f'), + (0x1D4D6, 'M', u'g'), + (0x1D4D7, 'M', u'h'), + (0x1D4D8, 'M', u'i'), + (0x1D4D9, 'M', u'j'), + (0x1D4DA, 'M', u'k'), + (0x1D4DB, 'M', u'l'), + (0x1D4DC, 'M', u'm'), + (0x1D4DD, 'M', u'n'), + (0x1D4DE, 'M', u'o'), + (0x1D4DF, 'M', u'p'), + (0x1D4E0, 'M', u'q'), + (0x1D4E1, 'M', u'r'), + (0x1D4E2, 'M', u's'), + (0x1D4E3, 'M', u't'), + (0x1D4E4, 'M', u'u'), + (0x1D4E5, 'M', u'v'), + (0x1D4E6, 'M', u'w'), + (0x1D4E7, 'M', u'x'), + (0x1D4E8, 'M', u'y'), + (0x1D4E9, 'M', u'z'), + (0x1D4EA, 'M', u'a'), + (0x1D4EB, 'M', u'b'), + (0x1D4EC, 'M', u'c'), + (0x1D4ED, 'M', u'd'), + (0x1D4EE, 'M', u'e'), + (0x1D4EF, 'M', u'f'), + (0x1D4F0, 'M', u'g'), + (0x1D4F1, 'M', u'h'), + (0x1D4F2, 'M', u'i'), + (0x1D4F3, 'M', u'j'), + 
(0x1D4F4, 'M', u'k'), + (0x1D4F5, 'M', u'l'), + (0x1D4F6, 'M', u'm'), + (0x1D4F7, 'M', u'n'), + (0x1D4F8, 'M', u'o'), + (0x1D4F9, 'M', u'p'), + (0x1D4FA, 'M', u'q'), + (0x1D4FB, 'M', u'r'), + (0x1D4FC, 'M', u's'), + (0x1D4FD, 'M', u't'), + (0x1D4FE, 'M', u'u'), + (0x1D4FF, 'M', u'v'), + (0x1D500, 'M', u'w'), + (0x1D501, 'M', u'x'), + (0x1D502, 'M', u'y'), + (0x1D503, 'M', u'z'), + (0x1D504, 'M', u'a'), + (0x1D505, 'M', u'b'), + (0x1D506, 'X'), + (0x1D507, 'M', u'd'), + (0x1D508, 'M', u'e'), + (0x1D509, 'M', u'f'), + (0x1D50A, 'M', u'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', u'j'), + (0x1D50E, 'M', u'k'), + (0x1D50F, 'M', u'l'), + (0x1D510, 'M', u'm'), + (0x1D511, 'M', u'n'), + (0x1D512, 'M', u'o'), + (0x1D513, 'M', u'p'), + (0x1D514, 'M', u'q'), + (0x1D515, 'X'), + (0x1D516, 'M', u's'), + (0x1D517, 'M', u't'), + (0x1D518, 'M', u'u'), + (0x1D519, 'M', u'v'), + (0x1D51A, 'M', u'w'), + (0x1D51B, 'M', u'x'), + (0x1D51C, 'M', u'y'), + (0x1D51D, 'X'), + (0x1D51E, 'M', u'a'), + (0x1D51F, 'M', u'b'), + (0x1D520, 'M', u'c'), + (0x1D521, 'M', u'd'), + (0x1D522, 'M', u'e'), + (0x1D523, 'M', u'f'), + (0x1D524, 'M', u'g'), + (0x1D525, 'M', u'h'), + (0x1D526, 'M', u'i'), + (0x1D527, 'M', u'j'), + (0x1D528, 'M', u'k'), + (0x1D529, 'M', u'l'), + (0x1D52A, 'M', u'm'), + (0x1D52B, 'M', u'n'), + (0x1D52C, 'M', u'o'), + (0x1D52D, 'M', u'p'), + (0x1D52E, 'M', u'q'), + (0x1D52F, 'M', u'r'), + (0x1D530, 'M', u's'), + (0x1D531, 'M', u't'), + (0x1D532, 'M', u'u'), + (0x1D533, 'M', u'v'), + (0x1D534, 'M', u'w'), + (0x1D535, 'M', u'x'), + (0x1D536, 'M', u'y'), + (0x1D537, 'M', u'z'), + (0x1D538, 'M', u'a'), + (0x1D539, 'M', u'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', u'd'), + (0x1D53C, 'M', u'e'), + (0x1D53D, 'M', u'f'), + (0x1D53E, 'M', u'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', u'i'), + (0x1D541, 'M', u'j'), + (0x1D542, 'M', u'k'), + (0x1D543, 'M', u'l'), + (0x1D544, 'M', u'm'), + (0x1D545, 'X'), + (0x1D546, 'M', u'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', u's'), + (0x1D54B, 'M', u't'), + (0x1D54C, 'M', u'u'), + (0x1D54D, 'M', u'v'), + (0x1D54E, 'M', u'w'), + (0x1D54F, 'M', u'x'), + (0x1D550, 'M', u'y'), + (0x1D551, 'X'), + (0x1D552, 'M', u'a'), + (0x1D553, 'M', u'b'), + (0x1D554, 'M', u'c'), + (0x1D555, 'M', u'd'), + (0x1D556, 'M', u'e'), + (0x1D557, 'M', u'f'), + (0x1D558, 'M', u'g'), + (0x1D559, 'M', u'h'), + (0x1D55A, 'M', u'i'), + (0x1D55B, 'M', u'j'), + (0x1D55C, 'M', u'k'), + (0x1D55D, 'M', u'l'), + (0x1D55E, 'M', u'm'), + (0x1D55F, 'M', u'n'), + (0x1D560, 'M', u'o'), + (0x1D561, 'M', u'p'), + (0x1D562, 'M', u'q'), + (0x1D563, 'M', u'r'), + (0x1D564, 'M', u's'), + (0x1D565, 'M', u't'), + (0x1D566, 'M', u'u'), + (0x1D567, 'M', u'v'), + (0x1D568, 'M', u'w'), + (0x1D569, 'M', u'x'), + (0x1D56A, 'M', u'y'), + (0x1D56B, 'M', u'z'), + (0x1D56C, 'M', u'a'), + (0x1D56D, 'M', u'b'), + (0x1D56E, 'M', u'c'), + (0x1D56F, 'M', u'd'), + (0x1D570, 'M', u'e'), + (0x1D571, 'M', u'f'), + (0x1D572, 'M', u'g'), + (0x1D573, 'M', u'h'), + (0x1D574, 'M', u'i'), + (0x1D575, 'M', u'j'), + (0x1D576, 'M', u'k'), + (0x1D577, 'M', u'l'), + (0x1D578, 'M', u'm'), + (0x1D579, 'M', u'n'), + (0x1D57A, 'M', u'o'), + (0x1D57B, 'M', u'p'), + (0x1D57C, 'M', u'q'), + (0x1D57D, 'M', u'r'), + (0x1D57E, 'M', u's'), + (0x1D57F, 'M', u't'), + (0x1D580, 'M', u'u'), + (0x1D581, 'M', u'v'), + (0x1D582, 'M', u'w'), + (0x1D583, 'M', u'x'), + (0x1D584, 'M', u'y'), + (0x1D585, 'M', u'z'), + (0x1D586, 'M', u'a'), + (0x1D587, 'M', u'b'), + (0x1D588, 'M', u'c'), + (0x1D589, 'M', u'd'), + (0x1D58A, 'M', u'e'), + (0x1D58B, 'M', u'f'), + (0x1D58C, 'M', u'g'), + 
(0x1D58D, 'M', u'h'), + (0x1D58E, 'M', u'i'), + (0x1D58F, 'M', u'j'), + (0x1D590, 'M', u'k'), + (0x1D591, 'M', u'l'), + (0x1D592, 'M', u'm'), + (0x1D593, 'M', u'n'), + (0x1D594, 'M', u'o'), + (0x1D595, 'M', u'p'), + (0x1D596, 'M', u'q'), + (0x1D597, 'M', u'r'), + (0x1D598, 'M', u's'), + (0x1D599, 'M', u't'), + (0x1D59A, 'M', u'u'), + (0x1D59B, 'M', u'v'), + (0x1D59C, 'M', u'w'), + (0x1D59D, 'M', u'x'), + (0x1D59E, 'M', u'y'), + (0x1D59F, 'M', u'z'), + (0x1D5A0, 'M', u'a'), + (0x1D5A1, 'M', u'b'), + (0x1D5A2, 'M', u'c'), + (0x1D5A3, 'M', u'd'), + (0x1D5A4, 'M', u'e'), + (0x1D5A5, 'M', u'f'), + (0x1D5A6, 'M', u'g'), + (0x1D5A7, 'M', u'h'), + (0x1D5A8, 'M', u'i'), + (0x1D5A9, 'M', u'j'), + (0x1D5AA, 'M', u'k'), + (0x1D5AB, 'M', u'l'), + (0x1D5AC, 'M', u'm'), + (0x1D5AD, 'M', u'n'), + (0x1D5AE, 'M', u'o'), + (0x1D5AF, 'M', u'p'), + (0x1D5B0, 'M', u'q'), + (0x1D5B1, 'M', u'r'), + (0x1D5B2, 'M', u's'), + (0x1D5B3, 'M', u't'), + (0x1D5B4, 'M', u'u'), + (0x1D5B5, 'M', u'v'), + (0x1D5B6, 'M', u'w'), + (0x1D5B7, 'M', u'x'), + (0x1D5B8, 'M', u'y'), + (0x1D5B9, 'M', u'z'), + (0x1D5BA, 'M', u'a'), + (0x1D5BB, 'M', u'b'), + (0x1D5BC, 'M', u'c'), + (0x1D5BD, 'M', u'd'), + (0x1D5BE, 'M', u'e'), + (0x1D5BF, 'M', u'f'), + (0x1D5C0, 'M', u'g'), + (0x1D5C1, 'M', u'h'), + (0x1D5C2, 'M', u'i'), + (0x1D5C3, 'M', u'j'), + (0x1D5C4, 'M', u'k'), + (0x1D5C5, 'M', u'l'), + (0x1D5C6, 'M', u'm'), + (0x1D5C7, 'M', u'n'), + (0x1D5C8, 'M', u'o'), + (0x1D5C9, 'M', u'p'), + (0x1D5CA, 'M', u'q'), + (0x1D5CB, 'M', u'r'), + (0x1D5CC, 'M', u's'), + (0x1D5CD, 'M', u't'), + (0x1D5CE, 'M', u'u'), + (0x1D5CF, 'M', u'v'), + (0x1D5D0, 'M', u'w'), + (0x1D5D1, 'M', u'x'), + (0x1D5D2, 'M', u'y'), + (0x1D5D3, 'M', u'z'), + (0x1D5D4, 'M', u'a'), + (0x1D5D5, 'M', u'b'), + (0x1D5D6, 'M', u'c'), + (0x1D5D7, 'M', u'd'), + (0x1D5D8, 'M', u'e'), + (0x1D5D9, 'M', u'f'), + (0x1D5DA, 'M', u'g'), + (0x1D5DB, 'M', u'h'), + (0x1D5DC, 'M', u'i'), + (0x1D5DD, 'M', u'j'), + (0x1D5DE, 'M', u'k'), + (0x1D5DF, 'M', u'l'), + (0x1D5E0, 'M', u'm'), + (0x1D5E1, 'M', u'n'), + (0x1D5E2, 'M', u'o'), + (0x1D5E3, 'M', u'p'), + (0x1D5E4, 'M', u'q'), + (0x1D5E5, 'M', u'r'), + (0x1D5E6, 'M', u's'), + (0x1D5E7, 'M', u't'), + (0x1D5E8, 'M', u'u'), + (0x1D5E9, 'M', u'v'), + (0x1D5EA, 'M', u'w'), + (0x1D5EB, 'M', u'x'), + (0x1D5EC, 'M', u'y'), + (0x1D5ED, 'M', u'z'), + (0x1D5EE, 'M', u'a'), + (0x1D5EF, 'M', u'b'), + (0x1D5F0, 'M', u'c'), + (0x1D5F1, 'M', u'd'), + (0x1D5F2, 'M', u'e'), + (0x1D5F3, 'M', u'f'), + (0x1D5F4, 'M', u'g'), + (0x1D5F5, 'M', u'h'), + (0x1D5F6, 'M', u'i'), + (0x1D5F7, 'M', u'j'), + (0x1D5F8, 'M', u'k'), + (0x1D5F9, 'M', u'l'), + (0x1D5FA, 'M', u'm'), + (0x1D5FB, 'M', u'n'), + (0x1D5FC, 'M', u'o'), + (0x1D5FD, 'M', u'p'), + (0x1D5FE, 'M', u'q'), + (0x1D5FF, 'M', u'r'), + (0x1D600, 'M', u's'), + (0x1D601, 'M', u't'), + (0x1D602, 'M', u'u'), + (0x1D603, 'M', u'v'), + (0x1D604, 'M', u'w'), + (0x1D605, 'M', u'x'), + (0x1D606, 'M', u'y'), + (0x1D607, 'M', u'z'), + (0x1D608, 'M', u'a'), + (0x1D609, 'M', u'b'), + (0x1D60A, 'M', u'c'), + (0x1D60B, 'M', u'd'), + (0x1D60C, 'M', u'e'), + (0x1D60D, 'M', u'f'), + (0x1D60E, 'M', u'g'), + (0x1D60F, 'M', u'h'), + (0x1D610, 'M', u'i'), + (0x1D611, 'M', u'j'), + (0x1D612, 'M', u'k'), + (0x1D613, 'M', u'l'), + (0x1D614, 'M', u'm'), + (0x1D615, 'M', u'n'), + (0x1D616, 'M', u'o'), + (0x1D617, 'M', u'p'), + (0x1D618, 'M', u'q'), + (0x1D619, 'M', u'r'), + (0x1D61A, 'M', u's'), + (0x1D61B, 'M', u't'), + (0x1D61C, 'M', u'u'), + (0x1D61D, 'M', u'v'), + (0x1D61E, 'M', u'w'), + (0x1D61F, 'M', u'x'), + (0x1D620, 'M', u'y'), + 
(0x1D621, 'M', u'z'), + (0x1D622, 'M', u'a'), + (0x1D623, 'M', u'b'), + (0x1D624, 'M', u'c'), + (0x1D625, 'M', u'd'), + (0x1D626, 'M', u'e'), + (0x1D627, 'M', u'f'), + (0x1D628, 'M', u'g'), + (0x1D629, 'M', u'h'), + (0x1D62A, 'M', u'i'), + (0x1D62B, 'M', u'j'), + (0x1D62C, 'M', u'k'), + (0x1D62D, 'M', u'l'), + (0x1D62E, 'M', u'm'), + (0x1D62F, 'M', u'n'), + (0x1D630, 'M', u'o'), + (0x1D631, 'M', u'p'), + (0x1D632, 'M', u'q'), + (0x1D633, 'M', u'r'), + (0x1D634, 'M', u's'), + (0x1D635, 'M', u't'), + (0x1D636, 'M', u'u'), + (0x1D637, 'M', u'v'), + (0x1D638, 'M', u'w'), + (0x1D639, 'M', u'x'), + (0x1D63A, 'M', u'y'), + (0x1D63B, 'M', u'z'), + (0x1D63C, 'M', u'a'), + (0x1D63D, 'M', u'b'), + (0x1D63E, 'M', u'c'), + (0x1D63F, 'M', u'd'), + (0x1D640, 'M', u'e'), + (0x1D641, 'M', u'f'), + (0x1D642, 'M', u'g'), + (0x1D643, 'M', u'h'), + (0x1D644, 'M', u'i'), + (0x1D645, 'M', u'j'), + (0x1D646, 'M', u'k'), + (0x1D647, 'M', u'l'), + (0x1D648, 'M', u'm'), + (0x1D649, 'M', u'n'), + (0x1D64A, 'M', u'o'), + (0x1D64B, 'M', u'p'), + (0x1D64C, 'M', u'q'), + (0x1D64D, 'M', u'r'), + (0x1D64E, 'M', u's'), + (0x1D64F, 'M', u't'), + (0x1D650, 'M', u'u'), + (0x1D651, 'M', u'v'), + (0x1D652, 'M', u'w'), + (0x1D653, 'M', u'x'), + (0x1D654, 'M', u'y'), + (0x1D655, 'M', u'z'), + (0x1D656, 'M', u'a'), + (0x1D657, 'M', u'b'), + (0x1D658, 'M', u'c'), + (0x1D659, 'M', u'd'), + (0x1D65A, 'M', u'e'), + (0x1D65B, 'M', u'f'), + (0x1D65C, 'M', u'g'), + (0x1D65D, 'M', u'h'), + (0x1D65E, 'M', u'i'), + (0x1D65F, 'M', u'j'), + (0x1D660, 'M', u'k'), + (0x1D661, 'M', u'l'), + (0x1D662, 'M', u'm'), + (0x1D663, 'M', u'n'), + (0x1D664, 'M', u'o'), + (0x1D665, 'M', u'p'), + (0x1D666, 'M', u'q'), + (0x1D667, 'M', u'r'), + (0x1D668, 'M', u's'), + (0x1D669, 'M', u't'), + (0x1D66A, 'M', u'u'), + (0x1D66B, 'M', u'v'), + (0x1D66C, 'M', u'w'), + (0x1D66D, 'M', u'x'), + (0x1D66E, 'M', u'y'), + (0x1D66F, 'M', u'z'), + (0x1D670, 'M', u'a'), + (0x1D671, 'M', u'b'), + (0x1D672, 'M', u'c'), + (0x1D673, 'M', u'd'), + (0x1D674, 'M', u'e'), + (0x1D675, 'M', u'f'), + (0x1D676, 'M', u'g'), + (0x1D677, 'M', u'h'), + (0x1D678, 'M', u'i'), + (0x1D679, 'M', u'j'), + (0x1D67A, 'M', u'k'), + (0x1D67B, 'M', u'l'), + (0x1D67C, 'M', u'm'), + (0x1D67D, 'M', u'n'), + (0x1D67E, 'M', u'o'), + (0x1D67F, 'M', u'p'), + (0x1D680, 'M', u'q'), + (0x1D681, 'M', u'r'), + (0x1D682, 'M', u's'), + (0x1D683, 'M', u't'), + (0x1D684, 'M', u'u'), + (0x1D685, 'M', u'v'), + (0x1D686, 'M', u'w'), + (0x1D687, 'M', u'x'), + (0x1D688, 'M', u'y'), + (0x1D689, 'M', u'z'), + (0x1D68A, 'M', u'a'), + (0x1D68B, 'M', u'b'), + (0x1D68C, 'M', u'c'), + (0x1D68D, 'M', u'd'), + (0x1D68E, 'M', u'e'), + (0x1D68F, 'M', u'f'), + (0x1D690, 'M', u'g'), + (0x1D691, 'M', u'h'), + (0x1D692, 'M', u'i'), + (0x1D693, 'M', u'j'), + (0x1D694, 'M', u'k'), + (0x1D695, 'M', u'l'), + (0x1D696, 'M', u'm'), + (0x1D697, 'M', u'n'), + (0x1D698, 'M', u'o'), + (0x1D699, 'M', u'p'), + (0x1D69A, 'M', u'q'), + (0x1D69B, 'M', u'r'), + (0x1D69C, 'M', u's'), + (0x1D69D, 'M', u't'), + (0x1D69E, 'M', u'u'), + (0x1D69F, 'M', u'v'), + (0x1D6A0, 'M', u'w'), + (0x1D6A1, 'M', u'x'), + (0x1D6A2, 'M', u'y'), + (0x1D6A3, 'M', u'z'), + (0x1D6A4, 'M', u'ı'), + (0x1D6A5, 'M', u'ȷ'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', u'α'), + (0x1D6A9, 'M', u'β'), + (0x1D6AA, 'M', u'γ'), + (0x1D6AB, 'M', u'δ'), + (0x1D6AC, 'M', u'ε'), + (0x1D6AD, 'M', u'ζ'), + (0x1D6AE, 'M', u'η'), + (0x1D6AF, 'M', u'θ'), + (0x1D6B0, 'M', u'ι'), + (0x1D6B1, 'M', u'κ'), + (0x1D6B2, 'M', u'λ'), + (0x1D6B3, 'M', u'μ'), + (0x1D6B4, 'M', u'ν'), + (0x1D6B5, 'M', u'ξ'), + 
(0x1D6B6, 'M', u'ο'), + (0x1D6B7, 'M', u'π'), + (0x1D6B8, 'M', u'ρ'), + (0x1D6B9, 'M', u'θ'), + (0x1D6BA, 'M', u'σ'), + (0x1D6BB, 'M', u'τ'), + (0x1D6BC, 'M', u'υ'), + (0x1D6BD, 'M', u'φ'), + (0x1D6BE, 'M', u'χ'), + (0x1D6BF, 'M', u'ψ'), + (0x1D6C0, 'M', u'ω'), + (0x1D6C1, 'M', u'∇'), + (0x1D6C2, 'M', u'α'), + (0x1D6C3, 'M', u'β'), + (0x1D6C4, 'M', u'γ'), + (0x1D6C5, 'M', u'δ'), + (0x1D6C6, 'M', u'ε'), + (0x1D6C7, 'M', u'ζ'), + (0x1D6C8, 'M', u'η'), + (0x1D6C9, 'M', u'θ'), + (0x1D6CA, 'M', u'ι'), + (0x1D6CB, 'M', u'κ'), + (0x1D6CC, 'M', u'λ'), + (0x1D6CD, 'M', u'μ'), + (0x1D6CE, 'M', u'ν'), + (0x1D6CF, 'M', u'ξ'), + (0x1D6D0, 'M', u'ο'), + (0x1D6D1, 'M', u'π'), + (0x1D6D2, 'M', u'ρ'), + (0x1D6D3, 'M', u'σ'), + (0x1D6D5, 'M', u'τ'), + (0x1D6D6, 'M', u'υ'), + (0x1D6D7, 'M', u'φ'), + (0x1D6D8, 'M', u'χ'), + (0x1D6D9, 'M', u'ψ'), + (0x1D6DA, 'M', u'ω'), + (0x1D6DB, 'M', u'∂'), + (0x1D6DC, 'M', u'ε'), + (0x1D6DD, 'M', u'θ'), + (0x1D6DE, 'M', u'κ'), + (0x1D6DF, 'M', u'φ'), + (0x1D6E0, 'M', u'ρ'), + (0x1D6E1, 'M', u'π'), + (0x1D6E2, 'M', u'α'), + (0x1D6E3, 'M', u'β'), + (0x1D6E4, 'M', u'γ'), + (0x1D6E5, 'M', u'δ'), + (0x1D6E6, 'M', u'ε'), + (0x1D6E7, 'M', u'ζ'), + (0x1D6E8, 'M', u'η'), + (0x1D6E9, 'M', u'θ'), + (0x1D6EA, 'M', u'ι'), + (0x1D6EB, 'M', u'κ'), + (0x1D6EC, 'M', u'λ'), + (0x1D6ED, 'M', u'μ'), + (0x1D6EE, 'M', u'ν'), + (0x1D6EF, 'M', u'ξ'), + (0x1D6F0, 'M', u'ο'), + (0x1D6F1, 'M', u'π'), + (0x1D6F2, 'M', u'ρ'), + (0x1D6F3, 'M', u'θ'), + (0x1D6F4, 'M', u'σ'), + (0x1D6F5, 'M', u'τ'), + (0x1D6F6, 'M', u'υ'), + (0x1D6F7, 'M', u'φ'), + (0x1D6F8, 'M', u'χ'), + (0x1D6F9, 'M', u'ψ'), + (0x1D6FA, 'M', u'ω'), + (0x1D6FB, 'M', u'∇'), + (0x1D6FC, 'M', u'α'), + (0x1D6FD, 'M', u'β'), + (0x1D6FE, 'M', u'γ'), + (0x1D6FF, 'M', u'δ'), + (0x1D700, 'M', u'ε'), + (0x1D701, 'M', u'ζ'), + (0x1D702, 'M', u'η'), + (0x1D703, 'M', u'θ'), + (0x1D704, 'M', u'ι'), + (0x1D705, 'M', u'κ'), + (0x1D706, 'M', u'λ'), + (0x1D707, 'M', u'μ'), + (0x1D708, 'M', u'ν'), + (0x1D709, 'M', u'ξ'), + (0x1D70A, 'M', u'ο'), + (0x1D70B, 'M', u'π'), + (0x1D70C, 'M', u'ρ'), + (0x1D70D, 'M', u'σ'), + (0x1D70F, 'M', u'τ'), + (0x1D710, 'M', u'υ'), + (0x1D711, 'M', u'φ'), + (0x1D712, 'M', u'χ'), + (0x1D713, 'M', u'ψ'), + (0x1D714, 'M', u'ω'), + (0x1D715, 'M', u'∂'), + (0x1D716, 'M', u'ε'), + (0x1D717, 'M', u'θ'), + (0x1D718, 'M', u'κ'), + (0x1D719, 'M', u'φ'), + (0x1D71A, 'M', u'ρ'), + (0x1D71B, 'M', u'π'), + (0x1D71C, 'M', u'α'), + (0x1D71D, 'M', u'β'), + (0x1D71E, 'M', u'γ'), + (0x1D71F, 'M', u'δ'), + (0x1D720, 'M', u'ε'), + (0x1D721, 'M', u'ζ'), + (0x1D722, 'M', u'η'), + (0x1D723, 'M', u'θ'), + (0x1D724, 'M', u'ι'), + (0x1D725, 'M', u'κ'), + (0x1D726, 'M', u'λ'), + (0x1D727, 'M', u'μ'), + (0x1D728, 'M', u'ν'), + (0x1D729, 'M', u'ξ'), + (0x1D72A, 'M', u'ο'), + (0x1D72B, 'M', u'π'), + (0x1D72C, 'M', u'ρ'), + (0x1D72D, 'M', u'θ'), + (0x1D72E, 'M', u'σ'), + (0x1D72F, 'M', u'τ'), + (0x1D730, 'M', u'υ'), + (0x1D731, 'M', u'φ'), + (0x1D732, 'M', u'χ'), + (0x1D733, 'M', u'ψ'), + (0x1D734, 'M', u'ω'), + (0x1D735, 'M', u'∇'), + (0x1D736, 'M', u'α'), + (0x1D737, 'M', u'β'), + (0x1D738, 'M', u'γ'), + (0x1D739, 'M', u'δ'), + (0x1D73A, 'M', u'ε'), + (0x1D73B, 'M', u'ζ'), + (0x1D73C, 'M', u'η'), + (0x1D73D, 'M', u'θ'), + (0x1D73E, 'M', u'ι'), + (0x1D73F, 'M', u'κ'), + (0x1D740, 'M', u'λ'), + (0x1D741, 'M', u'μ'), + (0x1D742, 'M', u'ν'), + (0x1D743, 'M', u'ξ'), + (0x1D744, 'M', u'ο'), + (0x1D745, 'M', u'π'), + (0x1D746, 'M', u'ρ'), + (0x1D747, 'M', u'σ'), + (0x1D749, 'M', u'τ'), + (0x1D74A, 'M', u'υ'), + (0x1D74B, 'M', u'φ'), + (0x1D74C, 'M', u'χ'), + 
(0x1D74D, 'M', u'ψ'), + (0x1D74E, 'M', u'ω'), + (0x1D74F, 'M', u'∂'), + (0x1D750, 'M', u'ε'), + (0x1D751, 'M', u'θ'), + (0x1D752, 'M', u'κ'), + (0x1D753, 'M', u'φ'), + (0x1D754, 'M', u'ρ'), + (0x1D755, 'M', u'π'), + (0x1D756, 'M', u'α'), + (0x1D757, 'M', u'β'), + (0x1D758, 'M', u'γ'), + (0x1D759, 'M', u'δ'), + (0x1D75A, 'M', u'ε'), + (0x1D75B, 'M', u'ζ'), + (0x1D75C, 'M', u'η'), + (0x1D75D, 'M', u'θ'), + (0x1D75E, 'M', u'ι'), + (0x1D75F, 'M', u'κ'), + (0x1D760, 'M', u'λ'), + (0x1D761, 'M', u'μ'), + (0x1D762, 'M', u'ν'), + (0x1D763, 'M', u'ξ'), + (0x1D764, 'M', u'ο'), + (0x1D765, 'M', u'π'), + (0x1D766, 'M', u'ρ'), + (0x1D767, 'M', u'θ'), + (0x1D768, 'M', u'σ'), + (0x1D769, 'M', u'τ'), + (0x1D76A, 'M', u'υ'), + (0x1D76B, 'M', u'φ'), + (0x1D76C, 'M', u'χ'), + (0x1D76D, 'M', u'ψ'), + (0x1D76E, 'M', u'ω'), + (0x1D76F, 'M', u'∇'), + (0x1D770, 'M', u'α'), + (0x1D771, 'M', u'β'), + (0x1D772, 'M', u'γ'), + (0x1D773, 'M', u'δ'), + (0x1D774, 'M', u'ε'), + (0x1D775, 'M', u'ζ'), + (0x1D776, 'M', u'η'), + (0x1D777, 'M', u'θ'), + (0x1D778, 'M', u'ι'), + (0x1D779, 'M', u'κ'), + (0x1D77A, 'M', u'λ'), + (0x1D77B, 'M', u'μ'), + (0x1D77C, 'M', u'ν'), + (0x1D77D, 'M', u'ξ'), + (0x1D77E, 'M', u'ο'), + (0x1D77F, 'M', u'π'), + (0x1D780, 'M', u'ρ'), + (0x1D781, 'M', u'σ'), + (0x1D783, 'M', u'τ'), + (0x1D784, 'M', u'υ'), + (0x1D785, 'M', u'φ'), + (0x1D786, 'M', u'χ'), + (0x1D787, 'M', u'ψ'), + (0x1D788, 'M', u'ω'), + (0x1D789, 'M', u'∂'), + (0x1D78A, 'M', u'ε'), + (0x1D78B, 'M', u'θ'), + (0x1D78C, 'M', u'κ'), + (0x1D78D, 'M', u'φ'), + (0x1D78E, 'M', u'ρ'), + (0x1D78F, 'M', u'π'), + (0x1D790, 'M', u'α'), + (0x1D791, 'M', u'β'), + (0x1D792, 'M', u'γ'), + (0x1D793, 'M', u'δ'), + (0x1D794, 'M', u'ε'), + (0x1D795, 'M', u'ζ'), + (0x1D796, 'M', u'η'), + (0x1D797, 'M', u'θ'), + (0x1D798, 'M', u'ι'), + (0x1D799, 'M', u'κ'), + (0x1D79A, 'M', u'λ'), + (0x1D79B, 'M', u'μ'), + (0x1D79C, 'M', u'ν'), + (0x1D79D, 'M', u'ξ'), + (0x1D79E, 'M', u'ο'), + (0x1D79F, 'M', u'π'), + (0x1D7A0, 'M', u'ρ'), + (0x1D7A1, 'M', u'θ'), + (0x1D7A2, 'M', u'σ'), + (0x1D7A3, 'M', u'τ'), + (0x1D7A4, 'M', u'υ'), + (0x1D7A5, 'M', u'φ'), + (0x1D7A6, 'M', u'χ'), + (0x1D7A7, 'M', u'ψ'), + (0x1D7A8, 'M', u'ω'), + (0x1D7A9, 'M', u'∇'), + (0x1D7AA, 'M', u'α'), + (0x1D7AB, 'M', u'β'), + (0x1D7AC, 'M', u'γ'), + (0x1D7AD, 'M', u'δ'), + (0x1D7AE, 'M', u'ε'), + (0x1D7AF, 'M', u'ζ'), + (0x1D7B0, 'M', u'η'), + (0x1D7B1, 'M', u'θ'), + (0x1D7B2, 'M', u'ι'), + (0x1D7B3, 'M', u'κ'), + (0x1D7B4, 'M', u'λ'), + (0x1D7B5, 'M', u'μ'), + (0x1D7B6, 'M', u'ν'), + (0x1D7B7, 'M', u'ξ'), + (0x1D7B8, 'M', u'ο'), + (0x1D7B9, 'M', u'π'), + (0x1D7BA, 'M', u'ρ'), + (0x1D7BB, 'M', u'σ'), + (0x1D7BD, 'M', u'τ'), + (0x1D7BE, 'M', u'υ'), + (0x1D7BF, 'M', u'φ'), + (0x1D7C0, 'M', u'χ'), + (0x1D7C1, 'M', u'ψ'), + (0x1D7C2, 'M', u'ω'), + (0x1D7C3, 'M', u'∂'), + (0x1D7C4, 'M', u'ε'), + (0x1D7C5, 'M', u'θ'), + (0x1D7C6, 'M', u'κ'), + (0x1D7C7, 'M', u'φ'), + (0x1D7C8, 'M', u'ρ'), + (0x1D7C9, 'M', u'π'), + (0x1D7CA, 'M', u'ϝ'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', u'0'), + (0x1D7CF, 'M', u'1'), + (0x1D7D0, 'M', u'2'), + (0x1D7D1, 'M', u'3'), + (0x1D7D2, 'M', u'4'), + (0x1D7D3, 'M', u'5'), + (0x1D7D4, 'M', u'6'), + (0x1D7D5, 'M', u'7'), + (0x1D7D6, 'M', u'8'), + (0x1D7D7, 'M', u'9'), + (0x1D7D8, 'M', u'0'), + (0x1D7D9, 'M', u'1'), + (0x1D7DA, 'M', u'2'), + (0x1D7DB, 'M', u'3'), + (0x1D7DC, 'M', u'4'), + (0x1D7DD, 'M', u'5'), + (0x1D7DE, 'M', u'6'), + (0x1D7DF, 'M', u'7'), + (0x1D7E0, 'M', u'8'), + (0x1D7E1, 'M', u'9'), + (0x1D7E2, 'M', u'0'), + (0x1D7E3, 'M', u'1'), + (0x1D7E4, 'M', u'2'), + 
(0x1D7E5, 'M', u'3'), + (0x1D7E6, 'M', u'4'), + (0x1D7E7, 'M', u'5'), + (0x1D7E8, 'M', u'6'), + (0x1D7E9, 'M', u'7'), + (0x1D7EA, 'M', u'8'), + (0x1D7EB, 'M', u'9'), + (0x1D7EC, 'M', u'0'), + (0x1D7ED, 'M', u'1'), + (0x1D7EE, 'M', u'2'), + (0x1D7EF, 'M', u'3'), + (0x1D7F0, 'M', u'4'), + (0x1D7F1, 'M', u'5'), + (0x1D7F2, 'M', u'6'), + (0x1D7F3, 'M', u'7'), + (0x1D7F4, 'M', u'8'), + (0x1D7F5, 'M', u'9'), + (0x1D7F6, 'M', u'0'), + (0x1D7F7, 'M', u'1'), + (0x1D7F8, 'M', u'2'), + (0x1D7F9, 'M', u'3'), + (0x1D7FA, 'M', u'4'), + (0x1D7FB, 'M', u'5'), + (0x1D7FC, 'M', u'6'), + (0x1D7FD, 'M', u'7'), + (0x1D7FE, 'M', u'8'), + (0x1D7FF, 'M', u'9'), + (0x1D800, 'X'), + (0x1EE00, 'M', u'ا'), + (0x1EE01, 'M', u'ب'), + (0x1EE02, 'M', u'ج'), + (0x1EE03, 'M', u'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', u'و'), + (0x1EE06, 'M', u'ز'), + (0x1EE07, 'M', u'ح'), + (0x1EE08, 'M', u'ط'), + (0x1EE09, 'M', u'ي'), + (0x1EE0A, 'M', u'ك'), + (0x1EE0B, 'M', u'ل'), + (0x1EE0C, 'M', u'م'), + (0x1EE0D, 'M', u'ن'), + (0x1EE0E, 'M', u'س'), + (0x1EE0F, 'M', u'ع'), + (0x1EE10, 'M', u'ف'), + (0x1EE11, 'M', u'ص'), + (0x1EE12, 'M', u'ق'), + (0x1EE13, 'M', u'ر'), + (0x1EE14, 'M', u'ش'), + (0x1EE15, 'M', u'ت'), + (0x1EE16, 'M', u'ث'), + (0x1EE17, 'M', u'خ'), + (0x1EE18, 'M', u'ذ'), + (0x1EE19, 'M', u'ض'), + (0x1EE1A, 'M', u'ظ'), + (0x1EE1B, 'M', u'غ'), + (0x1EE1C, 'M', u'ٮ'), + (0x1EE1D, 'M', u'ں'), + (0x1EE1E, 'M', u'ڡ'), + (0x1EE1F, 'M', u'ٯ'), + (0x1EE20, 'X'), + (0x1EE21, 'M', u'ب'), + (0x1EE22, 'M', u'ج'), + (0x1EE23, 'X'), + (0x1EE24, 'M', u'ه'), + (0x1EE25, 'X'), + (0x1EE27, 'M', u'ح'), + (0x1EE28, 'X'), + (0x1EE29, 'M', u'ي'), + (0x1EE2A, 'M', u'ك'), + (0x1EE2B, 'M', u'ل'), + (0x1EE2C, 'M', u'م'), + (0x1EE2D, 'M', u'ن'), + (0x1EE2E, 'M', u'س'), + (0x1EE2F, 'M', u'ع'), + (0x1EE30, 'M', u'ف'), + (0x1EE31, 'M', u'ص'), + (0x1EE32, 'M', u'ق'), + (0x1EE33, 'X'), + (0x1EE34, 'M', u'ش'), + (0x1EE35, 'M', u'ت'), + (0x1EE36, 'M', u'ث'), + (0x1EE37, 'M', u'خ'), + (0x1EE38, 'X'), + (0x1EE39, 'M', u'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', u'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', u'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', u'ح'), + (0x1EE48, 'X'), + (0x1EE49, 'M', u'ي'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', u'ل'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', u'ن'), + (0x1EE4E, 'M', u'س'), + (0x1EE4F, 'M', u'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', u'ص'), + (0x1EE52, 'M', u'ق'), + (0x1EE53, 'X'), + (0x1EE54, 'M', u'ش'), + (0x1EE55, 'X'), + (0x1EE57, 'M', u'خ'), + (0x1EE58, 'X'), + (0x1EE59, 'M', u'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', u'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', u'ں'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', u'ٯ'), + (0x1EE60, 'X'), + (0x1EE61, 'M', u'ب'), + (0x1EE62, 'M', u'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', u'ه'), + (0x1EE65, 'X'), + (0x1EE67, 'M', u'ح'), + (0x1EE68, 'M', u'ط'), + (0x1EE69, 'M', u'ي'), + (0x1EE6A, 'M', u'ك'), + (0x1EE6B, 'X'), + (0x1EE6C, 'M', u'م'), + (0x1EE6D, 'M', u'ن'), + (0x1EE6E, 'M', u'س'), + (0x1EE6F, 'M', u'ع'), + (0x1EE70, 'M', u'ف'), + (0x1EE71, 'M', u'ص'), + (0x1EE72, 'M', u'ق'), + (0x1EE73, 'X'), + (0x1EE74, 'M', u'ش'), + (0x1EE75, 'M', u'ت'), + (0x1EE76, 'M', u'ث'), + (0x1EE77, 'M', u'خ'), + (0x1EE78, 'X'), + (0x1EE79, 'M', u'ض'), + (0x1EE7A, 'M', u'ظ'), + (0x1EE7B, 'M', u'غ'), + (0x1EE7C, 'M', u'ٮ'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', u'ڡ'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', u'ا'), + (0x1EE81, 'M', u'ب'), + (0x1EE82, 'M', u'ج'), + (0x1EE83, 'M', u'د'), + (0x1EE84, 'M', u'ه'), + (0x1EE85, 'M', u'و'), + (0x1EE86, 'M', u'ز'), + (0x1EE87, 'M', u'ح'), + (0x1EE88, 'M', u'ط'), + (0x1EE89, 'M', u'ي'), + 
(0x1EE8A, 'X'), + (0x1EE8B, 'M', u'ل'), + (0x1EE8C, 'M', u'م'), + (0x1EE8D, 'M', u'ن'), + (0x1EE8E, 'M', u'س'), + (0x1EE8F, 'M', u'ع'), + (0x1EE90, 'M', u'ف'), + (0x1EE91, 'M', u'ص'), + (0x1EE92, 'M', u'ق'), + (0x1EE93, 'M', u'ر'), + (0x1EE94, 'M', u'ش'), + (0x1EE95, 'M', u'ت'), + (0x1EE96, 'M', u'ث'), + (0x1EE97, 'M', u'خ'), + (0x1EE98, 'M', u'ذ'), + (0x1EE99, 'M', u'ض'), + (0x1EE9A, 'M', u'ظ'), + (0x1EE9B, 'M', u'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', u'ب'), + (0x1EEA2, 'M', u'ج'), + (0x1EEA3, 'M', u'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', u'و'), + (0x1EEA6, 'M', u'ز'), + (0x1EEA7, 'M', u'ح'), + (0x1EEA8, 'M', u'ط'), + (0x1EEA9, 'M', u'ي'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', u'ل'), + (0x1EEAC, 'M', u'م'), + (0x1EEAD, 'M', u'ن'), + (0x1EEAE, 'M', u'س'), + (0x1EEAF, 'M', u'ع'), + (0x1EEB0, 'M', u'ف'), + (0x1EEB1, 'M', u'ص'), + (0x1EEB2, 'M', u'ق'), + (0x1EEB3, 'M', u'ر'), + (0x1EEB4, 'M', u'ش'), + (0x1EEB5, 'M', u'ت'), + (0x1EEB6, 'M', u'ث'), + (0x1EEB7, 'M', u'خ'), + (0x1EEB8, 'M', u'ذ'), + (0x1EEB9, 'M', u'ض'), + (0x1EEBA, 'M', u'ظ'), + (0x1EEBB, 'M', u'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0BF, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0E0, 'X'), + (0x1F101, '3', u'0,'), + (0x1F102, '3', u'1,'), + (0x1F103, '3', u'2,'), + (0x1F104, '3', u'3,'), + (0x1F105, '3', u'4,'), + (0x1F106, '3', u'5,'), + (0x1F107, '3', u'6,'), + (0x1F108, '3', u'7,'), + (0x1F109, '3', u'8,'), + (0x1F10A, '3', u'9,'), + (0x1F10B, 'X'), + (0x1F110, '3', u'(a)'), + (0x1F111, '3', u'(b)'), + (0x1F112, '3', u'(c)'), + (0x1F113, '3', u'(d)'), + (0x1F114, '3', u'(e)'), + (0x1F115, '3', u'(f)'), + (0x1F116, '3', u'(g)'), + (0x1F117, '3', u'(h)'), + (0x1F118, '3', u'(i)'), + (0x1F119, '3', u'(j)'), + (0x1F11A, '3', u'(k)'), + (0x1F11B, '3', u'(l)'), + (0x1F11C, '3', u'(m)'), + (0x1F11D, '3', u'(n)'), + (0x1F11E, '3', u'(o)'), + (0x1F11F, '3', u'(p)'), + (0x1F120, '3', u'(q)'), + (0x1F121, '3', u'(r)'), + (0x1F122, '3', u'(s)'), + (0x1F123, '3', u'(t)'), + (0x1F124, '3', u'(u)'), + (0x1F125, '3', u'(v)'), + (0x1F126, '3', u'(w)'), + (0x1F127, '3', u'(x)'), + (0x1F128, '3', u'(y)'), + (0x1F129, '3', u'(z)'), + (0x1F12A, 'M', u'〔s〕'), + (0x1F12B, 'M', u'c'), + (0x1F12C, 'M', u'r'), + (0x1F12D, 'M', u'cd'), + (0x1F12E, 'M', u'wz'), + (0x1F12F, 'X'), + (0x1F130, 'M', u'a'), + (0x1F131, 'M', u'b'), + (0x1F132, 'M', u'c'), + (0x1F133, 'M', u'd'), + (0x1F134, 'M', u'e'), + (0x1F135, 'M', u'f'), + (0x1F136, 'M', u'g'), + (0x1F137, 'M', u'h'), + (0x1F138, 'M', u'i'), + (0x1F139, 'M', u'j'), + (0x1F13A, 'M', u'k'), + (0x1F13B, 'M', u'l'), + (0x1F13C, 'M', u'm'), + (0x1F13D, 'M', u'n'), + (0x1F13E, 'M', u'o'), + (0x1F13F, 'M', u'p'), + (0x1F140, 'M', u'q'), + (0x1F141, 'M', u'r'), + (0x1F142, 'M', u's'), + (0x1F143, 'M', u't'), + (0x1F144, 'M', u'u'), + (0x1F145, 'M', u'v'), + (0x1F146, 'M', u'w'), + (0x1F147, 'M', u'x'), + (0x1F148, 'M', u'y'), + (0x1F149, 'M', u'z'), + (0x1F14A, 'M', u'hv'), + (0x1F14B, 'M', u'mv'), + (0x1F14C, 'M', u'sd'), + (0x1F14D, 'M', u'ss'), + (0x1F14E, 'M', u'ppv'), + (0x1F14F, 'M', u'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', u'mc'), + (0x1F16B, 'M', u'md'), + (0x1F16C, 'X'), + (0x1F170, 'V'), + (0x1F190, 'M', u'dj'), + (0x1F191, 'V'), + (0x1F19B, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', u'ほか'), + (0x1F201, 'M', u'ココ'), + (0x1F202, 'M', u'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', u'手'), + (0x1F211, 'M', u'字'), + (0x1F212, 'M', 
u'双'), + (0x1F213, 'M', u'デ'), + (0x1F214, 'M', u'二'), + (0x1F215, 'M', u'多'), + (0x1F216, 'M', u'解'), + (0x1F217, 'M', u'天'), + (0x1F218, 'M', u'交'), + (0x1F219, 'M', u'映'), + (0x1F21A, 'M', u'無'), + (0x1F21B, 'M', u'料'), + (0x1F21C, 'M', u'前'), + (0x1F21D, 'M', u'後'), + (0x1F21E, 'M', u'再'), + (0x1F21F, 'M', u'新'), + (0x1F220, 'M', u'初'), + (0x1F221, 'M', u'終'), + (0x1F222, 'M', u'生'), + (0x1F223, 'M', u'販'), + (0x1F224, 'M', u'声'), + (0x1F225, 'M', u'吹'), + (0x1F226, 'M', u'演'), + (0x1F227, 'M', u'投'), + (0x1F228, 'M', u'捕'), + (0x1F229, 'M', u'一'), + (0x1F22A, 'M', u'三'), + (0x1F22B, 'M', u'遊'), + (0x1F22C, 'M', u'左'), + (0x1F22D, 'M', u'中'), + (0x1F22E, 'M', u'右'), + (0x1F22F, 'M', u'指'), + (0x1F230, 'M', u'走'), + (0x1F231, 'M', u'打'), + (0x1F232, 'M', u'禁'), + (0x1F233, 'M', u'空'), + (0x1F234, 'M', u'合'), + (0x1F235, 'M', u'満'), + (0x1F236, 'M', u'有'), + (0x1F237, 'M', u'月'), + (0x1F238, 'M', u'申'), + (0x1F239, 'M', u'割'), + (0x1F23A, 'M', u'営'), + (0x1F23B, 'X'), + (0x1F240, 'M', u'〔本〕'), + (0x1F241, 'M', u'〔三〕'), + (0x1F242, 'M', u'〔二〕'), + (0x1F243, 'M', u'〔安〕'), + (0x1F244, 'M', u'〔点〕'), + (0x1F245, 'M', u'〔打〕'), + (0x1F246, 'M', u'〔盗〕'), + (0x1F247, 'M', u'〔勝〕'), + (0x1F248, 'M', u'〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', u'得'), + (0x1F251, 'M', u'可'), + (0x1F252, 'X'), + (0x1F300, 'V'), + (0x1F321, 'X'), + (0x1F330, 'V'), + (0x1F336, 'X'), + (0x1F337, 'V'), + (0x1F37D, 'X'), + (0x1F380, 'V'), + (0x1F394, 'X'), + (0x1F3A0, 'V'), + (0x1F3C5, 'X'), + (0x1F3C6, 'V'), + (0x1F3CB, 'X'), + (0x1F3E0, 'V'), + (0x1F3F1, 'X'), + (0x1F400, 'V'), + (0x1F43F, 'X'), + (0x1F440, 'V'), + (0x1F441, 'X'), + (0x1F442, 'V'), + (0x1F4F8, 'X'), + (0x1F4F9, 'V'), + (0x1F4FD, 'X'), + (0x1F500, 'V'), + (0x1F53E, 'X'), + (0x1F540, 'V'), + (0x1F544, 'X'), + (0x1F550, 'V'), + (0x1F568, 'X'), + (0x1F5FB, 'V'), + (0x1F641, 'X'), + (0x1F645, 'V'), + (0x1F650, 'X'), + (0x1F680, 'V'), + (0x1F6C6, 'X'), + (0x1F700, 'V'), + (0x1F774, 'X'), + (0x20000, 'V'), + (0x2A6D7, 'X'), + (0x2A700, 'V'), + (0x2B735, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2F800, 'M', u'丽'), + (0x2F801, 'M', u'丸'), + (0x2F802, 'M', u'乁'), + (0x2F803, 'M', u'𠄢'), + (0x2F804, 'M', u'你'), + (0x2F805, 'M', u'侮'), + (0x2F806, 'M', u'侻'), + (0x2F807, 'M', u'倂'), + (0x2F808, 'M', u'偺'), + (0x2F809, 'M', u'備'), + (0x2F80A, 'M', u'僧'), + (0x2F80B, 'M', u'像'), + (0x2F80C, 'M', u'㒞'), + (0x2F80D, 'M', u'𠘺'), + (0x2F80E, 'M', u'免'), + (0x2F80F, 'M', u'兔'), + (0x2F810, 'M', u'兤'), + (0x2F811, 'M', u'具'), + (0x2F812, 'M', u'𠔜'), + (0x2F813, 'M', u'㒹'), + (0x2F814, 'M', u'內'), + (0x2F815, 'M', u'再'), + (0x2F816, 'M', u'𠕋'), + (0x2F817, 'M', u'冗'), + (0x2F818, 'M', u'冤'), + (0x2F819, 'M', u'仌'), + (0x2F81A, 'M', u'冬'), + (0x2F81B, 'M', u'况'), + (0x2F81C, 'M', u'𩇟'), + (0x2F81D, 'M', u'凵'), + (0x2F81E, 'M', u'刃'), + (0x2F81F, 'M', u'㓟'), + (0x2F820, 'M', u'刻'), + (0x2F821, 'M', u'剆'), + (0x2F822, 'M', u'割'), + (0x2F823, 'M', u'剷'), + (0x2F824, 'M', u'㔕'), + (0x2F825, 'M', u'勇'), + (0x2F826, 'M', u'勉'), + (0x2F827, 'M', u'勤'), + (0x2F828, 'M', u'勺'), + (0x2F829, 'M', u'包'), + (0x2F82A, 'M', u'匆'), + (0x2F82B, 'M', u'北'), + (0x2F82C, 'M', u'卉'), + (0x2F82D, 'M', u'卑'), + (0x2F82E, 'M', u'博'), + (0x2F82F, 'M', u'即'), + (0x2F830, 'M', u'卽'), + (0x2F831, 'M', u'卿'), + (0x2F834, 'M', u'𠨬'), + (0x2F835, 'M', u'灰'), + (0x2F836, 'M', u'及'), + (0x2F837, 'M', u'叟'), + (0x2F838, 'M', u'𠭣'), + (0x2F839, 'M', u'叫'), + (0x2F83A, 'M', u'叱'), + (0x2F83B, 'M', u'吆'), + (0x2F83C, 'M', u'咞'), + (0x2F83D, 'M', u'吸'), + (0x2F83E, 'M', u'呈'), + (0x2F83F, 'M', u'周'), + 
(0x2F840, 'M', u'咢'), + (0x2F841, 'M', u'哶'), + (0x2F842, 'M', u'唐'), + (0x2F843, 'M', u'啓'), + (0x2F844, 'M', u'啣'), + (0x2F845, 'M', u'善'), + (0x2F847, 'M', u'喙'), + (0x2F848, 'M', u'喫'), + (0x2F849, 'M', u'喳'), + (0x2F84A, 'M', u'嗂'), + (0x2F84B, 'M', u'圖'), + (0x2F84C, 'M', u'嘆'), + (0x2F84D, 'M', u'圗'), + (0x2F84E, 'M', u'噑'), + (0x2F84F, 'M', u'噴'), + (0x2F850, 'M', u'切'), + (0x2F851, 'M', u'壮'), + (0x2F852, 'M', u'城'), + (0x2F853, 'M', u'埴'), + (0x2F854, 'M', u'堍'), + (0x2F855, 'M', u'型'), + (0x2F856, 'M', u'堲'), + (0x2F857, 'M', u'報'), + (0x2F858, 'M', u'墬'), + (0x2F859, 'M', u'𡓤'), + (0x2F85A, 'M', u'売'), + (0x2F85B, 'M', u'壷'), + (0x2F85C, 'M', u'夆'), + (0x2F85D, 'M', u'多'), + (0x2F85E, 'M', u'夢'), + (0x2F85F, 'M', u'奢'), + (0x2F860, 'M', u'𡚨'), + (0x2F861, 'M', u'𡛪'), + (0x2F862, 'M', u'姬'), + (0x2F863, 'M', u'娛'), + (0x2F864, 'M', u'娧'), + (0x2F865, 'M', u'姘'), + (0x2F866, 'M', u'婦'), + (0x2F867, 'M', u'㛮'), + (0x2F868, 'X'), + (0x2F869, 'M', u'嬈'), + (0x2F86A, 'M', u'嬾'), + (0x2F86C, 'M', u'𡧈'), + (0x2F86D, 'M', u'寃'), + (0x2F86E, 'M', u'寘'), + (0x2F86F, 'M', u'寧'), + (0x2F870, 'M', u'寳'), + (0x2F871, 'M', u'𡬘'), + (0x2F872, 'M', u'寿'), + (0x2F873, 'M', u'将'), + (0x2F874, 'X'), + (0x2F875, 'M', u'尢'), + (0x2F876, 'M', u'㞁'), + (0x2F877, 'M', u'屠'), + (0x2F878, 'M', u'屮'), + (0x2F879, 'M', u'峀'), + (0x2F87A, 'M', u'岍'), + (0x2F87B, 'M', u'𡷤'), + (0x2F87C, 'M', u'嵃'), + (0x2F87D, 'M', u'𡷦'), + (0x2F87E, 'M', u'嵮'), + (0x2F87F, 'M', u'嵫'), + (0x2F880, 'M', u'嵼'), + (0x2F881, 'M', u'巡'), + (0x2F882, 'M', u'巢'), + (0x2F883, 'M', u'㠯'), + (0x2F884, 'M', u'巽'), + (0x2F885, 'M', u'帨'), + (0x2F886, 'M', u'帽'), + (0x2F887, 'M', u'幩'), + (0x2F888, 'M', u'㡢'), + (0x2F889, 'M', u'𢆃'), + (0x2F88A, 'M', u'㡼'), + (0x2F88B, 'M', u'庰'), + (0x2F88C, 'M', u'庳'), + (0x2F88D, 'M', u'庶'), + (0x2F88E, 'M', u'廊'), + (0x2F88F, 'M', u'𪎒'), + (0x2F890, 'M', u'廾'), + (0x2F891, 'M', u'𢌱'), + (0x2F893, 'M', u'舁'), + (0x2F894, 'M', u'弢'), + (0x2F896, 'M', u'㣇'), + (0x2F897, 'M', u'𣊸'), + (0x2F898, 'M', u'𦇚'), + (0x2F899, 'M', u'形'), + (0x2F89A, 'M', u'彫'), + (0x2F89B, 'M', u'㣣'), + (0x2F89C, 'M', u'徚'), + (0x2F89D, 'M', u'忍'), + (0x2F89E, 'M', u'志'), + (0x2F89F, 'M', u'忹'), + (0x2F8A0, 'M', u'悁'), + (0x2F8A1, 'M', u'㤺'), + (0x2F8A2, 'M', u'㤜'), + (0x2F8A3, 'M', u'悔'), + (0x2F8A4, 'M', u'𢛔'), + (0x2F8A5, 'M', u'惇'), + (0x2F8A6, 'M', u'慈'), + (0x2F8A7, 'M', u'慌'), + (0x2F8A8, 'M', u'慎'), + (0x2F8A9, 'M', u'慌'), + (0x2F8AA, 'M', u'慺'), + (0x2F8AB, 'M', u'憎'), + (0x2F8AC, 'M', u'憲'), + (0x2F8AD, 'M', u'憤'), + (0x2F8AE, 'M', u'憯'), + (0x2F8AF, 'M', u'懞'), + (0x2F8B0, 'M', u'懲'), + (0x2F8B1, 'M', u'懶'), + (0x2F8B2, 'M', u'成'), + (0x2F8B3, 'M', u'戛'), + (0x2F8B4, 'M', u'扝'), + (0x2F8B5, 'M', u'抱'), + (0x2F8B6, 'M', u'拔'), + (0x2F8B7, 'M', u'捐'), + (0x2F8B8, 'M', u'𢬌'), + (0x2F8B9, 'M', u'挽'), + (0x2F8BA, 'M', u'拼'), + (0x2F8BB, 'M', u'捨'), + (0x2F8BC, 'M', u'掃'), + (0x2F8BD, 'M', u'揤'), + (0x2F8BE, 'M', u'𢯱'), + (0x2F8BF, 'M', u'搢'), + (0x2F8C0, 'M', u'揅'), + (0x2F8C1, 'M', u'掩'), + (0x2F8C2, 'M', u'㨮'), + (0x2F8C3, 'M', u'摩'), + (0x2F8C4, 'M', u'摾'), + (0x2F8C5, 'M', u'撝'), + (0x2F8C6, 'M', u'摷'), + (0x2F8C7, 'M', u'㩬'), + (0x2F8C8, 'M', u'敏'), + (0x2F8C9, 'M', u'敬'), + (0x2F8CA, 'M', u'𣀊'), + (0x2F8CB, 'M', u'旣'), + (0x2F8CC, 'M', u'書'), + (0x2F8CD, 'M', u'晉'), + (0x2F8CE, 'M', u'㬙'), + (0x2F8CF, 'M', u'暑'), + (0x2F8D0, 'M', u'㬈'), + (0x2F8D1, 'M', u'㫤'), + (0x2F8D2, 'M', u'冒'), + (0x2F8D3, 'M', u'冕'), + (0x2F8D4, 'M', u'最'), + (0x2F8D5, 'M', u'暜'), + (0x2F8D6, 'M', u'肭'), + (0x2F8D7, 'M', u'䏙'), + (0x2F8D8, 'M', 
u'朗'), + (0x2F8D9, 'M', u'望'), + (0x2F8DA, 'M', u'朡'), + (0x2F8DB, 'M', u'杞'), + (0x2F8DC, 'M', u'杓'), + (0x2F8DD, 'M', u'𣏃'), + (0x2F8DE, 'M', u'㭉'), + (0x2F8DF, 'M', u'柺'), + (0x2F8E0, 'M', u'枅'), + (0x2F8E1, 'M', u'桒'), + (0x2F8E2, 'M', u'梅'), + (0x2F8E3, 'M', u'𣑭'), + (0x2F8E4, 'M', u'梎'), + (0x2F8E5, 'M', u'栟'), + (0x2F8E6, 'M', u'椔'), + (0x2F8E7, 'M', u'㮝'), + (0x2F8E8, 'M', u'楂'), + (0x2F8E9, 'M', u'榣'), + (0x2F8EA, 'M', u'槪'), + (0x2F8EB, 'M', u'檨'), + (0x2F8EC, 'M', u'𣚣'), + (0x2F8ED, 'M', u'櫛'), + (0x2F8EE, 'M', u'㰘'), + (0x2F8EF, 'M', u'次'), + (0x2F8F0, 'M', u'𣢧'), + (0x2F8F1, 'M', u'歔'), + (0x2F8F2, 'M', u'㱎'), + (0x2F8F3, 'M', u'歲'), + (0x2F8F4, 'M', u'殟'), + (0x2F8F5, 'M', u'殺'), + (0x2F8F6, 'M', u'殻'), + (0x2F8F7, 'M', u'𣪍'), + (0x2F8F8, 'M', u'𡴋'), + (0x2F8F9, 'M', u'𣫺'), + (0x2F8FA, 'M', u'汎'), + (0x2F8FB, 'M', u'𣲼'), + (0x2F8FC, 'M', u'沿'), + (0x2F8FD, 'M', u'泍'), + (0x2F8FE, 'M', u'汧'), + (0x2F8FF, 'M', u'洖'), + (0x2F900, 'M', u'派'), + (0x2F901, 'M', u'海'), + (0x2F902, 'M', u'流'), + (0x2F903, 'M', u'浩'), + (0x2F904, 'M', u'浸'), + (0x2F905, 'M', u'涅'), + (0x2F906, 'M', u'𣴞'), + (0x2F907, 'M', u'洴'), + (0x2F908, 'M', u'港'), + (0x2F909, 'M', u'湮'), + (0x2F90A, 'M', u'㴳'), + (0x2F90B, 'M', u'滋'), + (0x2F90C, 'M', u'滇'), + (0x2F90D, 'M', u'𣻑'), + (0x2F90E, 'M', u'淹'), + (0x2F90F, 'M', u'潮'), + (0x2F910, 'M', u'𣽞'), + (0x2F911, 'M', u'𣾎'), + (0x2F912, 'M', u'濆'), + (0x2F913, 'M', u'瀹'), + (0x2F914, 'M', u'瀞'), + (0x2F915, 'M', u'瀛'), + (0x2F916, 'M', u'㶖'), + (0x2F917, 'M', u'灊'), + (0x2F918, 'M', u'災'), + (0x2F919, 'M', u'灷'), + (0x2F91A, 'M', u'炭'), + (0x2F91B, 'M', u'𠔥'), + (0x2F91C, 'M', u'煅'), + (0x2F91D, 'M', u'𤉣'), + (0x2F91E, 'M', u'熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', u'爨'), + (0x2F921, 'M', u'爵'), + (0x2F922, 'M', u'牐'), + (0x2F923, 'M', u'𤘈'), + (0x2F924, 'M', u'犀'), + (0x2F925, 'M', u'犕'), + (0x2F926, 'M', u'𤜵'), + (0x2F927, 'M', u'𤠔'), + (0x2F928, 'M', u'獺'), + (0x2F929, 'M', u'王'), + (0x2F92A, 'M', u'㺬'), + (0x2F92B, 'M', u'玥'), + (0x2F92C, 'M', u'㺸'), + (0x2F92E, 'M', u'瑇'), + (0x2F92F, 'M', u'瑜'), + (0x2F930, 'M', u'瑱'), + (0x2F931, 'M', u'璅'), + (0x2F932, 'M', u'瓊'), + (0x2F933, 'M', u'㼛'), + (0x2F934, 'M', u'甤'), + (0x2F935, 'M', u'𤰶'), + (0x2F936, 'M', u'甾'), + (0x2F937, 'M', u'𤲒'), + (0x2F938, 'M', u'異'), + (0x2F939, 'M', u'𢆟'), + (0x2F93A, 'M', u'瘐'), + (0x2F93B, 'M', u'𤾡'), + (0x2F93C, 'M', u'𤾸'), + (0x2F93D, 'M', u'𥁄'), + (0x2F93E, 'M', u'㿼'), + (0x2F93F, 'M', u'䀈'), + (0x2F940, 'M', u'直'), + (0x2F941, 'M', u'𥃳'), + (0x2F942, 'M', u'𥃲'), + (0x2F943, 'M', u'𥄙'), + (0x2F944, 'M', u'𥄳'), + (0x2F945, 'M', u'眞'), + (0x2F946, 'M', u'真'), + (0x2F948, 'M', u'睊'), + (0x2F949, 'M', u'䀹'), + (0x2F94A, 'M', u'瞋'), + (0x2F94B, 'M', u'䁆'), + (0x2F94C, 'M', u'䂖'), + (0x2F94D, 'M', u'𥐝'), + (0x2F94E, 'M', u'硎'), + (0x2F94F, 'M', u'碌'), + (0x2F950, 'M', u'磌'), + (0x2F951, 'M', u'䃣'), + (0x2F952, 'M', u'𥘦'), + (0x2F953, 'M', u'祖'), + (0x2F954, 'M', u'𥚚'), + (0x2F955, 'M', u'𥛅'), + (0x2F956, 'M', u'福'), + (0x2F957, 'M', u'秫'), + (0x2F958, 'M', u'䄯'), + (0x2F959, 'M', u'穀'), + (0x2F95A, 'M', u'穊'), + (0x2F95B, 'M', u'穏'), + (0x2F95C, 'M', u'𥥼'), + (0x2F95D, 'M', u'𥪧'), + (0x2F95F, 'X'), + (0x2F960, 'M', u'䈂'), + (0x2F961, 'M', u'𥮫'), + (0x2F962, 'M', u'篆'), + (0x2F963, 'M', u'築'), + (0x2F964, 'M', u'䈧'), + (0x2F965, 'M', u'𥲀'), + (0x2F966, 'M', u'糒'), + (0x2F967, 'M', u'䊠'), + (0x2F968, 'M', u'糨'), + (0x2F969, 'M', u'糣'), + (0x2F96A, 'M', u'紀'), + (0x2F96B, 'M', u'𥾆'), + (0x2F96C, 'M', u'絣'), + (0x2F96D, 'M', u'䌁'), + (0x2F96E, 'M', u'緇'), + (0x2F96F, 'M', u'縂'), + 
(0x2F970, 'M', u'繅'), + (0x2F971, 'M', u'䌴'), + (0x2F972, 'M', u'𦈨'), + (0x2F973, 'M', u'𦉇'), + (0x2F974, 'M', u'䍙'), + (0x2F975, 'M', u'𦋙'), + (0x2F976, 'M', u'罺'), + (0x2F977, 'M', u'𦌾'), + (0x2F978, 'M', u'羕'), + (0x2F979, 'M', u'翺'), + (0x2F97A, 'M', u'者'), + (0x2F97B, 'M', u'𦓚'), + (0x2F97C, 'M', u'𦔣'), + (0x2F97D, 'M', u'聠'), + (0x2F97E, 'M', u'𦖨'), + (0x2F97F, 'M', u'聰'), + (0x2F980, 'M', u'𣍟'), + (0x2F981, 'M', u'䏕'), + (0x2F982, 'M', u'育'), + (0x2F983, 'M', u'脃'), + (0x2F984, 'M', u'䐋'), + (0x2F985, 'M', u'脾'), + (0x2F986, 'M', u'媵'), + (0x2F987, 'M', u'𦞧'), + (0x2F988, 'M', u'𦞵'), + (0x2F989, 'M', u'𣎓'), + (0x2F98A, 'M', u'𣎜'), + (0x2F98B, 'M', u'舁'), + (0x2F98C, 'M', u'舄'), + (0x2F98D, 'M', u'辞'), + (0x2F98E, 'M', u'䑫'), + (0x2F98F, 'M', u'芑'), + (0x2F990, 'M', u'芋'), + (0x2F991, 'M', u'芝'), + (0x2F992, 'M', u'劳'), + (0x2F993, 'M', u'花'), + (0x2F994, 'M', u'芳'), + (0x2F995, 'M', u'芽'), + (0x2F996, 'M', u'苦'), + (0x2F997, 'M', u'𦬼'), + (0x2F998, 'M', u'若'), + (0x2F999, 'M', u'茝'), + (0x2F99A, 'M', u'荣'), + (0x2F99B, 'M', u'莭'), + (0x2F99C, 'M', u'茣'), + (0x2F99D, 'M', u'莽'), + (0x2F99E, 'M', u'菧'), + (0x2F99F, 'M', u'著'), + (0x2F9A0, 'M', u'荓'), + (0x2F9A1, 'M', u'菊'), + (0x2F9A2, 'M', u'菌'), + (0x2F9A3, 'M', u'菜'), + (0x2F9A4, 'M', u'𦰶'), + (0x2F9A5, 'M', u'𦵫'), + (0x2F9A6, 'M', u'𦳕'), + (0x2F9A7, 'M', u'䔫'), + (0x2F9A8, 'M', u'蓱'), + (0x2F9A9, 'M', u'蓳'), + (0x2F9AA, 'M', u'蔖'), + (0x2F9AB, 'M', u'𧏊'), + (0x2F9AC, 'M', u'蕤'), + (0x2F9AD, 'M', u'𦼬'), + (0x2F9AE, 'M', u'䕝'), + (0x2F9AF, 'M', u'䕡'), + (0x2F9B0, 'M', u'𦾱'), + (0x2F9B1, 'M', u'𧃒'), + (0x2F9B2, 'M', u'䕫'), + (0x2F9B3, 'M', u'虐'), + (0x2F9B4, 'M', u'虜'), + (0x2F9B5, 'M', u'虧'), + (0x2F9B6, 'M', u'虩'), + (0x2F9B7, 'M', u'蚩'), + (0x2F9B8, 'M', u'蚈'), + (0x2F9B9, 'M', u'蜎'), + (0x2F9BA, 'M', u'蛢'), + (0x2F9BB, 'M', u'蝹'), + (0x2F9BC, 'M', u'蜨'), + (0x2F9BD, 'M', u'蝫'), + (0x2F9BE, 'M', u'螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', u'蟡'), + (0x2F9C1, 'M', u'蠁'), + (0x2F9C2, 'M', u'䗹'), + (0x2F9C3, 'M', u'衠'), + (0x2F9C4, 'M', u'衣'), + (0x2F9C5, 'M', u'𧙧'), + (0x2F9C6, 'M', u'裗'), + (0x2F9C7, 'M', u'裞'), + (0x2F9C8, 'M', u'䘵'), + (0x2F9C9, 'M', u'裺'), + (0x2F9CA, 'M', u'㒻'), + (0x2F9CB, 'M', u'𧢮'), + (0x2F9CC, 'M', u'𧥦'), + (0x2F9CD, 'M', u'䚾'), + (0x2F9CE, 'M', u'䛇'), + (0x2F9CF, 'M', u'誠'), + (0x2F9D0, 'M', u'諭'), + (0x2F9D1, 'M', u'變'), + (0x2F9D2, 'M', u'豕'), + (0x2F9D3, 'M', u'𧲨'), + (0x2F9D4, 'M', u'貫'), + (0x2F9D5, 'M', u'賁'), + (0x2F9D6, 'M', u'贛'), + (0x2F9D7, 'M', u'起'), + (0x2F9D8, 'M', u'𧼯'), + (0x2F9D9, 'M', u'𠠄'), + (0x2F9DA, 'M', u'跋'), + (0x2F9DB, 'M', u'趼'), + (0x2F9DC, 'M', u'跰'), + (0x2F9DD, 'M', u'𠣞'), + (0x2F9DE, 'M', u'軔'), + (0x2F9DF, 'M', u'輸'), + (0x2F9E0, 'M', u'𨗒'), + (0x2F9E1, 'M', u'𨗭'), + (0x2F9E2, 'M', u'邔'), + (0x2F9E3, 'M', u'郱'), + (0x2F9E4, 'M', u'鄑'), + (0x2F9E5, 'M', u'𨜮'), + (0x2F9E6, 'M', u'鄛'), + (0x2F9E7, 'M', u'鈸'), + (0x2F9E8, 'M', u'鋗'), + (0x2F9E9, 'M', u'鋘'), + (0x2F9EA, 'M', u'鉼'), + (0x2F9EB, 'M', u'鏹'), + (0x2F9EC, 'M', u'鐕'), + (0x2F9ED, 'M', u'𨯺'), + (0x2F9EE, 'M', u'開'), + (0x2F9EF, 'M', u'䦕'), + (0x2F9F0, 'M', u'閷'), + (0x2F9F1, 'M', u'𨵷'), + (0x2F9F2, 'M', u'䧦'), + (0x2F9F3, 'M', u'雃'), + (0x2F9F4, 'M', u'嶲'), + (0x2F9F5, 'M', u'霣'), + (0x2F9F6, 'M', u'𩅅'), + (0x2F9F7, 'M', u'𩈚'), + (0x2F9F8, 'M', u'䩮'), + (0x2F9F9, 'M', u'䩶'), + (0x2F9FA, 'M', u'韠'), + (0x2F9FB, 'M', u'𩐊'), + (0x2F9FC, 'M', u'䪲'), + (0x2F9FD, 'M', u'𩒖'), + (0x2F9FE, 'M', u'頋'), + (0x2FA00, 'M', u'頩'), + (0x2FA01, 'M', u'𩖶'), + (0x2FA02, 'M', u'飢'), + (0x2FA03, 'M', u'䬳'), + (0x2FA04, 'M', u'餩'), + 
(0x2FA05, 'M', u'馧'), + (0x2FA06, 'M', u'駂'), + (0x2FA07, 'M', u'駾'), + (0x2FA08, 'M', u'䯎'), + (0x2FA09, 'M', u'𩬰'), + (0x2FA0A, 'M', u'鬒'), + (0x2FA0B, 'M', u'鱀'), + (0x2FA0C, 'M', u'鳽'), + (0x2FA0D, 'M', u'䳎'), + (0x2FA0E, 'M', u'䳭'), + (0x2FA0F, 'M', u'鵧'), + (0x2FA10, 'M', u'𪃎'), + (0x2FA11, 'M', u'䳸'), + (0x2FA12, 'M', u'𪄅'), + (0x2FA13, 'M', u'𪈎'), + (0x2FA14, 'M', u'𪊑'), + (0x2FA15, 'M', u'麻'), + (0x2FA16, 'M', u'䵖'), + (0x2FA17, 'M', u'黹'), + (0x2FA18, 'M', u'黾'), + (0x2FA19, 'M', u'鼅'), + (0x2FA1A, 'M', u'鼏'), + (0x2FA1B, 'M', u'鼖'), + (0x2FA1C, 'M', u'鼻'), + (0x2FA1D, 'M', u'𪘀'), + (0x2FA1E, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), +) diff --git a/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/PKG-INFO b/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/PKG-INFO new file mode 100644 index 0000000..8f597f2 --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/PKG-INFO @@ -0,0 +1,214 @@ +Metadata-Version: 1.1 +Name: netifaces +Version: 0.10.4 +Summary: Portable network interface information. +Home-page: https://bitbucket.org/al45tair/netifaces +Author: Alastair Houghton +Author-email: alastair@alastairs-place.net +License: MIT License +Description: netifaces 0.10.4 + ================ + + .. image:: https://drone.io/bitbucket.org/al45tair/netifaces/status.png + :target: https://drone.io/bitbucket.org/al45tair/netifaces/latest + :alt: Build Status + + 1. What is this? + ---------------- + + It's been annoying me for some time that there's no easy way to get the + address(es) of the machine's network interfaces from Python. There is + a good reason for this difficulty, which is that it is virtually impossible + to do so in a portable manner. However, it seems to me that there should + be a package you can easy_install that will take care of working out the + details of doing so on the machine you're using, then you can get on with + writing Python code without concerning yourself with the nitty gritty of + system-dependent low-level networking APIs. + + This package attempts to solve that problem. + + 2. How do I use it? + ------------------- + + First you need to install it, which you can do by typing:: + + tar xvzf netifaces-0.10.4.tar.gz + cd netifaces-0.10.4 + python setup.py install + + Once that's done, you'll need to start Python and do something like the + following:: + + >>> import netifaces + + Then if you enter + + >>> netifaces.interfaces() + ['lo0', 'gif0', 'stf0', 'en0', 'en1', 'fw0'] + + you'll see the list of interface identifiers for your machine. + + You can ask for the addresses of a particular interface by doing + + >>> netifaces.ifaddresses('lo0') + {18: [{'addr': ''}], 2: [{'peer': '127.0.0.1', 'netmask': '255.0.0.0', 'addr': '127.0.0.1'}], 30: [{'peer': '::1', 'netmask': 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', 'addr': '::1'}, {'peer': '', 'netmask': 'ffff:ffff:ffff:ffff::', 'addr': 'fe80::1%lo0'}]} + + Hmmmm. That result looks a bit cryptic; let's break it apart and explain + what each piece means. It returned a dictionary, so let's look there first:: + + { 18: [...], 2: [...], 30: [...] } + + Each of the numbers refers to a particular address family. In this case, we + have three address families listed; on my system, 18 is ``AF_LINK`` (which means + the link layer interface, e.g. Ethernet), 2 is ``AF_INET`` (normal Internet + addresses), and 30 is ``AF_INET6`` (IPv6). + + But wait! Don't use these numbers in your code. 
The numeric values here are + system dependent; fortunately, I thought of that when writing netifaces, so + the module declares a range of values that you might need. e.g. + + >>> netifaces.AF_LINK + 18 + + Again, on your system, the number may be different. + + So, what we've established is that the dictionary that's returned has one + entry for each address family for which this interface has an address. Let's + take a look at the ``AF_INET`` addresses now: + + >>> addrs = netifaces.ifaddresses('lo0') + >>> addrs[netifaces.AF_INET] + [{'peer': '127.0.0.1', 'netmask': '255.0.0.0', 'addr': '127.0.0.1'}] + + You might be wondering why this value is a list. The reason is that it's + possible for an interface to have more than one address, even within the + same family. I'll say that again: *you can have more than one address of + the same type associated with each interface*. + + *Asking for "the" address of a particular interface doesn't make sense.* + + Right, so, we can see that this particular interface only has one address, + and, because it's a loopback interface, it's point-to-point and therefore + has a *peer* address rather than a broadcast address. + + Let's look at a more interesting interface. + + >>> addrs = netifaces.ifaddresses('en0') + >>> addrs[netifaces.AF_INET] + [{'broadcast': '10.15.255.255', 'netmask': '255.240.0.0', 'addr': '10.0.1.4'}, {'broadcast': '192.168.0.255', 'addr': '192.168.0.47'}] + + This interface has two addresses (see, I told you...) Both of them are + regular IPv4 addresses, although in one case the netmask has been changed + from its default. The netmask *may not* appear on your system if it's set + to the default for the address range. + + Because this interface isn't point-to-point, it also has broadcast addresses. + + Now, say we want, instead of the IP addresses, to get the MAC address; that + is, the hardware address of the Ethernet adapter running this interface. We + can do + + >>> addrs[netifaces.AF_LINK] + [{'addr': '00:12:34:56:78:9a'}] + + Note that this may not be available on platforms without getifaddrs(), unless + they happen to implement ``SIOCGIFHWADDR``. Note also that you just get the + address; it's unlikely that you'll see anything else with an ``AF_LINK`` address. + Oh, and don't assume that all ``AF_LINK`` addresses are Ethernet; you might, for + instance, be on a Mac, in which case: + + >>> addrs = netifaces.ifaddresses('fw0') + >>> addrs[netifaces.AF_LINK] + [{'addr': '00:12:34:56:78:9a:bc:de'}] + + No, that isn't an exceptionally long Ethernet MAC address---it's a FireWire + address. + + As of version 0.10.0, you can also obtain a list of gateways on your + machine: + + >>> netifaces.gateways() + {2: [('10.0.1.1', 'en0', True), ('10.2.1.1', 'en1', False)], 30: [('fe80::1', 'en0', True)], 'default': { 2: ('10.0.1.1', 'en0'), 30: ('fe80::1', 'en0') }} + + This dictionary is keyed on address family---in this case, ``AF_INET``---and + each entry is a list of gateways as ``(address, interface, is_default)`` tuples. + Notice that here we have two separate gateways for IPv4 (``AF_INET``); some + operating systems support configurations like this and can either route packets + based on their source, or based on administratively configured routing tables. + + For convenience, we also allow you to index the dictionary with the special + value ``'default'``, which returns a dictionary mapping address families to the + default gateway in each case. 
Thus you can get the default IPv4 gateway with + + >>> gws = netifaces.gateways() + >>> gws['default'][netifaces.AF_INET] + ('10.0.1.1', 'en0') + + Do note that there may be no default gateway for any given address family; + this is currently very common for IPv6 and much less common for IPv4, but it + can happen even for ``AF_INET``. + + BTW, if you're trying to configure your machine to have multiple gateways for + the same address family, it's a very good idea to check the documentation for + your operating system *very* carefully, as some systems become extremely + confused or route packets in a non-obvious manner. + + I'm very interested in hearing from anyone (on any platform) for whom the + ``gateways()`` method doesn't produce the expected results. It's quite + complicated extracting this information from the operating system (whichever + operating system we're talking about), and so I expect there's at least one + system out there where this just won't work. + + 3. This is great! What platforms does it work on? + -------------------------------------------------- + + It gets regular testing on OS X, Linux and Windows. It has also been used + successfully on Solaris, and it's expected to work properly on other UNIX-like + systems as well. If you are running something that is not supported and + wish to contribute a patch, please use BitBucket to send a pull request. + + 4. What license is this under? + ------------------------------ + + It's an MIT-style license. Here goes: + + Copyright (c) 2007-2014 Alastair Houghton + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + + 5. Why the jump to 0.10.0? + -------------------------- + + Because someone released a fork of netifaces with version 0.9.0. + Hopefully, skipping past that version number will remove any confusion. In + addition, starting with 0.10.0, Python 3 is supported, and other + features/bugfixes have been included as well. See the CHANGELOG for a + more complete list of changes. 
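    To tie the walkthrough above together, here is a minimal, self-contained
    sketch (not part of the upstream netifaces documentation) that enumerates
    every IPv4 address on the machine and then looks up the default IPv4
    gateway. It uses only the calls shown above (``interfaces()``,
    ``ifaddresses()`` and ``gateways()``), and it deliberately guards against
    missing address families, since ``ifaddresses()`` only returns entries for
    the families an interface actually has::

        import netifaces

        # Print every IPv4 address on the machine, one line per address.
        for iface in netifaces.interfaces():
            addrs = netifaces.ifaddresses(iface)
            # An interface may have no AF_INET entry at all (e.g. a
            # link-layer-only interface), so use .get() rather than
            # indexing the dictionary directly.
            for entry in addrs.get(netifaces.AF_INET, []):
                print(iface, entry['addr'])

        # Look up the default IPv4 gateway, which may not exist.
        gws = netifaces.gateways()
        default = gws['default'].get(netifaces.AF_INET)
        if default is not None:
            address, interface = default
            print('default gateway:', address, 'on', interface)

    Using ``.get()`` in both places is the important design point: both the
    per-interface dictionary and the ``'default'`` dictionary are keyed only
    by the families that are actually present, so direct indexing can raise
    ``KeyError`` on perfectly healthy machines.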
+ +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: System :: Networking +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.5 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 diff --git a/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/SOURCES.txt b/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/SOURCES.txt new file mode 100644 index 0000000..a382c6c --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/SOURCES.txt @@ -0,0 +1,9 @@ +README.rst +netifaces.c +setup.cfg +setup.py +netifaces.egg-info/PKG-INFO +netifaces.egg-info/SOURCES.txt +netifaces.egg-info/dependency_links.txt +netifaces.egg-info/top_level.txt +netifaces.egg-info/zip-safe \ No newline at end of file diff --git a/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/dependency_links.txt b/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/installed-files.txt b/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/installed-files.txt new file mode 100644 index 0000000..472c8fc --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/installed-files.txt @@ -0,0 +1,7 @@ +../netifaces.cpython-34m.so +./ +PKG-INFO +dependency_links.txt +SOURCES.txt +top_level.txt +zip-safe diff --git a/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/top_level.txt b/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/top_level.txt new file mode 100644 index 0000000..3f008fd --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/top_level.txt @@ -0,0 +1 @@ +netifaces diff --git a/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/zip-safe b/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.4-py3.4.egg-info/zip-safe @@ -0,0 +1 @@ + diff --git a/lib/python3.4/site-packages/netifaces.cpython-34m.so b/lib/python3.4/site-packages/netifaces.cpython-34m.so new file mode 100755 index 0000000..fb37436 Binary files /dev/null and b/lib/python3.4/site-packages/netifaces.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/pip-1.5.6.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/pip-1.5.6.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..2e2d679 --- /dev/null +++ b/lib/python3.4/site-packages/pip-1.5.6.dist-info/DESCRIPTION.rst @@ -0,0 +1,71 @@ + +Project Info +============ + +* Project Page: https://github.com/pypa/pip +* Install howto: https://pip.pypa.io/en/latest/installing.html +* Changelog: https://pip.pypa.io/en/latest/news.html +* Bug Tracking: https://github.com/pypa/pip/issues +* Mailing list: http://groups.google.com/group/python-virtualenv +* Docs: https://pip.pypa.io/ +* User IRC: #pypa on Freenode. +* Dev IRC: #pypa-dev on Freenode. + +Quickstart +========== + +First, :doc:`Install pip `. + +Install a package from `PyPI`_: + +:: + + $ pip install SomePackage + [...] 
+ Successfully installed SomePackage + +Show what files were installed: + +:: + + $ pip show --files SomePackage + Name: SomePackage + Version: 1.0 + Location: /my/env/lib/pythonx.x/site-packages + Files: + ../somepackage/__init__.py + [...] + +List what packages are outdated: + +:: + + $ pip list --outdated + SomePackage (Current: 1.0 Latest: 2.0) + +Upgrade a package: + +:: + + $ pip install --upgrade SomePackage + [...] + Found existing installation: SomePackage 1.0 + Uninstalling SomePackage: + Successfully uninstalled SomePackage + Running setup.py install for SomePackage + Successfully installed SomePackage + +Uninstall a package: + +:: + + $ pip uninstall SomePackage + Uninstalling SomePackage: + /my/env/lib/pythonx.x/site-packages/somepackage + Proceed (y/n)? y + Successfully uninstalled SomePackage + + +.. _PyPI: http://pypi.python.org/pypi/ + + diff --git a/lib/python3.4/site-packages/pip-1.5.6.dist-info/METADATA b/lib/python3.4/site-packages/pip-1.5.6.dist-info/METADATA new file mode 100644 index 0000000..8bad7a6 --- /dev/null +++ b/lib/python3.4/site-packages/pip-1.5.6.dist-info/METADATA @@ -0,0 +1,97 @@ +Metadata-Version: 2.0 +Name: pip +Version: 1.5.6 +Summary: A tool for installing and managing Python packages. +Home-page: https://pip.pypa.io/ +Author: The pip developers +Author-email: python-virtualenv@groups.google.com +License: MIT +Keywords: easy_install distutils setuptools egg virtualenv +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Build Tools +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.1 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Provides-Extra: testing +Requires-Dist: pytest; extra == 'testing' +Requires-Dist: scripttest (>=1.3); extra == 'testing' +Requires-Dist: mock; extra == 'testing' + + +Project Info +============ + +* Project Page: https://github.com/pypa/pip +* Install howto: https://pip.pypa.io/en/latest/installing.html +* Changelog: https://pip.pypa.io/en/latest/news.html +* Bug Tracking: https://github.com/pypa/pip/issues +* Mailing list: http://groups.google.com/group/python-virtualenv +* Docs: https://pip.pypa.io/ +* User IRC: #pypa on Freenode. +* Dev IRC: #pypa-dev on Freenode. + +Quickstart +========== + +First, :doc:`Install pip `. + +Install a package from `PyPI`_: + +:: + + $ pip install SomePackage + [...] + Successfully installed SomePackage + +Show what files were installed: + +:: + + $ pip show --files SomePackage + Name: SomePackage + Version: 1.0 + Location: /my/env/lib/pythonx.x/site-packages + Files: + ../somepackage/__init__.py + [...] + +List what packages are outdated: + +:: + + $ pip list --outdated + SomePackage (Current: 1.0 Latest: 2.0) + +Upgrade a package: + +:: + + $ pip install --upgrade SomePackage + [...] + Found existing installation: SomePackage 1.0 + Uninstalling SomePackage: + Successfully uninstalled SomePackage + Running setup.py install for SomePackage + Successfully installed SomePackage + +Uninstall a package: + +:: + + $ pip uninstall SomePackage + Uninstalling SomePackage: + /my/env/lib/pythonx.x/site-packages/somepackage + Proceed (y/n)? y + Successfully uninstalled SomePackage + + +.. 
_PyPI: http://pypi.python.org/pypi/ + + diff --git a/lib/python3.4/site-packages/pip-1.5.6.dist-info/RECORD b/lib/python3.4/site-packages/pip-1.5.6.dist-info/RECORD new file mode 100644 index 0000000..8544563 --- /dev/null +++ b/lib/python3.4/site-packages/pip-1.5.6.dist-info/RECORD @@ -0,0 +1,80 @@ +pip-1.5.6.dist-info/entry_points.txt,sha256=1-e4WB_Fe8mWHrMi1YQo_s5knbh0lu_uRmd8Wb6MJfY,68 +pip-1.5.6.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110 +pip-1.5.6.dist-info/metadata.json,sha256=FTHiqyD_sCWIrHxPzd_PkxOksmFyFLqpUwnT20gr9Gc,1379 +pip-1.5.6.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip-1.5.6.dist-info/DESCRIPTION.rst,sha256=n5sT7bxCOnG9ej7TtEjjARQZ_n2ECqWFDiJK88BM0u0,1422 +pip-1.5.6.dist-info/RECORD,, +pip-1.5.6.dist-info/METADATA,sha256=lzWjAnNXrB51bQ7xdjWidesBvidhIm0QMK5qdjl_KT4,2444 +pip/pep425tags.py,sha256=jb5Rq395Gz_Uv8kn3L9Im1HX7EhEj8nqyYX0nXulzWo,2969 +pip/basecommand.py,sha256=N_nE7BCcoMA7t2nRNTiJB8T__1XqI74SJI2G72VaM2E,6578 +pip/index.py,sha256=STYPJFaliyW7bv3PqZZRN_BNBdPXYqdRs19VT__TzCM,40374 +pip/wheel.py,sha256=kLXOu5T_PBIJ1vona020Tsb_S_qzcw_I2XrZ5gdRdjE,20551 +pip/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116 +pip/__init__.py,sha256=KSoPZT91fVHfh_z_XkRFfYrupZIMzN35PsQWXpg-Ul4,12070 +pip/locations.py,sha256=YyFyCLYADKgT5x-Ctj_LeZl5bEzkbBXuR2Iv8IbVqDA,6202 +pip/__main__.py,sha256=9JBJhprGRLUy1fEvAdufs0tsjKFAvFAY_nTde6GDkHk,116 +pip/cmdoptions.py,sha256=2E4AK_UNbyvJE6bmvlvsWhkrttE3jrwM_hNYpIecLfE,9330 +pip/runner.py,sha256=VkcZKNikprpDOSl2Y3m0FaQbdGuYsoHkxdhjtL0N3oA,431 +pip/log.py,sha256=YysOwj7x7qMnv8E8H5scLAqDUM2xrwXTkUj94EmdAwo,9438 +pip/baseparser.py,sha256=GHPwVFcD9HVgQSNnmW5s7_tNa-lyLJbl5iQH9WnWBBo,8124 +pip/exceptions.py,sha256=wAoboA4PdhGN7xH-ayf_dcDFPYZe9XAivAlZJbOgCN4,1086 +pip/req.py,sha256=519qbgmVY_l_ymGNOiuI8Ya4ueyb3d4WJdaz6kLep0o,83782 +pip/download.py,sha256=UWtMRDLPa0ONxw-LMmcCZ_3Hh9NiVnOQA4tHRUNACu8,22491 +pip/util.py,sha256=y83oY2wa5OysB9JBCQt6EFM-2jdJ3T7xNe9Jl35o0kY,25816 +pip/vcs/subversion.py,sha256=P31K7o83JdcipIyuEVlnpSp5KZqakb4OJ1PKT-FB7C8,10640 +pip/vcs/__init__.py,sha256=kS31hLmJ6BgKnBu8kvXKQlJEwoj1MxYE7wfRuFL-daM,8748 +pip/vcs/git.py,sha256=ib3TqDwJyfjBnSRFKVe_HhNdwkmfcOZfJHbqt2RUOVg,7898 +pip/vcs/mercurial.py,sha256=71ESfgxotPPPZjiH6sMTBWcj5TS8kjgJxVnWrRb3bwo,5820 +pip/vcs/bazaar.py,sha256=qUIuIqDJqwZ_nP6WR52YwvYVy1lvIUmvaT-IdxDYUHo,4943 +pip/commands/show.py,sha256=klFQt5fhN5zydPnoe-IugUoWwTOK5WcT9GmjyWB04Wo,2750 +pip/commands/bundle.py,sha256=tK8LU3Khjkrz65y3brNP71QOBkQCb9mlv9x8s1W02T4,1787 +pip/commands/help.py,sha256=ETLg8xfv8uFwS3KvxmsCE-I56S15jUTvfkwaPAA18pE,927 +pip/commands/install.py,sha256=JIorvRJM2a-O_UFqE8_2l7obIgrKx07Wzk7rG1tiRZY,13396 +pip/commands/wheel.py,sha256=8CLXrGKgkAiBiZ56vBHlQa8rxWTDf-qzHv5hmAyRWPw,7891 +pip/commands/list.py,sha256=FHf7H35AajbCuymiG2z8xAGNSx8W5CNZKj6Hh2QGo38,6814 +pip/commands/__init__.py,sha256=N_4io-oGcWF9-raDN5TYXbGlJFsx5po36HZmwgLso6I,2236 +pip/commands/search.py,sha256=RPaWhZ49vZnk0hiuNCjziniYZ_Ln5lNIbtamTC633rM,4719 +pip/commands/uninstall.py,sha256=MF4zSLfMxnH3E8T673ORNWz0Bsc4C6LEI5KImpAQrck,2203 +pip/commands/completion.py,sha256=LnJgUrpGGO4x2Y8VdwhKda4kGZWMFO28P4jYzYT5Q8k,1838 +pip/commands/freeze.py,sha256=PFvq9h0joZOpnKv4-ohLIQ3dLhtyEIlebzhqKFiKvYo,4647 +pip/commands/unzip.py,sha256=_PeTWKOd_iRxPt_7njQ8jGFpjX006vobn593tcIyeUc,185 +pip/commands/zip.py,sha256=KECCb3oCHxJqDT3kUEnlf0udp31Ckoe8oyEKdS7EKNQ,14821 +pip/backwardcompat/__init__.py,sha256=AcP5dr3nL-4AGxSwsFIEUcf9ki0ROUFwfc0IrIeHaJI,3756 
+/home/pi/oml/platform_linux_armv7l/p34/bin/pip,sha256=rKSTiOz624REwuRvVhDOXTcPHACMWkQt9h06EJJqcqw,241 +/home/pi/oml/platform_linux_armv7l/p34/bin/pip3,sha256=rKSTiOz624REwuRvVhDOXTcPHACMWkQt9h06EJJqcqw,241 +/home/pi/oml/platform_linux_armv7l/p34/bin/pip3.4,sha256=rKSTiOz624REwuRvVhDOXTcPHACMWkQt9h06EJJqcqw,241 +pip/commands/__pycache__/freeze.cpython-34.pyc,, +pip/__pycache__/runner.cpython-34.pyc,, +pip/vcs/__pycache__/__init__.cpython-34.pyc,, +pip/__pycache__/__init__.cpython-34.pyc,, +pip/__pycache__/wheel.cpython-34.pyc,, +pip/commands/__pycache__/wheel.cpython-34.pyc,, +pip/__pycache__/req.cpython-34.pyc,, +pip/__pycache__/cmdoptions.cpython-34.pyc,, +pip/commands/__pycache__/list.cpython-34.pyc,, +pip/__pycache__/util.cpython-34.pyc,, +pip/commands/__pycache__/unzip.cpython-34.pyc,, +pip/__pycache__/index.cpython-34.pyc,, +pip/commands/__pycache__/show.cpython-34.pyc,, +pip/commands/__pycache__/uninstall.cpython-34.pyc,, +pip/__pycache__/__main__.cpython-34.pyc,, +pip/commands/__pycache__/zip.cpython-34.pyc,, +pip/commands/__pycache__/help.cpython-34.pyc,, +pip/vcs/__pycache__/git.cpython-34.pyc,, +pip/commands/__pycache__/install.cpython-34.pyc,, +pip/__pycache__/exceptions.cpython-34.pyc,, +pip/commands/__pycache__/bundle.cpython-34.pyc,, +pip/backwardcompat/__pycache__/__init__.cpython-34.pyc,, +pip/vcs/__pycache__/bazaar.cpython-34.pyc,, +pip/commands/__pycache__/search.cpython-34.pyc,, +pip/__pycache__/download.cpython-34.pyc,, +pip/__pycache__/log.cpython-34.pyc,, +pip/__pycache__/pep425tags.cpython-34.pyc,, +pip/commands/__pycache__/completion.cpython-34.pyc,, +pip/commands/__pycache__/__init__.cpython-34.pyc,, +pip/__pycache__/basecommand.cpython-34.pyc,, +pip/__pycache__/status_codes.cpython-34.pyc,, +pip/vcs/__pycache__/mercurial.cpython-34.pyc,, +pip/__pycache__/baseparser.cpython-34.pyc,, +pip/__pycache__/locations.cpython-34.pyc,, +pip/vcs/__pycache__/subversion.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/pip-1.5.6.dist-info/WHEEL b/lib/python3.4/site-packages/pip-1.5.6.dist-info/WHEEL new file mode 100644 index 0000000..9dff69d --- /dev/null +++ b/lib/python3.4/site-packages/pip-1.5.6.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.24.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/lib/python3.4/site-packages/pip-1.5.6.dist-info/entry_points.txt b/lib/python3.4/site-packages/pip-1.5.6.dist-info/entry_points.txt new file mode 100644 index 0000000..a237b5e --- /dev/null +++ b/lib/python3.4/site-packages/pip-1.5.6.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +pip = pip:main +pip3 = pip:main +pip3.4 = pip:main + diff --git a/lib/python3.4/site-packages/pip-1.5.6.dist-info/metadata.json b/lib/python3.4/site-packages/pip-1.5.6.dist-info/metadata.json new file mode 100644 index 0000000..aa993fa --- /dev/null +++ b/lib/python3.4/site-packages/pip-1.5.6.dist-info/metadata.json @@ -0,0 +1 @@ +{"license": "MIT", "metadata_version": "2.0", "extras": ["testing"], "version": "1.5.6", "test_requires": [{"requires": ["pytest", "scripttest (>=1.3)", "mock"]}], "keywords": ["easy_install", "distutils", "setuptools", "egg", "virtualenv"], "run_requires": [{"requires": ["pytest", "scripttest (>=1.3)", "mock"], "extra": "testing"}], "generator": "bdist_wheel (0.24.0)", "extensions": {"python.details": {"contacts": [{"role": "author", "email": "python-virtualenv@groups.google.com", "name": "The pip developers"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": 
"https://pip.pypa.io/"}}, "python.exports": {"console_scripts": {"pip3": "pip:main", "pip": "pip:main", "pip3.4": "pip:main"}}, "python.commands": {"wrap_console": {"pip3": "pip:main", "pip": "pip:main", "pip3.4": "pip:main"}}}, "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Build Tools", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3"], "summary": "A tool for installing and managing Python packages.", "name": "pip"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/pip-1.5.6.dist-info/top_level.txt b/lib/python3.4/site-packages/pip-1.5.6.dist-info/top_level.txt new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lib/python3.4/site-packages/pip-1.5.6.dist-info/top_level.txt @@ -0,0 +1 @@ +pip diff --git a/lib/python3.4/site-packages/pip/__init__.py b/lib/python3.4/site-packages/pip/__init__.py new file mode 100644 index 0000000..eba984e --- /dev/null +++ b/lib/python3.4/site-packages/pip/__init__.py @@ -0,0 +1,340 @@ +#!/usr/bin/env python +import os +import optparse + +import sys +import re +import errno + +# Upstream pip vendorizes a bunch of its dependencies. Debian de-vendorizes +# (unbundles) these dependencies to be compliant with Debian policy. Instead, +# these dependencies are packaged as wheel (.whl) files in a known location. +# When pip itself executes, we have to arrange for these wheels to show up +# earlier on sys.path than any other version of these packages, otherwise +# things can break. See for example Bug #744145. +# +# The location of the wheels differs depending on whether we're inside or +# outside a virtual environment, regardless of whether that venv was created +# with virtualenv or pyvenv. The first thing we have to do is figure out if +# we're inside or outside a venv, then search the appropriate wheel directory +# and add all the .whls found there to the front of sys.path. As per Debian +# Python Policy, only the wheels needed to support this de-vendorization will +# be present, so it's safe to add them all. +# +# venv determination is a bit of a black art, but this algorithm should work +# in both Python 2 (virtualenv-only) and Python 3 (pyvenv and virtualenv). - +# updated by barry@debian.org 2015-02-25 + +base_prefix = getattr(sys, 'base_prefix', None) +real_prefix = getattr(sys, 'real_prefix', None) +if base_prefix is None: + # Python 2 has no base_prefix at all. It also has no pyvenv. Fall back + # to checking real_prefix. + if real_prefix is None: + # We are not in a venv. + in_venv = False + else: + # We're in a Python 2 virtualenv created venv, but real_prefix should + # never be the same as sys.prefix. + assert sys.prefix != real_prefix + in_venv = True +elif sys.prefix != base_prefix: + # We're in a Python 3, pyvenv created venv. + in_venv = True +elif real_prefix is None: + # We're in Python 3, outside a venv, but base better equal prefix. + assert sys.prefix == base_prefix + in_venv = False +else: + # We're in a Python 3, virtualenv created venv. 
+ assert real_prefix != sys.prefix + in_venv = True + + +if in_venv: + wheel_dir = os.path.join(sys.prefix, 'lib', 'python-wheels') +else: + wheel_dir = '/usr/share/python-wheels' + +# We'll add all the wheels we find to the front of sys.path so that they're +# found first, even if the same dependencies are available in site-packages. +try: + for filename in os.listdir(wheel_dir): + if os.path.splitext(filename)[1] == '.whl': + sys.path.insert(0, os.path.join(wheel_dir, filename)) +# FileNotFoundError doesn't exist in Python 2, but ignore it anyway. +except OSError as error: + if error.errno != errno.ENOENT: + raise + + +from pip.exceptions import InstallationError, CommandError, PipError +from pip.log import logger +from pip.util import get_installed_distributions, get_prog +from pip.vcs import git, mercurial, subversion, bazaar # noqa +from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip.commands import commands, get_summaries, get_similar_commands + +# This fixes a peculiarity when importing via __import__ - as we are +# initialising the pip module, "from pip import cmdoptions" is recursive +# and appears not to work properly in that situation. +import pip.cmdoptions +cmdoptions = pip.cmdoptions + +# The version as used in the setup.py and the docs conf.py +__version__ = "1.5.6" + + +def autocomplete(): + """Command and option completion for the main option parser (and options) + and its subcommands (and options). + + Enable by sourcing one of the completion shell scripts (bash or zsh). + """ + # Don't complete if user hasn't sourced bash_completion file. + if 'PIP_AUTO_COMPLETE' not in os.environ: + return + cwords = os.environ['COMP_WORDS'].split()[1:] + cword = int(os.environ['COMP_CWORD']) + try: + current = cwords[cword - 1] + except IndexError: + current = '' + + subcommands = [cmd for cmd, summary in get_summaries()] + options = [] + # subcommand + try: + subcommand_name = [w for w in cwords if w in subcommands][0] + except IndexError: + subcommand_name = None + + parser = create_main_parser() + # subcommand options + if subcommand_name: + # special case: 'help' subcommand has no options + if subcommand_name == 'help': + sys.exit(1) + # special case: list locally installed dists for uninstall command + if subcommand_name == 'uninstall' and not current.startswith('-'): + installed = [] + lc = current.lower() + for dist in get_installed_distributions(local_only=True): + if dist.key.startswith(lc) and dist.key not in cwords[1:]: + installed.append(dist.key) + # if there are no dists installed, fall back to option completion + if installed: + for dist in installed: + print(dist) + sys.exit(1) + + subcommand = commands[subcommand_name]() + options += [(opt.get_opt_string(), opt.nargs) + for opt in subcommand.parser.option_list_all + if opt.help != optparse.SUPPRESS_HELP] + + # filter out previously specified options from available options + prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]] + options = [(x, v) for (x, v) in options if x not in prev_opts] + # filter options by current input + options = [(k, v) for k, v in options if k.startswith(current)] + for option in options: + opt_label = option[0] + # append '=' to options which require args + if option[1]: + opt_label += '=' + print(opt_label) + else: + # show main parser options only when necessary + if current.startswith('-') or current.startswith('--'): + opts = [i.option_list for i in parser.option_groups] + opts.append(parser.option_list) + opts = (o for it in opts for o in it) + + 
subcommands += [i.get_opt_string() for i in opts + if i.help != optparse.SUPPRESS_HELP] + + print(' '.join([x for x in subcommands if x.startswith(current)])) + sys.exit(1) + + +def create_main_parser(): + parser_kw = { + 'usage': '\n%prog [options]', + 'add_help_option': False, + 'formatter': UpdatingDefaultsHelpFormatter(), + 'name': 'global', + 'prog': get_prog(), + } + + parser = ConfigOptionParser(**parser_kw) + parser.disable_interspersed_args() + + pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + parser.version = 'pip %s from %s (python %s)' % ( + __version__, pip_pkg_dir, sys.version[:3]) + + # add the general options + gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) + parser.add_option_group(gen_opts) + + parser.main = True # so the help formatter knows + + # create command listing for description + command_summaries = get_summaries() + description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries] + parser.description = '\n'.join(description) + + return parser + + +def parseopts(args): + parser = create_main_parser() + + # Note: parser calls disable_interspersed_args(), so the result of this call + # is to split the initial args into the general options before the + # subcommand and everything else. + # For example: + # args: ['--timeout=5', 'install', '--user', 'INITools'] + # general_options: ['--timeout==5'] + # args_else: ['install', '--user', 'INITools'] + general_options, args_else = parser.parse_args(args) + + # --version + if general_options.version: + sys.stdout.write(parser.version) + sys.stdout.write(os.linesep) + sys.exit() + + # pip || pip help -> print_help() + if not args_else or (args_else[0] == 'help' and len(args_else) == 1): + parser.print_help() + sys.exit() + + # the subcommand name + cmd_name = args_else[0].lower() + + #all the args without the subcommand + cmd_args = args[:] + cmd_args.remove(args_else[0].lower()) + + if cmd_name not in commands: + guess = get_similar_commands(cmd_name) + + msg = ['unknown command "%s"' % cmd_name] + if guess: + msg.append('maybe you meant "%s"' % guess) + + raise CommandError(' - '.join(msg)) + + return cmd_name, cmd_args + + +def main(initial_args=None): + if initial_args is None: + initial_args = sys.argv[1:] + + autocomplete() + + try: + cmd_name, cmd_args = parseopts(initial_args) + except PipError: + e = sys.exc_info()[1] + sys.stderr.write("ERROR: %s" % e) + sys.stderr.write(os.linesep) + sys.exit(1) + + command = commands[cmd_name]() + return command.main(cmd_args) + + +def bootstrap(): + """ + Bootstrapping function to be called from install-pip.py script. 
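+
+    It installs or upgrades pip itself (adding setuptools to the list
+    when it cannot be imported) by delegating to ``main()``.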
+ """ + pkgs = ['pip'] + try: + import setuptools + except ImportError: + pkgs.append('setuptools') + return main(['install', '--upgrade'] + pkgs + sys.argv[1:]) + +############################################################ +## Writing freeze files + + +class FrozenRequirement(object): + + def __init__(self, name, req, editable, comments=()): + self.name = name + self.req = req + self.editable = editable + self.comments = comments + + _rev_re = re.compile(r'-r(\d+)$') + _date_re = re.compile(r'-(20\d\d\d\d\d\d)$') + + @classmethod + def from_dist(cls, dist, dependency_links, find_tags=False): + location = os.path.normcase(os.path.abspath(dist.location)) + comments = [] + from pip.vcs import vcs, get_src_requirement + if vcs.get_backend_name(location): + editable = True + try: + req = get_src_requirement(dist, location, find_tags) + except InstallationError: + ex = sys.exc_info()[1] + logger.warn("Error when trying to get requirement for VCS system %s, falling back to uneditable format" % ex) + req = None + if req is None: + logger.warn('Could not determine repository location of %s' % location) + comments.append('## !! Could not determine repository location') + req = dist.as_requirement() + editable = False + else: + editable = False + req = dist.as_requirement() + specs = req.specs + assert len(specs) == 1 and specs[0][0] == '==' + version = specs[0][1] + ver_match = cls._rev_re.search(version) + date_match = cls._date_re.search(version) + if ver_match or date_match: + svn_backend = vcs.get_backend('svn') + if svn_backend: + svn_location = svn_backend( + ).get_location(dist, dependency_links) + if not svn_location: + logger.warn( + 'Warning: cannot find svn location for %s' % req) + comments.append('## FIXME: could not find svn URL in dependency_links for this package:') + else: + comments.append('# Installing as editable to satisfy requirement %s:' % req) + if ver_match: + rev = ver_match.group(1) + else: + rev = '{%s}' % date_match.group(1) + editable = True + req = '%s@%s#egg=%s' % (svn_location, rev, cls.egg_name(dist)) + return cls(dist.project_name, req, editable, comments) + + @staticmethod + def egg_name(dist): + name = dist.egg_name() + match = re.search(r'-py\d\.\d$', name) + if match: + name = name[:match.start()] + return name + + def __str__(self): + req = self.req + if self.editable: + req = '-e %s' % req + return '\n'.join(list(self.comments) + [str(req)]) + '\n' + + +if __name__ == '__main__': + exit = main() + if exit: + sys.exit(exit) diff --git a/lib/python3.4/site-packages/pip/__main__.py b/lib/python3.4/site-packages/pip/__main__.py new file mode 100644 index 0000000..5ca3746 --- /dev/null +++ b/lib/python3.4/site-packages/pip/__main__.py @@ -0,0 +1,7 @@ +import sys +from .runner import run + +if __name__ == '__main__': + exit = run() + if exit: + sys.exit(exit) diff --git a/lib/python3.4/site-packages/pip/backwardcompat/__init__.py b/lib/python3.4/site-packages/pip/backwardcompat/__init__.py new file mode 100644 index 0000000..c327bbe --- /dev/null +++ b/lib/python3.4/site-packages/pip/backwardcompat/__init__.py @@ -0,0 +1,138 @@ +"""Stuff that differs in different Python versions and platform +distributions.""" + +import os +import imp +import sys +import site + +__all__ = ['WindowsError'] + +uses_pycache = hasattr(imp, 'cache_from_source') + +class NeverUsedException(Exception): + """this exception should never be raised""" + +try: + WindowsError = WindowsError +except NameError: + WindowsError = NeverUsedException + +try: + #new in Python 3.3 + PermissionError 
= PermissionError +except NameError: + PermissionError = NeverUsedException + +console_encoding = sys.__stdout__.encoding + +if sys.version_info >= (3,): + from io import StringIO, BytesIO + from functools import reduce + from urllib.error import URLError, HTTPError + from queue import Queue, Empty + from urllib.request import url2pathname, urlretrieve, pathname2url + from email import message as emailmessage + import urllib.parse as urllib + import urllib.request as urllib2 + import configparser as ConfigParser + import xmlrpc.client as xmlrpclib + import urllib.parse as urlparse + import http.client as httplib + + def cmp(a, b): + return (a > b) - (a < b) + + def b(s): + return s.encode('utf-8') + + def u(s): + return s.decode('utf-8') + + def console_to_str(s): + try: + return s.decode(console_encoding) + except UnicodeDecodeError: + return s.decode('utf_8') + + def get_http_message_param(http_message, param, default_value): + return http_message.get_param(param, default_value) + + bytes = bytes + string_types = (str,) + raw_input = input +else: + from cStringIO import StringIO + from urllib2 import URLError, HTTPError + from Queue import Queue, Empty + from urllib import url2pathname, urlretrieve, pathname2url + from email import Message as emailmessage + import urllib + import urllib2 + import urlparse + import ConfigParser + import xmlrpclib + import httplib + + def b(s): + return s + + def u(s): + return s + + def console_to_str(s): + return s + + def get_http_message_param(http_message, param, default_value): + result = http_message.getparam(param) + return result or default_value + + bytes = str + string_types = (basestring,) + reduce = reduce + cmp = cmp + raw_input = raw_input + BytesIO = StringIO + + +from distutils.sysconfig import get_python_lib, get_python_version + +#site.USER_SITE was created in py2.6 +user_site = getattr(site, 'USER_SITE', None) + + +def product(*args, **kwds): + # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy + # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111 + pools = list(map(tuple, args)) * kwds.get('repeat', 1) + result = [[]] + for pool in pools: + result = [x + [y] for x in result for y in pool] + for prod in result: + yield tuple(prod) + + +def get_path_uid(path): + """ + Return path's uid. + + Does not follow symlinks: https://github.com/pypa/pip/pull/935#discussion_r5307003 + + Placed this function in backwardcompat due to differences on AIX and Jython, + that should eventually go away. + + :raises OSError: When path is a symlink or can't be read. 
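+
+    Illustrative check (hypothetical path)::
+
+        if get_path_uid('/some/path') == os.getuid():
+            pass  # the current user owns /some/path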
+ """ + if hasattr(os, 'O_NOFOLLOW'): + fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) + file_uid = os.fstat(fd).st_uid + os.close(fd) + else: # AIX and Jython + # WARNING: time of check vulnerabity, but best we can do w/o NOFOLLOW + if not os.path.islink(path): + # older versions of Jython don't have `os.fstat` + file_uid = os.stat(path).st_uid + else: + # raise OSError for parity with os.O_NOFOLLOW above + raise OSError("%s is a symlink; Will not return uid for symlinks" % path) + return file_uid diff --git a/lib/python3.4/site-packages/pip/basecommand.py b/lib/python3.4/site-packages/pip/basecommand.py new file mode 100644 index 0000000..e467019 --- /dev/null +++ b/lib/python3.4/site-packages/pip/basecommand.py @@ -0,0 +1,201 @@ +"""Base Command class, and related routines""" + +import os +import sys +import tempfile +import traceback +import time +import optparse + +from pip import cmdoptions +from pip.locations import running_under_virtualenv +from pip.log import logger +from pip.download import PipSession +from pip.exceptions import (BadCommand, InstallationError, UninstallationError, + CommandError, PreviousBuildDirError) +from pip.backwardcompat import StringIO +from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip.status_codes import (SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND, + PREVIOUS_BUILD_DIR_ERROR) +from pip.util import get_prog + + +__all__ = ['Command'] + + +class Command(object): + name = None + usage = None + hidden = False + + def __init__(self): + parser_kw = { + 'usage': self.usage, + 'prog': '%s %s' % (get_prog(), self.name), + 'formatter': UpdatingDefaultsHelpFormatter(), + 'add_help_option': False, + 'name': self.name, + 'description': self.__doc__, + } + + self.parser = ConfigOptionParser(**parser_kw) + + # Commands should add options to this option group + optgroup_name = '%s Options' % self.name.capitalize() + self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) + + # Add the general options + gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, self.parser) + self.parser.add_option_group(gen_opts) + + def _build_session(self, options): + session = PipSession() + + # Handle custom ca-bundles from the user + if options.cert: + session.verify = options.cert + + # Handle timeouts + if options.timeout: + session.timeout = options.timeout + + # Handle configured proxies + if options.proxy: + session.proxies = { + "http": options.proxy, + "https": options.proxy, + } + + # Determine if we can prompt the user for authentication or not + session.auth.prompting = not options.no_input + + return session + + def setup_logging(self): + pass + + def parse_args(self, args): + # factored out for testability + return self.parser.parse_args(args) + + def main(self, args): + options, args = self.parse_args(args) + + level = 1 # Notify + level += options.verbose + level -= options.quiet + level = logger.level_for_integer(4 - level) + complete_log = [] + logger.add_consumers( + (level, sys.stdout), + (logger.DEBUG, complete_log.append), + ) + if options.log_explicit_levels: + logger.explicit_levels = True + + self.setup_logging() + + #TODO: try to get these passing down from the command? + # without resorting to os.environ to hold these. 
+ + if options.no_input: + os.environ['PIP_NO_INPUT'] = '1' + + if options.exists_action: + os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action) + + if options.require_venv: + # If a venv is required check if it can really be found + if not running_under_virtualenv(): + logger.fatal('Could not find an activated virtualenv (required).') + sys.exit(VIRTUALENV_NOT_FOUND) + + if options.log: + log_fp = open_logfile(options.log, 'a') + logger.add_consumers((logger.DEBUG, log_fp)) + else: + log_fp = None + + exit = SUCCESS + store_log = False + try: + status = self.run(options, args) + # FIXME: all commands should return an exit status + # and when it is done, isinstance is not needed anymore + if isinstance(status, int): + exit = status + except PreviousBuildDirError: + e = sys.exc_info()[1] + logger.fatal(str(e)) + logger.info('Exception information:\n%s' % format_exc()) + store_log = True + exit = PREVIOUS_BUILD_DIR_ERROR + except (InstallationError, UninstallationError): + e = sys.exc_info()[1] + logger.fatal(str(e)) + logger.info('Exception information:\n%s' % format_exc()) + store_log = True + exit = ERROR + except BadCommand: + e = sys.exc_info()[1] + logger.fatal(str(e)) + logger.info('Exception information:\n%s' % format_exc()) + store_log = True + exit = ERROR + except CommandError: + e = sys.exc_info()[1] + logger.fatal('ERROR: %s' % e) + logger.info('Exception information:\n%s' % format_exc()) + exit = ERROR + except KeyboardInterrupt: + logger.fatal('Operation cancelled by user') + logger.info('Exception information:\n%s' % format_exc()) + store_log = True + exit = ERROR + except: + logger.fatal('Exception:\n%s' % format_exc()) + store_log = True + exit = UNKNOWN_ERROR + if store_log: + log_file_fn = options.log_file + text = '\n'.join(complete_log) + try: + log_file_fp = open_logfile(log_file_fn, 'w') + except IOError: + temp = tempfile.NamedTemporaryFile(delete=False) + log_file_fn = temp.name + log_file_fp = open_logfile(log_file_fn, 'w') + logger.fatal('Storing debug log for failure in %s' % log_file_fn) + log_file_fp.write(text) + log_file_fp.close() + if log_fp is not None: + log_fp.close() + return exit + + +def format_exc(exc_info=None): + if exc_info is None: + exc_info = sys.exc_info() + out = StringIO() + traceback.print_exception(*exc_info, **dict(file=out)) + return out.getvalue() + + +def open_logfile(filename, mode='a'): + """Open the named log file in append mode. + + If the file already exists, a separator will also be printed to + the file to separate past activity from current activity. 
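+
+    Sketch of typical use (the path is illustrative)::
+
+        log_fp = open_logfile('~/.pip/pip.log')  # '~' is expanded
+        log_fp.write('some message\n')
+        log_fp.close()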
+ """ + filename = os.path.expanduser(filename) + filename = os.path.abspath(filename) + dirname = os.path.dirname(filename) + if not os.path.exists(dirname): + os.makedirs(dirname) + exists = os.path.exists(filename) + + log_fp = open(filename, mode) + if exists: + log_fp.write('%s\n' % ('-' * 60)) + log_fp.write('%s run on %s\n' % (sys.argv[0], time.strftime('%c'))) + return log_fp diff --git a/lib/python3.4/site-packages/pip/baseparser.py b/lib/python3.4/site-packages/pip/baseparser.py new file mode 100644 index 0000000..a5e8bac --- /dev/null +++ b/lib/python3.4/site-packages/pip/baseparser.py @@ -0,0 +1,223 @@ +"""Base option parser setup""" + +import sys +import optparse +import os +import textwrap +from distutils.util import strtobool + +from pip.backwardcompat import ConfigParser, string_types +from pip.locations import default_config_file +from pip.util import get_terminal_size, get_prog + + +class PrettyHelpFormatter(optparse.IndentedHelpFormatter): + """A prettier/less verbose help formatter for optparse.""" + + def __init__(self, *args, **kwargs): + # help position must be aligned with __init__.parseopts.description + kwargs['max_help_position'] = 30 + kwargs['indent_increment'] = 1 + kwargs['width'] = get_terminal_size()[0] - 2 + optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs) + + def format_option_strings(self, option): + return self._format_option_strings(option, ' <%s>', ', ') + + def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '): + """ + Return a comma-separated list of option strings and metavars. + + :param option: tuple of (short opt, long opt), e.g: ('-f', '--format') + :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar + :param optsep: separator + """ + opts = [] + + if option._short_opts: + opts.append(option._short_opts[0]) + if option._long_opts: + opts.append(option._long_opts[0]) + if len(opts) > 1: + opts.insert(1, optsep) + + if option.takes_value(): + metavar = option.metavar or option.dest.lower() + opts.append(mvarfmt % metavar.lower()) + + return ''.join(opts) + + def format_heading(self, heading): + if heading == 'Options': + return '' + return heading + ':\n' + + def format_usage(self, usage): + """ + Ensure there is only one newline between usage and the first heading + if there is no description. 
+ """ + msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ") + return msg + + def format_description(self, description): + # leave full control over description to us + if description: + if hasattr(self.parser, 'main'): + label = 'Commands' + else: + label = 'Description' + #some doc strings have inital newlines, some don't + description = description.lstrip('\n') + #some doc strings have final newlines and spaces, some don't + description = description.rstrip() + #dedent, then reindent + description = self.indent_lines(textwrap.dedent(description), " ") + description = '%s:\n%s\n' % (label, description) + return description + else: + return '' + + def format_epilog(self, epilog): + # leave full control over epilog to us + if epilog: + return epilog + else: + return '' + + def indent_lines(self, text, indent): + new_lines = [indent + line for line in text.split('\n')] + return "\n".join(new_lines) + + +class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter): + """Custom help formatter for use in ConfigOptionParser that updates + the defaults before expanding them, allowing them to show up correctly + in the help listing""" + + def expand_default(self, option): + if self.parser is not None: + self.parser.update_defaults(self.parser.defaults) + return optparse.IndentedHelpFormatter.expand_default(self, option) + + +class CustomOptionParser(optparse.OptionParser): + def insert_option_group(self, idx, *args, **kwargs): + """Insert an OptionGroup at a given position.""" + group = self.add_option_group(*args, **kwargs) + + self.option_groups.pop() + self.option_groups.insert(idx, group) + + return group + + @property + def option_list_all(self): + """Get a list of all options, including those in option groups.""" + res = self.option_list[:] + for i in self.option_groups: + res.extend(i.option_list) + + return res + + +class ConfigOptionParser(CustomOptionParser): + """Custom option parser which updates its defaults by checking the + configuration files and environmental variables""" + + def __init__(self, *args, **kwargs): + self.config = ConfigParser.RawConfigParser() + self.name = kwargs.pop('name') + self.files = self.get_config_files() + if self.files: + self.config.read(self.files) + assert self.name + optparse.OptionParser.__init__(self, *args, **kwargs) + + def get_config_files(self): + config_file = os.environ.get('PIP_CONFIG_FILE', False) + if config_file == os.devnull: + return [] + if config_file and os.path.exists(config_file): + return [config_file] + return [default_config_file] + + def check_default(self, option, key, val): + try: + return option.check_value(key, val) + except optparse.OptionValueError: + e = sys.exc_info()[1] + print("An error occurred during configuration: %s" % e) + sys.exit(3) + + def update_defaults(self, defaults): + """Updates the given defaults with values from the config files and + the environ. Does a little special handling for certain types of + options (lists).""" + # Then go and look for the other sources of configuration: + config = {} + # 1. config files + for section in ('global', self.name): + config.update(self.normalize_keys(self.get_config_section(section))) + # 2. 
environmental variables + config.update(self.normalize_keys(self.get_environ_vars())) + # Then set the options with those values + for key, val in config.items(): + option = self.get_option(key) + if option is not None: + # ignore empty values + if not val: + continue + if option.action in ('store_true', 'store_false', 'count'): + val = strtobool(val) + if option.action == 'append': + val = val.split() + val = [self.check_default(option, key, v) for v in val] + else: + val = self.check_default(option, key, val) + + defaults[option.dest] = val + return defaults + + def normalize_keys(self, items): + """Return a config dictionary with normalized keys regardless of + whether the keys were specified in environment variables or in config + files""" + normalized = {} + for key, val in items: + key = key.replace('_', '-') + if not key.startswith('--'): + key = '--%s' % key # only prefer long opts + normalized[key] = val + return normalized + + def get_config_section(self, name): + """Get a section of a configuration""" + if self.config.has_section(name): + return self.config.items(name) + return [] + + def get_environ_vars(self, prefix='PIP_'): + """Returns a generator with all environmental vars with prefix PIP_""" + for key, val in os.environ.items(): + if key.startswith(prefix): + yield (key.replace(prefix, '').lower(), val) + + def get_default_values(self): + """Overridding to make updating the defaults after instantiation of + the option parser possible, update_defaults() does the dirty work.""" + if not self.process_default_values: + # Old, pre-Optik 1.5 behaviour. + return optparse.Values(self.defaults) + + defaults = self.update_defaults(self.defaults.copy()) # ours + for option in self._get_all_options(): + default = defaults.get(option.dest) + if isinstance(default, string_types): + opt_str = option.get_opt_string() + defaults[option.dest] = option.check_value(opt_str, default) + return optparse.Values(defaults) + + def error(self, msg): + self.print_usage(sys.stderr) + self.exit(2, "%s\n" % msg) diff --git a/lib/python3.4/site-packages/pip/cmdoptions.py b/lib/python3.4/site-packages/pip/cmdoptions.py new file mode 100644 index 0000000..01b2104 --- /dev/null +++ b/lib/python3.4/site-packages/pip/cmdoptions.py @@ -0,0 +1,369 @@ +""" +shared options and groups + +The principle here is to define options once, but *not* instantiate them globally. +One reason being that options with action='append' can carry state between parses. +pip parse's general options twice internally, and shouldn't pass on state. +To be consistent, all options will follow this design. 
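+
+For illustration, the pattern defined below is used like this (a sketch,
+using the ``verbose`` option defined in this module):
+
+    parser.add_option(verbose.make())  # a fresh Option instance per parse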
+ +""" +import copy +from optparse import OptionGroup, SUPPRESS_HELP, Option +from pip.locations import default_log_file + + +def make_option_group(group, parser): + """ + Return an OptionGroup object + group -- assumed to be dict with 'name' and 'options' keys + parser -- an optparse Parser + """ + option_group = OptionGroup(parser, group['name']) + for option in group['options']: + option_group.add_option(option.make()) + return option_group + +class OptionMaker(object): + """Class that stores the args/kwargs that would be used to make an Option, + for making them later, and uses deepcopy's to reset state.""" + def __init__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + def make(self): + args_copy = copy.deepcopy(self.args) + kwargs_copy = copy.deepcopy(self.kwargs) + return Option(*args_copy, **kwargs_copy) + +########### +# options # +########### + +help_ = OptionMaker( + '-h', '--help', + dest='help', + action='help', + help='Show help.') + +require_virtualenv = OptionMaker( + # Run only if inside a virtualenv, bail if not. + '--require-virtualenv', '--require-venv', + dest='require_venv', + action='store_true', + default=False, + help=SUPPRESS_HELP) + +verbose = OptionMaker( + '-v', '--verbose', + dest='verbose', + action='count', + default=0, + help='Give more output. Option is additive, and can be used up to 3 times.') + +version = OptionMaker( + '-V', '--version', + dest='version', + action='store_true', + help='Show version and exit.') + +quiet = OptionMaker( + '-q', '--quiet', + dest='quiet', + action='count', + default=0, + help='Give less output.') + +log = OptionMaker( + '--log', + dest='log', + metavar='path', + help='Path to a verbose appending log. This log is inactive by default.') + +log_explicit_levels = OptionMaker( + # Writes the log levels explicitely to the log' + '--log-explicit-levels', + dest='log_explicit_levels', + action='store_true', + default=False, + help=SUPPRESS_HELP) + +log_file = OptionMaker( + # The default log file + '--log-file', '--local-log', + dest='log_file', + metavar='path', + default=default_log_file, + help='Path to a verbose non-appending log, that only logs failures. This log is active by default at %default.') + +no_input = OptionMaker( + # Don't ask for input + '--no-input', + dest='no_input', + action='store_true', + default=False, + help=SUPPRESS_HELP) + +proxy = OptionMaker( + '--proxy', + dest='proxy', + type='str', + default='', + help="Specify a proxy in the form [user:passwd@]proxy.server:port.") + +timeout = OptionMaker( + '--timeout', '--default-timeout', + metavar='sec', + dest='timeout', + type='float', + default=15, + help='Set the socket timeout (default %default seconds).') + +default_vcs = OptionMaker( + # The default version control system for editables, e.g. 
'svn' + '--default-vcs', + dest='default_vcs', + type='str', + default='', + help=SUPPRESS_HELP) + +skip_requirements_regex = OptionMaker( + # A regex to be used to skip requirements + '--skip-requirements-regex', + dest='skip_requirements_regex', + type='str', + default='', + help=SUPPRESS_HELP) + +exists_action = OptionMaker( + # Option when path already exist + '--exists-action', + dest='exists_action', + type='choice', + choices=['s', 'i', 'w', 'b'], + default=[], + action='append', + metavar='action', + help="Default action when a path already exists: " + "(s)witch, (i)gnore, (w)ipe, (b)ackup.") + +cert = OptionMaker( + '--cert', + dest='cert', + type='str', + default='', + metavar='path', + help = "Path to alternate CA bundle.") + +index_url = OptionMaker( + '-i', '--index-url', '--pypi-url', + dest='index_url', + metavar='URL', + default='https://pypi.python.org/simple/', + help='Base URL of Python Package Index (default %default).') + +extra_index_url = OptionMaker( + '--extra-index-url', + dest='extra_index_urls', + metavar='URL', + action='append', + default=[], + help='Extra URLs of package indexes to use in addition to --index-url.') + +no_index = OptionMaker( + '--no-index', + dest='no_index', + action='store_true', + default=False, + help='Ignore package index (only looking at --find-links URLs instead).') + +find_links = OptionMaker( + '-f', '--find-links', + dest='find_links', + action='append', + default=[], + metavar='url', + help="If a url or path to an html file, then parse for links to archives. If a local path or file:// url that's a directory, then look for archives in the directory listing.") + +# TODO: Remove after 1.6 +use_mirrors = OptionMaker( + '-M', '--use-mirrors', + dest='use_mirrors', + action='store_true', + default=False, + help=SUPPRESS_HELP) + +# TODO: Remove after 1.6 +mirrors = OptionMaker( + '--mirrors', + dest='mirrors', + metavar='URL', + action='append', + default=[], + help=SUPPRESS_HELP) + +allow_external = OptionMaker( + "--allow-external", + dest="allow_external", + action="append", + default=[], + metavar="PACKAGE", + help="Allow the installation of externally hosted files", +) + +allow_all_external = OptionMaker( + "--allow-all-external", + dest="allow_all_external", + action="store_true", + default=False, + help="Allow the installation of all externally hosted files", +) + +# Remove after 1.7 +no_allow_external = OptionMaker( + "--no-allow-external", + dest="allow_all_external", + action="store_false", + default=False, + help=SUPPRESS_HELP, +) + +# Remove --allow-insecure after 1.7 +allow_unsafe = OptionMaker( + "--allow-unverified", "--allow-insecure", + dest="allow_unverified", + action="append", + default=[], + metavar="PACKAGE", + help="Allow the installation of insecure and unverifiable files", +) + +# Remove after 1.7 +no_allow_unsafe = OptionMaker( + "--no-allow-insecure", + dest="allow_all_insecure", + action="store_false", + default=False, + help=SUPPRESS_HELP +) + +# Remove after 1.5 +process_dependency_links = OptionMaker( + "--process-dependency-links", + dest="process_dependency_links", + action="store_true", + default=False, + help="Enable the processing of dependency links.", +) + +requirements = OptionMaker( + '-r', '--requirement', + dest='requirements', + action='append', + default=[], + metavar='file', + help='Install from the given requirements file. 
' + 'This option can be used multiple times.') + +use_wheel = OptionMaker( + '--use-wheel', + dest='use_wheel', + action='store_true', + help=SUPPRESS_HELP, +) + +no_use_wheel = OptionMaker( + '--no-use-wheel', + dest='use_wheel', + action='store_false', + default=True, + help=('Do not Find and prefer wheel archives when searching indexes and ' + 'find-links locations.'), +) + +download_cache = OptionMaker( + '--download-cache', + dest='download_cache', + metavar='dir', + default=None, + help='Cache downloaded packages in .') + +no_deps = OptionMaker( + '--no-deps', '--no-dependencies', + dest='ignore_dependencies', + action='store_true', + default=False, + help="Don't install package dependencies.") + +build_dir = OptionMaker( + '-b', '--build', '--build-dir', '--build-directory', + dest='build_dir', + metavar='dir', + help='Directory to unpack packages into and build in.', +) + +install_options = OptionMaker( + '--install-option', + dest='install_options', + action='append', + metavar='options', + help="Extra arguments to be supplied to the setup.py install " + "command (use like --install-option=\"--install-scripts=/usr/local/bin\"). " + "Use multiple --install-option options to pass multiple options to setup.py install. " + "If you are using an option with a directory path, be sure to use absolute path.") + +global_options = OptionMaker( + '--global-option', + dest='global_options', + action='append', + metavar='options', + help="Extra global options to be supplied to the setup.py " + "call before the install command.") + +no_clean = OptionMaker( + '--no-clean', + action='store_true', + default=False, + help="Don't clean up build directories.") + + +########## +# groups # +########## + +general_group = { + 'name': 'General Options', + 'options': [ + help_, + require_virtualenv, + verbose, + version, + quiet, + log_file, + log, + log_explicit_levels, + no_input, + proxy, + timeout, + default_vcs, + skip_requirements_regex, + exists_action, + cert, + ] + } + +index_group = { + 'name': 'Package Index Options', + 'options': [ + index_url, + extra_index_url, + no_index, + find_links, + use_mirrors, + mirrors, + allow_external, + allow_all_external, + no_allow_external, + allow_unsafe, + no_allow_unsafe, + process_dependency_links, + ] + } diff --git a/lib/python3.4/site-packages/pip/commands/__init__.py b/lib/python3.4/site-packages/pip/commands/__init__.py new file mode 100644 index 0000000..e0702d2 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/__init__.py @@ -0,0 +1,88 @@ +""" +Package containing all pip commands +""" + + +from pip.commands.bundle import BundleCommand +from pip.commands.completion import CompletionCommand +from pip.commands.freeze import FreezeCommand +from pip.commands.help import HelpCommand +from pip.commands.list import ListCommand +from pip.commands.search import SearchCommand +from pip.commands.show import ShowCommand +from pip.commands.install import InstallCommand +from pip.commands.uninstall import UninstallCommand +from pip.commands.unzip import UnzipCommand +from pip.commands.zip import ZipCommand +from pip.commands.wheel import WheelCommand + + +commands = { + BundleCommand.name: BundleCommand, + CompletionCommand.name: CompletionCommand, + FreezeCommand.name: FreezeCommand, + HelpCommand.name: HelpCommand, + SearchCommand.name: SearchCommand, + ShowCommand.name: ShowCommand, + InstallCommand.name: InstallCommand, + UninstallCommand.name: UninstallCommand, + UnzipCommand.name: UnzipCommand, + ZipCommand.name: ZipCommand, + ListCommand.name: 
ListCommand, + WheelCommand.name: WheelCommand, +} + + +commands_order = [ + InstallCommand, + UninstallCommand, + FreezeCommand, + ListCommand, + ShowCommand, + SearchCommand, + WheelCommand, + ZipCommand, + UnzipCommand, + BundleCommand, + HelpCommand, +] + + +def get_summaries(ignore_hidden=True, ordered=True): + """Yields sorted (command name, command summary) tuples.""" + + if ordered: + cmditems = _sort_commands(commands, commands_order) + else: + cmditems = commands.items() + + for name, command_class in cmditems: + if ignore_hidden and command_class.hidden: + continue + + yield (name, command_class.summary) + + +def get_similar_commands(name): + """Command name auto-correct.""" + from difflib import get_close_matches + + close_commands = get_close_matches(name, commands.keys()) + + if close_commands: + guess = close_commands[0] + else: + guess = False + + return guess + + +def _sort_commands(cmddict, order): + def keyfn(key): + try: + return order.index(key[1]) + except ValueError: + # unordered items should come last + return 0xff + + return sorted(cmddict.items(), key=keyfn) diff --git a/lib/python3.4/site-packages/pip/commands/bundle.py b/lib/python3.4/site-packages/pip/commands/bundle.py new file mode 100644 index 0000000..69967fe --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/bundle.py @@ -0,0 +1,42 @@ +import textwrap +from pip.locations import build_prefix, src_prefix +from pip.util import display_path, backup_dir +from pip.log import logger +from pip.exceptions import InstallationError +from pip.commands.install import InstallCommand + + +class BundleCommand(InstallCommand): + """Create pybundles (archives containing multiple packages).""" + name = 'bundle' + usage = """ + %prog [options] .pybundle ...""" + summary = 'DEPRECATED. Create pybundles.' + bundle = True + + def __init__(self, *args, **kw): + super(BundleCommand, self).__init__(*args, **kw) + # bundle uses different default source and build dirs + build_opt = self.parser.get_option("--build") + build_opt.default = backup_dir(build_prefix, '-bundle') + src_opt = self.parser.get_option("--src") + src_opt.default = backup_dir(src_prefix, '-bundle') + self.parser.set_defaults(**{ + src_opt.dest: src_opt.default, + build_opt.dest: build_opt.default, + }) + + def run(self, options, args): + + logger.deprecated('1.6', "DEPRECATION: 'pip bundle' and support for installing from *.pybundle files is deprecated. 
" + "See https://github.com/pypa/pip/pull/1046") + + if not args: + raise InstallationError('You must give a bundle filename') + # We have to get everything when creating a bundle: + options.ignore_installed = True + logger.notify('Putting temporary build files in %s and source/develop files in %s' + % (display_path(options.build_dir), display_path(options.src_dir))) + self.bundle_filename = args.pop(0) + requirement_set = super(BundleCommand, self).run(options, args) + return requirement_set diff --git a/lib/python3.4/site-packages/pip/commands/completion.py b/lib/python3.4/site-packages/pip/commands/completion.py new file mode 100644 index 0000000..5fa2376 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/completion.py @@ -0,0 +1,59 @@ +import sys +from pip.basecommand import Command + +BASE_COMPLETION = """ +# pip %(shell)s completion start%(script)s# pip %(shell)s completion end +""" + +COMPLETION_SCRIPTS = { + 'bash': """ +_pip_completion() +{ + COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\ + COMP_CWORD=$COMP_CWORD \\ + PIP_AUTO_COMPLETE=1 $1 ) ) +} +complete -o default -F _pip_completion pip +""", 'zsh': """ +function _pip_completion { + local words cword + read -Ac words + read -cn cword + reply=( $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$(( cword-1 )) \\ + PIP_AUTO_COMPLETE=1 $words[1] ) ) +} +compctl -K _pip_completion pip +"""} + + +class CompletionCommand(Command): + """A helper command to be used for command completion.""" + name = 'completion' + summary = 'A helper command to be used for command completion' + hidden = True + + def __init__(self, *args, **kw): + super(CompletionCommand, self).__init__(*args, **kw) + self.parser.add_option( + '--bash', '-b', + action='store_const', + const='bash', + dest='shell', + help='Emit completion code for bash') + self.parser.add_option( + '--zsh', '-z', + action='store_const', + const='zsh', + dest='shell', + help='Emit completion code for zsh') + + def run(self, options, args): + """Prints the completion code of the given shell""" + shells = COMPLETION_SCRIPTS.keys() + shell_options = ['--' + shell for shell in sorted(shells)] + if options.shell in shells: + script = COMPLETION_SCRIPTS.get(options.shell, '') + print(BASE_COMPLETION % {'script': script, 'shell': options.shell}) + else: + sys.stderr.write('ERROR: You must pass %s\n' % ' or '.join(shell_options)) diff --git a/lib/python3.4/site-packages/pip/commands/freeze.py b/lib/python3.4/site-packages/pip/commands/freeze.py new file mode 100644 index 0000000..7b12907 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/freeze.py @@ -0,0 +1,114 @@ +import re +import sys +import pip + +from pip.req import InstallRequirement +from pip.log import logger +from pip.basecommand import Command +from pip.util import get_installed_distributions +import pkg_resources + + +class FreezeCommand(Command): + """Output installed packages in requirements format.""" + name = 'freeze' + usage = """ + %prog [options]""" + summary = 'Output installed packages in requirements format.' 
+ + def __init__(self, *args, **kw): + super(FreezeCommand, self).__init__(*args, **kw) + + self.cmd_opts.add_option( + '-r', '--requirement', + dest='requirement', + action='store', + default=None, + metavar='file', + help="Use the order in the given requirements file and its comments when generating output.") + self.cmd_opts.add_option( + '-f', '--find-links', + dest='find_links', + action='append', + default=[], + metavar='URL', + help='URL for finding packages, which will be added to the output.') + self.cmd_opts.add_option( + '-l', '--local', + dest='local', + action='store_true', + default=False, + help='If in a virtualenv that has global access, do not output globally-installed packages.') + + self.parser.insert_option_group(0, self.cmd_opts) + + def setup_logging(self): + logger.move_stdout_to_stderr() + + def run(self, options, args): + requirement = options.requirement + find_links = options.find_links or [] + local_only = options.local + ## FIXME: Obviously this should be settable: + find_tags = False + skip_match = None + + skip_regex = options.skip_requirements_regex + if skip_regex: + skip_match = re.compile(skip_regex) + + dependency_links = [] + + f = sys.stdout + + for dist in pkg_resources.working_set: + if dist.has_metadata('dependency_links.txt'): + dependency_links.extend(dist.get_metadata_lines('dependency_links.txt')) + for link in find_links: + if '#egg=' in link: + dependency_links.append(link) + for link in find_links: + f.write('-f %s\n' % link) + installations = {} + for dist in get_installed_distributions(local_only=local_only): + req = pip.FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags) + installations[req.name] = req + if requirement: + req_f = open(requirement) + for line in req_f: + if not line.strip() or line.strip().startswith('#'): + f.write(line) + continue + if skip_match and skip_match.search(line): + f.write(line) + continue + elif line.startswith('-e') or line.startswith('--editable'): + if line.startswith('-e'): + line = line[2:].strip() + else: + line = line[len('--editable'):].strip().lstrip('=') + line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs) + elif (line.startswith('-r') or line.startswith('--requirement') + or line.startswith('-Z') or line.startswith('--always-unzip') + or line.startswith('-f') or line.startswith('-i') + or line.startswith('--extra-index-url') + or line.startswith('--find-links') + or line.startswith('--index-url')): + f.write(line) + continue + else: + line_req = InstallRequirement.from_line(line) + if not line_req.name: + logger.notify("Skipping line because it's not clear what it would install: %s" + % line.strip()) + logger.notify(" (add #egg=PackageName to the URL to avoid this warning)") + continue + if line_req.name not in installations: + logger.warn("Requirement file contains %s, but that package is not installed" + % line.strip()) + continue + f.write(str(installations[line_req.name])) + del installations[line_req.name] + f.write('## The following requirements were added by pip --freeze:\n') + for installation in sorted(installations.values(), key=lambda x: x.name): + f.write(str(installation)) diff --git a/lib/python3.4/site-packages/pip/commands/help.py b/lib/python3.4/site-packages/pip/commands/help.py new file mode 100644 index 0000000..2253387 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/help.py @@ -0,0 +1,33 @@ +from pip.basecommand import Command, SUCCESS +from pip.exceptions import CommandError + + +class HelpCommand(Command): +
"""Show help for commands""" + name = 'help' + usage = """ + %prog <command>""" + summary = 'Show help for commands.' + + def run(self, options, args): + from pip.commands import commands, get_similar_commands + + try: + # 'pip help' with no args is handled by pip.__init__.parseopt() + cmd_name = args[0] # the command we need help for + except IndexError: + return SUCCESS + + if cmd_name not in commands: + guess = get_similar_commands(cmd_name) + + msg = ['unknown command "%s"' % cmd_name] + if guess: + msg.append('maybe you meant "%s"' % guess) + + raise CommandError(' - '.join(msg)) + + command = commands[cmd_name]() + command.parser.print_help() + + return SUCCESS diff --git a/lib/python3.4/site-packages/pip/commands/install.py b/lib/python3.4/site-packages/pip/commands/install.py new file mode 100644 index 0000000..cb7d0db --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/install.py @@ -0,0 +1,326 @@ +import os +import sys +import tempfile +import shutil +from pip.req import InstallRequirement, RequirementSet, parse_requirements +from pip.log import logger +from pip.locations import (src_prefix, virtualenv_no_global, distutils_scheme, + build_prefix) +from pip.basecommand import Command +from pip.index import PackageFinder +from pip.exceptions import InstallationError, CommandError, PreviousBuildDirError +from pip import cmdoptions +from pip.util import BuildDirectory + + +class InstallCommand(Command): + """ + Install packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports installing from "requirements files", which provide + an easy way to specify a whole environment to be installed. + """ + name = 'install' + + usage = """ + %prog [options] <requirement specifier> ... + %prog [options] -r <requirements file> ... + %prog [options] [-e] <vcs project url> ... + %prog [options] [-e] <local project path> ... + %prog [options] <archive url/path> ...""" + + summary = 'Install packages.' + bundle = False + + def __init__(self, *args, **kw): + super(InstallCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option( + '-e', '--editable', + dest='editables', + action='append', + default=[], + metavar='path/url', + help='Install a project in editable mode (i.e. setuptools "develop mode") from a local project path or a VCS url.') + + cmd_opts.add_option(cmdoptions.requirements.make()) + cmd_opts.add_option(cmdoptions.build_dir.make()) + + cmd_opts.add_option( + '-t', '--target', + dest='target_dir', + metavar='dir', + default=None, + help='Install packages into <dir>.') + + cmd_opts.add_option( + '-d', '--download', '--download-dir', '--download-directory', + dest='download_dir', + metavar='dir', + default=None, + help="Download packages into <dir> instead of installing them, regardless of what's already installed.") + + cmd_opts.add_option(cmdoptions.download_cache.make()) + + cmd_opts.add_option( + '--src', '--source', '--source-dir', '--source-directory', + dest='src_dir', + metavar='dir', + default=src_prefix, + help='Directory to check out editable projects into. ' + 'The default in a virtualenv is "<venv path>/src". ' + 'The default for global installs is "<current dir>/src".') + + cmd_opts.add_option( + '-U', '--upgrade', + dest='upgrade', + action='store_true', + help='Upgrade all packages to the newest available version. 
' + 'This process is recursive regardless of whether a dependency is already satisfied.') + + cmd_opts.add_option( + '--force-reinstall', + dest='force_reinstall', + action='store_true', + help='When upgrading, reinstall all packages even if they are ' + 'already up-to-date.') + + cmd_opts.add_option( + '-I', '--ignore-installed', + dest='ignore_installed', + action='store_true', + help='Ignore the installed packages (reinstalling instead).') + + cmd_opts.add_option(cmdoptions.no_deps.make()) + + cmd_opts.add_option( + '--no-install', + dest='no_install', + action='store_true', + help="DEPRECATED. Download and unpack all packages, but don't actually install them.") + + cmd_opts.add_option( + '--no-download', + dest='no_download', + action="store_true", + help="DEPRECATED. Don't download any packages, just install the ones already downloaded " + "(completes an install run with --no-install).") + + cmd_opts.add_option(cmdoptions.install_options.make()) + cmd_opts.add_option(cmdoptions.global_options.make()) + + cmd_opts.add_option( + '--user', + dest='use_user_site', + action='store_true', + help='Install using the user scheme.') + + cmd_opts.add_option( + '--egg', + dest='as_egg', + action='store_true', + help="Install packages as eggs, not 'flat', like pip normally does. This option is not about installing *from* eggs. (WARNING: Because this option overrides pip's normal install logic, requirements files may not behave as expected.)") + + cmd_opts.add_option( + '--root', + dest='root_path', + metavar='dir', + default=None, + help="Install everything relative to this alternate root directory.") + + cmd_opts.add_option( + "--compile", + action="store_true", + dest="compile", + default=True, + help="Compile py files to pyc", + ) + + cmd_opts.add_option( + "--no-compile", + action="store_false", + dest="compile", + help="Do not compile py files to pyc", + ) + + cmd_opts.add_option(cmdoptions.use_wheel.make()) + cmd_opts.add_option(cmdoptions.no_use_wheel.make()) + + cmd_opts.add_option( + '--pre', + action='store_true', + default=False, + help="Include pre-release and development versions. By default, pip only finds stable versions.") + + cmd_opts.add_option(cmdoptions.no_clean.make()) + + index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def _build_package_finder(self, options, index_urls, session): + """ + Create a package finder appropriate to this install command. + This method is meant to be overridden by subclasses, not + called directly. + """ + return PackageFinder(find_links=options.find_links, + index_urls=index_urls, + use_wheel=options.use_wheel, + allow_external=options.allow_external, + allow_unverified=options.allow_unverified, + allow_all_external=options.allow_all_external, + allow_all_prereleases=options.pre, + process_dependency_links= + options.process_dependency_links, + session=session, + ) + + def run(self, options, args): + + if ( + options.no_install or + options.no_download or + options.build_dir or + options.no_clean + ): + logger.deprecated('1.7', 'DEPRECATION: --no-install, --no-download, --build, ' + 'and --no-clean are deprecated. 
See https://github.com/pypa/pip/issues/906.') + + if options.download_dir: + options.no_install = True + options.ignore_installed = True + + # If we have --no-install or --no-download and no --build we use the + # legacy static build dir + if (options.build_dir is None + and (options.no_install or options.no_download)): + options.build_dir = build_prefix + + if options.build_dir: + options.build_dir = os.path.abspath(options.build_dir) + + options.src_dir = os.path.abspath(options.src_dir) + install_options = options.install_options or [] + if options.use_user_site: + if virtualenv_no_global(): + raise InstallationError("Can not perform a '--user' install. User site-packages are not visible in this virtualenv.") + install_options.append('--user') + + temp_target_dir = None + if options.target_dir: + options.ignore_installed = True + temp_target_dir = tempfile.mkdtemp() + options.target_dir = os.path.abspath(options.target_dir) + if os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir): + raise CommandError("Target path exists but is not a directory, will not continue.") + install_options.append('--home=' + temp_target_dir) + + global_options = options.global_options or [] + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index: + logger.notify('Ignoring indexes: %s' % ','.join(index_urls)) + index_urls = [] + + if options.use_mirrors: + logger.deprecated("1.7", + "--use-mirrors has been deprecated and will be removed" + " in the future. Explicit uses of --index-url and/or " + "--extra-index-url is suggested.") + + if options.mirrors: + logger.deprecated("1.7", + "--mirrors has been deprecated and will be removed in " + " the future. Explicit uses of --index-url and/or " + "--extra-index-url is suggested.") + index_urls += options.mirrors + + session = self._build_session(options) + + finder = self._build_package_finder(options, index_urls, session) + + build_delete = (not (options.no_clean or options.build_dir)) + with BuildDirectory(options.build_dir, delete=build_delete) as build_dir: + requirement_set = RequirementSet( + build_dir=build_dir, + src_dir=options.src_dir, + download_dir=options.download_dir, + download_cache=options.download_cache, + upgrade=options.upgrade, + as_egg=options.as_egg, + ignore_installed=options.ignore_installed, + ignore_dependencies=options.ignore_dependencies, + force_reinstall=options.force_reinstall, + use_user_site=options.use_user_site, + target_dir=temp_target_dir, + session=session, + pycompile=options.compile, + ) + for name in args: + requirement_set.add_requirement( + InstallRequirement.from_line(name, None)) + for name in options.editables: + requirement_set.add_requirement( + InstallRequirement.from_editable(name, default_vcs=options.default_vcs)) + for filename in options.requirements: + for req in parse_requirements(filename, finder=finder, options=options, session=session): + requirement_set.add_requirement(req) + if not requirement_set.has_requirements: + opts = {'name': self.name} + if options.find_links: + msg = ('You must give at least one requirement to %(name)s ' + '(maybe you meant "pip %(name)s %(links)s"?)' % + dict(opts, links=' '.join(options.find_links))) + else: + msg = ('You must give at least one requirement ' + 'to %(name)s (see "pip help %(name)s")' % opts) + logger.warn(msg) + return + + try: + if not options.no_download: + requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle) + else: + requirement_set.locate_files() + + if not 
options.no_install and not self.bundle: + requirement_set.install(install_options, global_options, root=options.root_path) + installed = ' '.join([req.name for req in + requirement_set.successfully_installed]) + if installed: + logger.notify('Successfully installed %s' % installed) + elif not self.bundle: + downloaded = ' '.join([req.name for req in + requirement_set.successfully_downloaded]) + if downloaded: + logger.notify('Successfully downloaded %s' % downloaded) + elif self.bundle: + requirement_set.create_bundle(self.bundle_filename) + logger.notify('Created bundle in %s' % self.bundle_filename) + except PreviousBuildDirError: + options.no_clean = True + raise + finally: + # Clean up + if (not options.no_clean) and ((not options.no_install) or options.download_dir): + requirement_set.cleanup_files(bundle=self.bundle) + + if options.target_dir: + if not os.path.exists(options.target_dir): + os.makedirs(options.target_dir) + lib_dir = distutils_scheme('', home=temp_target_dir)['purelib'] + for item in os.listdir(lib_dir): + shutil.move( + os.path.join(lib_dir, item), + os.path.join(options.target_dir, item) + ) + shutil.rmtree(temp_target_dir) + return requirement_set diff --git a/lib/python3.4/site-packages/pip/commands/list.py b/lib/python3.4/site-packages/pip/commands/list.py new file mode 100644 index 0000000..207f068 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/list.py @@ -0,0 +1,162 @@ +from pip.basecommand import Command +from pip.exceptions import DistributionNotFound, BestVersionAlreadyInstalled +from pip.index import PackageFinder +from pip.log import logger +from pip.req import InstallRequirement +from pip.util import get_installed_distributions, dist_is_editable +from pip.cmdoptions import make_option_group, index_group + + +class ListCommand(Command): + """List installed packages, including editables.""" + name = 'list' + usage = """ + %prog [options]""" + summary = 'List installed packages.' + + # distributions to skip (python itself is reported by pkg_resources.working_set) + skip = ['python'] + + def __init__(self, *args, **kw): + super(ListCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option( + '-o', '--outdated', + action='store_true', + default=False, + help='List outdated packages (excluding editables)') + cmd_opts.add_option( + '-u', '--uptodate', + action='store_true', + default=False, + help='List uptodate packages (excluding editables)') + cmd_opts.add_option( + '-e', '--editable', + action='store_true', + default=False, + help='List editable projects.') + cmd_opts.add_option( + '-l', '--local', + action='store_true', + default=False, + help='If in a virtualenv that has global access, do not list globally-installed packages.') + + cmd_opts.add_option( + '--pre', + action='store_true', + default=False, + help="Include pre-release and development versions. By default, pip only finds stable versions.") + + index_opts = make_option_group(index_group, self.parser) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def _build_package_finder(self, options, index_urls, session): + """ + Create a package finder appropriate to this list command. 
+ """ + return PackageFinder(find_links=options.find_links, + index_urls=index_urls, + allow_external=options.allow_external, + allow_unverified=options.allow_unverified, + allow_all_external=options.allow_all_external, + allow_all_prereleases=options.pre, + process_dependency_links= + options.process_dependency_links, + session=session, + ) + + def run(self, options, args): + if options.outdated: + self.run_outdated(options) + elif options.uptodate: + self.run_uptodate(options) + elif options.editable: + self.run_editables(options) + else: + self.run_listing(options) + + def run_outdated(self, options): + for dist, remote_version_raw, remote_version_parsed in self.find_packages_latests_versions(options): + if remote_version_parsed > dist.parsed_version: + logger.notify('%s (Current: %s Latest: %s)' % (dist.project_name, + dist.version, remote_version_raw)) + + def find_packages_latests_versions(self, options): + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index: + logger.notify('Ignoring indexes: %s' % ','.join(index_urls)) + index_urls = [] + + if options.use_mirrors: + logger.deprecated("1.7", + "--use-mirrors has been deprecated and will be removed" + " in the future. Explicit uses of --index-url and/or " + "--extra-index-url is suggested.") + + if options.mirrors: + logger.deprecated("1.7", + "--mirrors has been deprecated and will be removed in " + " the future. Explicit uses of --index-url and/or " + "--extra-index-url is suggested.") + index_urls += options.mirrors + + dependency_links = [] + for dist in get_installed_distributions(local_only=options.local, skip=self.skip): + if dist.has_metadata('dependency_links.txt'): + dependency_links.extend( + dist.get_metadata_lines('dependency_links.txt'), + ) + + session = self._build_session(options) + + finder = self._build_package_finder(options, index_urls, session) + finder.add_dependency_links(dependency_links) + + installed_packages = get_installed_distributions(local_only=options.local, include_editables=False, skip=self.skip) + for dist in installed_packages: + req = InstallRequirement.from_line(dist.key, None) + try: + link = finder.find_requirement(req, True) + + # If link is None, means installed version is most up-to-date + if link is None: + continue + except DistributionNotFound: + continue + except BestVersionAlreadyInstalled: + remote_version = req.installed_version + else: + # It might be a good idea that link or finder had a public method + # that returned version + remote_version = finder._link_package_versions(link, req.name)[0] + remote_version_raw = remote_version[2] + remote_version_parsed = remote_version[0] + yield dist, remote_version_raw, remote_version_parsed + + def run_listing(self, options): + installed_packages = get_installed_distributions(local_only=options.local, skip=self.skip) + self.output_package_listing(installed_packages) + + def run_editables(self, options): + installed_packages = get_installed_distributions(local_only=options.local, editables_only=True) + self.output_package_listing(installed_packages) + + def output_package_listing(self, installed_packages): + installed_packages = sorted(installed_packages, key=lambda dist: dist.project_name.lower()) + for dist in installed_packages: + if dist_is_editable(dist): + line = '%s (%s, %s)' % (dist.project_name, dist.version, dist.location) + else: + line = '%s (%s)' % (dist.project_name, dist.version) + logger.notify(line) + + def run_uptodate(self, options): + uptodate = [] + for dist, remote_version_raw, 
remote_version_parsed in self.find_packages_latests_versions(options): + if dist.parsed_version == remote_version_parsed: + uptodate.append(dist) + self.output_package_listing(uptodate) diff --git a/lib/python3.4/site-packages/pip/commands/search.py b/lib/python3.4/site-packages/pip/commands/search.py new file mode 100644 index 0000000..7bf3397 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/search.py @@ -0,0 +1,132 @@ +import sys +import textwrap + +import pip.download + +from pip.basecommand import Command, SUCCESS +from pip.util import get_terminal_size +from pip.log import logger +from pip.backwardcompat import xmlrpclib, reduce, cmp +from pip.exceptions import CommandError +from pip.status_codes import NO_MATCHES_FOUND +import pkg_resources +from distutils.version import StrictVersion, LooseVersion + + +class SearchCommand(Command): + """Search for PyPI packages whose name or summary contains <query>.""" + name = 'search' + usage = """ + %prog [options] <query>""" + summary = 'Search PyPI for packages.' + + def __init__(self, *args, **kw): + super(SearchCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '--index', + dest='index', + metavar='URL', + default='https://pypi.python.org/pypi', + help='Base URL of Python Package Index (default %default)') + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + if not args: + raise CommandError('Missing required argument (search query).') + query = args + index_url = options.index + + pypi_hits = self.search(query, index_url) + hits = transform_hits(pypi_hits) + + terminal_width = None + if sys.stdout.isatty(): + terminal_width = get_terminal_size()[0] + + print_results(hits, terminal_width=terminal_width) + if pypi_hits: + return SUCCESS + return NO_MATCHES_FOUND + + def search(self, query, index_url): + pypi = xmlrpclib.ServerProxy(index_url) + hits = pypi.search({'name': query, 'summary': query}, 'or') + return hits + + +def transform_hits(hits): + """ + The list from pypi is really a list of versions. We want a list of + packages with the list of versions stored inline. This converts the + list from pypi into one we can use. 
+ """ + packages = {} + for hit in hits: + name = hit['name'] + summary = hit['summary'] + version = hit['version'] + score = hit['_pypi_ordering'] + if score is None: + score = 0 + + if name not in packages.keys(): + packages[name] = {'name': name, 'summary': summary, 'versions': [version], 'score': score} + else: + packages[name]['versions'].append(version) + + # if this is the highest version, replace summary and score + if version == highest_version(packages[name]['versions']): + packages[name]['summary'] = summary + packages[name]['score'] = score + + # each record has a unique name now, so we will convert the dict into a list sorted by score + package_list = sorted(packages.values(), key=lambda x: x['score'], reverse=True) + return package_list + + +def print_results(hits, name_column_width=25, terminal_width=None): + installed_packages = [p.project_name for p in pkg_resources.working_set] + for hit in hits: + name = hit['name'] + summary = hit['summary'] or '' + if terminal_width is not None: + # wrap and indent summary to fit terminal + summary = textwrap.wrap(summary, terminal_width - name_column_width - 5) + summary = ('\n' + ' ' * (name_column_width + 3)).join(summary) + line = '%s - %s' % (name.ljust(name_column_width), summary) + try: + logger.notify(line) + if name in installed_packages: + dist = pkg_resources.get_distribution(name) + logger.indent += 2 + try: + latest = highest_version(hit['versions']) + if dist.version == latest: + logger.notify('INSTALLED: %s (latest)' % dist.version) + else: + logger.notify('INSTALLED: %s' % dist.version) + logger.notify('LATEST: %s' % latest) + finally: + logger.indent -= 2 + except UnicodeEncodeError: + pass + + +def compare_versions(version1, version2): + try: + return cmp(StrictVersion(version1), StrictVersion(version2)) + # in case of abnormal version number, fall back to LooseVersion + except ValueError: + pass + try: + return cmp(LooseVersion(version1), LooseVersion(version2)) + except TypeError: + # certain LooseVersion comparisons raise due to unorderable types, + # fallback to string comparison + return cmp([str(v) for v in LooseVersion(version1).version], + [str(v) for v in LooseVersion(version2).version]) + + +def highest_version(versions): + return reduce((lambda v1, v2: compare_versions(v1, v2) == 1 and v1 or v2), versions) diff --git a/lib/python3.4/site-packages/pip/commands/show.py b/lib/python3.4/site-packages/pip/commands/show.py new file mode 100644 index 0000000..b3c476d --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/show.py @@ -0,0 +1,80 @@ +import os + +from pip.basecommand import Command +from pip.log import logger +import pkg_resources + + +class ShowCommand(Command): + """Show information about one or more installed packages.""" + name = 'show' + usage = """ + %prog [options] <package> ...""" + summary = 'Show information about installed packages.' + + def __init__(self, *args, **kw): + super(ShowCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '-f', '--files', + dest='files', + action='store_true', + default=False, + help='Show the full list of installed files for each package.') + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + if not args: + logger.warn('ERROR: Please provide a package name or names.') + return + query = args + + results = search_packages_info(query) + print_results(results, options.files) + + +def search_packages_info(query): + """ + Gather details from installed distributions. 
Print distribution name, + version, location, and installed files. Listing installed files requires a + pip-generated 'installed-files.txt' in the distribution's '.egg-info' + directory. + """ + installed_packages = dict( + [(p.project_name.lower(), p) for p in pkg_resources.working_set]) + for name in query: + normalized_name = name.lower() + if normalized_name in installed_packages: + dist = installed_packages[normalized_name] + package = { + 'name': dist.project_name, + 'version': dist.version, + 'location': dist.location, + 'requires': [dep.project_name for dep in dist.requires()], + } + filelist = os.path.join( + dist.location, + dist.egg_name() + '.egg-info', + 'installed-files.txt') + if os.path.isfile(filelist): + package['files'] = filelist + yield package + + +def print_results(distributions, list_all_files): + """ + Print the information from the installed distributions found. + """ + for dist in distributions: + logger.notify("---") + logger.notify("Name: %s" % dist['name']) + logger.notify("Version: %s" % dist['version']) + logger.notify("Location: %s" % dist['location']) + logger.notify("Requires: %s" % ', '.join(dist['requires'])) + if list_all_files: + logger.notify("Files:") + if 'files' in dist: + for line in open(dist['files']): + logger.notify(" %s" % line.strip()) + else: + logger.notify("Cannot locate installed-files.txt") diff --git a/lib/python3.4/site-packages/pip/commands/uninstall.py b/lib/python3.4/site-packages/pip/commands/uninstall.py new file mode 100644 index 0000000..b7099cf --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/uninstall.py @@ -0,0 +1,59 @@ +from pip.req import InstallRequirement, RequirementSet, parse_requirements +from pip.basecommand import Command +from pip.exceptions import InstallationError + + +class UninstallCommand(Command): + """ + Uninstall packages. + + pip is able to uninstall most installed packages. Known exceptions are: + + - Pure distutils packages installed with ``python setup.py install``, which + leave behind no metadata to determine what files were installed. + - Script wrappers installed by ``python setup.py develop``. + """ + name = 'uninstall' + usage = """ + %prog [options] <package> ... + %prog [options] -r <requirements file> ...""" + summary = 'Uninstall packages.' + + def __init__(self, *args, **kw): + super(UninstallCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '-r', '--requirement', + dest='requirements', + action='append', + default=[], + metavar='file', + help='Uninstall all the packages listed in the given requirements file. 
' + 'This option can be used multiple times.') + self.cmd_opts.add_option( + '-y', '--yes', + dest='yes', + action='store_true', + help="Don't ask for confirmation of uninstall deletions.") + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + session = self._build_session(options) + + requirement_set = RequirementSet( + build_dir=None, + src_dir=None, + download_dir=None, + session=session, + ) + for name in args: + requirement_set.add_requirement( + InstallRequirement.from_line(name)) + for filename in options.requirements: + for req in parse_requirements(filename, + options=options, session=session): + requirement_set.add_requirement(req) + if not requirement_set.has_requirements: + raise InstallationError('You must give at least one requirement ' + 'to %(name)s (see "pip help %(name)s")' % dict(name=self.name)) + requirement_set.uninstall(auto_confirm=options.yes) diff --git a/lib/python3.4/site-packages/pip/commands/unzip.py b/lib/python3.4/site-packages/pip/commands/unzip.py new file mode 100644 index 0000000..ed66ab9 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/unzip.py @@ -0,0 +1,7 @@ +from pip.commands.zip import ZipCommand + + +class UnzipCommand(ZipCommand): + """Unzip individual packages.""" + name = 'unzip' + summary = 'DEPRECATED. Unzip individual packages.' diff --git a/lib/python3.4/site-packages/pip/commands/wheel.py b/lib/python3.4/site-packages/pip/commands/wheel.py new file mode 100644 index 0000000..c1c6e50 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/wheel.py @@ -0,0 +1,203 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +import os +import sys +from pip.basecommand import Command +from pip.index import PackageFinder +from pip.log import logger +from pip.exceptions import CommandError, PreviousBuildDirError +from pip.req import InstallRequirement, RequirementSet, parse_requirements +from pip.util import BuildDirectory, normalize_path +from pip.wheel import WheelBuilder +from pip import cmdoptions + +DEFAULT_WHEEL_DIR = os.path.join(normalize_path(os.curdir), 'wheelhouse') + +class WheelCommand(Command): + """ + Build Wheel archives for your requirements and dependencies. + + Wheel is a built-package format, and offers the advantage of not recompiling your software during every install. + For more details, see the wheel docs: http://wheel.readthedocs.org/en/latest. + + Requirements: setuptools>=0.8, and wheel. + + 'pip wheel' uses the bdist_wheel setuptools extension from the wheel package to build individual wheels. + + """ + + name = 'wheel' + usage = """ + %prog [options] ... + %prog [options] -r ... + %prog [options] ... + %prog [options] ... + %prog [options] ...""" + + summary = 'Build wheels from your requirements.' 
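The heavy lifting in run() below is delegated to WheelBuilder, which per requirement comes down to invoking the bdist_wheel extension named in the docstring above. A rough sketch of that single step, assuming an unpacked source tree with a setup.py; build_one_wheel and its arguments are illustrative, not part of this module:

import subprocess
import sys

def build_one_wheel(source_dir, wheel_dir):
    # Run the bdist_wheel setuptools extension (provided by the 'wheel'
    # package) and collect the resulting .whl file into wheel_dir.
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel', '--dist-dir', wheel_dir],
        cwd=source_dir)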
+ + def __init__(self, *args, **kw): + super(WheelCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option( + '-w', '--wheel-dir', + dest='wheel_dir', + metavar='dir', + default=DEFAULT_WHEEL_DIR, + help="Build wheels into , where the default is '/wheelhouse'.") + cmd_opts.add_option(cmdoptions.use_wheel.make()) + cmd_opts.add_option(cmdoptions.no_use_wheel.make()) + cmd_opts.add_option( + '--build-option', + dest='build_options', + metavar='options', + action='append', + help="Extra arguments to be supplied to 'setup.py bdist_wheel'.") + cmd_opts.add_option(cmdoptions.requirements.make()) + cmd_opts.add_option(cmdoptions.download_cache.make()) + cmd_opts.add_option(cmdoptions.no_deps.make()) + cmd_opts.add_option(cmdoptions.build_dir.make()) + + cmd_opts.add_option( + '--global-option', + dest='global_options', + action='append', + metavar='options', + help="Extra global options to be supplied to the setup.py " + "call before the 'bdist_wheel' command.") + + cmd_opts.add_option( + '--pre', + action='store_true', + default=False, + help="Include pre-release and development versions. By default, pip only finds stable versions.") + + cmd_opts.add_option(cmdoptions.no_clean.make()) + + index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + + # confirm requirements + try: + import wheel.bdist_wheel + except ImportError: + if sys.version_info < (3,): + debian_package = 'python-wheel' + else: + debian_package = 'python3-wheel' + raise CommandError("'pip wheel' requires the 'wheel' package. To fix this, run: sudo apt-get install %s" % debian_package) + + try: + import pkg_resources + except ImportError: + raise CommandError( + "'pip wheel' requires setuptools >= 0.8 for dist-info support." + " To fix this, run: pip install --upgrade setuptools" + ) + else: + if not hasattr(pkg_resources, 'DistInfoDistribution'): + raise CommandError( + "'pip wheel' requires setuptools >= 0.8 for dist-info " + "support. To fix this, run: pip install --upgrade " + "setuptools" + ) + + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index: + logger.notify('Ignoring indexes: %s' % ','.join(index_urls)) + index_urls = [] + + if options.use_mirrors: + logger.deprecated("1.7", + "--use-mirrors has been deprecated and will be removed" + " in the future. Explicit uses of --index-url and/or " + "--extra-index-url is suggested.") + + if options.mirrors: + logger.deprecated("1.7", + "--mirrors has been deprecated and will be removed in " + " the future. 
Explicit uses of --index-url and/or " + "--extra-index-url is suggested.") + index_urls += options.mirrors + + if options.build_dir: + options.build_dir = os.path.abspath(options.build_dir) + + session = self._build_session(options) + + finder = PackageFinder(find_links=options.find_links, + index_urls=index_urls, + use_wheel=options.use_wheel, + allow_external=options.allow_external, + allow_unverified=options.allow_unverified, + allow_all_external=options.allow_all_external, + allow_all_prereleases=options.pre, + process_dependency_links= + options.process_dependency_links, + session=session, + ) + + build_delete = (not (options.no_clean or options.build_dir)) + with BuildDirectory(options.build_dir, delete=build_delete) as build_dir: + requirement_set = RequirementSet( + build_dir=build_dir, + src_dir=None, + download_dir=None, + download_cache=options.download_cache, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=True, + session=session, + wheel_download_dir=options.wheel_dir + ) + + # make the wheelhouse + if not os.path.exists(options.wheel_dir): + os.makedirs(options.wheel_dir) + + #parse args and/or requirements files + for name in args: + requirement_set.add_requirement( + InstallRequirement.from_line(name, None)) + + for filename in options.requirements: + for req in parse_requirements( + filename, + finder=finder, + options=options, + session=session): + if req.editable: + logger.notify("ignoring %s" % req.url) + continue + requirement_set.add_requirement(req) + + #fail if no requirements + if not requirement_set.has_requirements: + opts = {'name': self.name} + msg = ('You must give at least one requirement ' + 'to %(name)s (see "pip help %(name)s")' % opts) + logger.error(msg) + return + + try: + #build wheels + wb = WheelBuilder( + requirement_set, + finder, + options.wheel_dir, + build_options = options.build_options or [], + global_options = options.global_options or [] + ) + wb.build() + except PreviousBuildDirError: + options.no_clean = True + raise + finally: + if not options.no_clean: + requirement_set.cleanup_files() diff --git a/lib/python3.4/site-packages/pip/commands/zip.py b/lib/python3.4/site-packages/pip/commands/zip.py new file mode 100644 index 0000000..c801359 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/zip.py @@ -0,0 +1,351 @@ +import sys +import re +import fnmatch +import os +import shutil +import zipfile +from pip.util import display_path, backup_dir, rmtree +from pip.log import logger +from pip.exceptions import InstallationError +from pip.basecommand import Command + + +class ZipCommand(Command): + """Zip individual packages.""" + name = 'zip' + usage = """ + %prog [options] ...""" + summary = 'DEPRECATED. Zip individual packages.' 
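Background for this pair of deprecated commands: a zipped package is importable because Python treats zip archives on sys.path as import locations (PEP 273, the zipimport module). A self-contained illustration; 'mypkg.zip' and its contents are hypothetical:

import sys
import zipfile

# Build a tiny archive containing a one-module package.
with zipfile.ZipFile('mypkg.zip', 'w') as zf:
    zf.writestr('mypkg/__init__.py', 'VALUE = 42\n')

# Once the archive is on sys.path, its packages import normally.
sys.path.insert(0, 'mypkg.zip')
import mypkg
print(mypkg.VALUE)  # -> 42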
+ + def __init__(self, *args, **kw): + super(ZipCommand, self).__init__(*args, **kw) + if self.name == 'zip': + self.cmd_opts.add_option( + '--unzip', + action='store_true', + dest='unzip', + help='Unzip (rather than zip) a package.') + else: + self.cmd_opts.add_option( + '--zip', + action='store_false', + dest='unzip', + default=True, + help='Zip (rather than unzip) a package.') + self.cmd_opts.add_option( + '--no-pyc', + action='store_true', + dest='no_pyc', + help='Do not include .pyc files in zip files (useful on Google App Engine).') + self.cmd_opts.add_option( + '-l', '--list', + action='store_true', + dest='list', + help='List the packages available, and their zip status.') + self.cmd_opts.add_option( + '--sort-files', + action='store_true', + dest='sort_files', + help='With --list, sort packages according to how many files they contain.') + self.cmd_opts.add_option( + '--path', + action='append', + dest='paths', + help='Restrict operations to the given paths (may include wildcards).') + self.cmd_opts.add_option( + '-n', '--simulate', + action='store_true', + help='Do not actually perform the zip/unzip operation.') + + self.parser.insert_option_group(0, self.cmd_opts) + + def paths(self): + """All the entries of sys.path, possibly restricted by --path""" + if not self.select_paths: + return sys.path + result = [] + match_any = set() + for path in sys.path: + path = os.path.normcase(os.path.abspath(path)) + for match in self.select_paths: + match = os.path.normcase(os.path.abspath(match)) + if '*' in match: + if re.search(fnmatch.translate(match + '*'), path): + result.append(path) + match_any.add(match) + break + else: + if path.startswith(match): + result.append(path) + match_any.add(match) + break + else: + logger.debug("Skipping path %s because it doesn't match %s" + % (path, ', '.join(self.select_paths))) + for match in self.select_paths: + if match not in match_any and '*' not in match: + result.append(match) + logger.debug("Adding path %s because it doesn't match " + "anything already on sys.path" % match) + return result + + def run(self, options, args): + + logger.deprecated('1.7', "DEPRECATION: 'pip zip' and 'pip unzip' are deprecated, and will be removed in a future release.") + + self.select_paths = options.paths + self.simulate = options.simulate + if options.list: + return self.list(options, args) + if not args: + raise InstallationError( + 'You must give at least one package to zip or unzip') + packages = [] + for arg in args: + module_name, filename = self.find_package(arg) + if options.unzip and os.path.isdir(filename): + raise InstallationError( + 'The module %s (in %s) is not a zip file; cannot be unzipped' + % (module_name, filename)) + elif not options.unzip and not os.path.isdir(filename): + raise InstallationError( + 'The module %s (in %s) is not a directory; cannot be zipped' + % (module_name, filename)) + packages.append((module_name, filename)) + last_status = None + for module_name, filename in packages: + if options.unzip: + last_status = self.unzip_package(module_name, filename) + else: + last_status = self.zip_package(module_name, filename, options.no_pyc) + return last_status + + def unzip_package(self, module_name, filename): + zip_filename = os.path.dirname(filename) + if not (os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename)): + raise InstallationError( + 'Module %s (in %s) isn\'t located in a zip file in %s' + % (module_name, filename, zip_filename)) + package_path = os.path.dirname(zip_filename) + if package_path not in self.paths(): 
+ logger.warn( + 'Unpacking %s into %s, but %s is not on sys.path' + % (display_path(zip_filename), display_path(package_path), + display_path(package_path))) + logger.notify('Unzipping %s (in %s)' % (module_name, display_path(zip_filename))) + if self.simulate: + logger.notify('Skipping remaining operations because of --simulate') + return + logger.indent += 2 + try: + ## FIXME: this should be undoable: + zip = zipfile.ZipFile(zip_filename) + to_save = [] + for info in zip.infolist(): + name = info.filename + if name.startswith(module_name + os.path.sep): + content = zip.read(name) + dest = os.path.join(package_path, name) + if not os.path.exists(os.path.dirname(dest)): + os.makedirs(os.path.dirname(dest)) + if not content and dest.endswith(os.path.sep): + if not os.path.exists(dest): + os.makedirs(dest) + else: + f = open(dest, 'wb') + f.write(content) + f.close() + else: + to_save.append((name, zip.read(name))) + zip.close() + if not to_save: + logger.info('Removing now-empty zip file %s' % display_path(zip_filename)) + os.unlink(zip_filename) + self.remove_filename_from_pth(zip_filename) + else: + logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename))) + zip = zipfile.ZipFile(zip_filename, 'w') + for name, content in to_save: + zip.writestr(name, content) + zip.close() + finally: + logger.indent -= 2 + + def zip_package(self, module_name, filename, no_pyc): + orig_filename = filename + logger.notify('Zip %s (in %s)' % (module_name, display_path(filename))) + logger.indent += 2 + if filename.endswith('.egg'): + dest_filename = filename + else: + dest_filename = filename + '.zip' + try: + ## FIXME: I think this needs to be undoable: + if filename == dest_filename: + filename = backup_dir(orig_filename) + logger.notify('Moving %s aside to %s' % (orig_filename, filename)) + if not self.simulate: + shutil.move(orig_filename, filename) + try: + logger.info('Creating zip file in %s' % display_path(dest_filename)) + if not self.simulate: + zip = zipfile.ZipFile(dest_filename, 'w') + zip.writestr(module_name + '/', '') + for dirpath, dirnames, filenames in os.walk(filename): + if no_pyc: + filenames = [f for f in filenames + if not f.lower().endswith('.pyc')] + for fns, is_dir in [(dirnames, True), (filenames, False)]: + for fn in fns: + full = os.path.join(dirpath, fn) + dest = os.path.join(module_name, dirpath[len(filename):].lstrip(os.path.sep), fn) + if is_dir: + zip.writestr(dest + '/', '') + else: + zip.write(full, dest) + zip.close() + logger.info('Removing old directory %s' % display_path(filename)) + if not self.simulate: + rmtree(filename) + except: + ## FIXME: need to do an undo here + raise + ## FIXME: should also be undone: + self.add_filename_to_pth(dest_filename) + finally: + logger.indent -= 2 + + def remove_filename_from_pth(self, filename): + for pth in self.pth_files(): + f = open(pth, 'r') + lines = f.readlines() + f.close() + new_lines = [ + l for l in lines if l.strip() != filename] + if lines != new_lines: + logger.info('Removing reference to %s from .pth file %s' + % (display_path(filename), display_path(pth))) + if not [line for line in new_lines if line]: + logger.info('%s file would be empty: deleting' % display_path(pth)) + if not self.simulate: + os.unlink(pth) + else: + if not self.simulate: + f = open(pth, 'wb') + f.writelines(new_lines) + f.close() + return + logger.warn('Cannot find a reference to %s in any .pth file' % display_path(filename)) + + def add_filename_to_pth(self, filename): + path = 
os.path.dirname(filename) + dest = filename + '.pth' + if path not in self.paths(): + logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest)) + if not self.simulate: + if os.path.exists(dest): + f = open(dest) + lines = f.readlines() + f.close() + if lines and not lines[-1].endswith('\n'): + lines[-1] += '\n' + lines.append(filename + '\n') + else: + lines = [filename + '\n'] + f = open(dest, 'wb') + f.writelines(lines) + f.close() + + def pth_files(self): + for path in self.paths(): + if not os.path.exists(path) or not os.path.isdir(path): + continue + for filename in os.listdir(path): + if filename.endswith('.pth'): + yield os.path.join(path, filename) + + def find_package(self, package): + for path in self.paths(): + full = os.path.join(path, package) + if os.path.exists(full): + return package, full + if not os.path.isdir(path) and zipfile.is_zipfile(path): + zip = zipfile.ZipFile(path, 'r') + try: + zip.read(os.path.join(package, '__init__.py')) + except KeyError: + pass + else: + zip.close() + return package, full + zip.close() + ## FIXME: need special error for package.py case: + raise InstallationError( + 'No package with the name %s found' % package) + + def list(self, options, args): + if args: + raise InstallationError( + 'You cannot give an argument with --list') + for path in sorted(self.paths()): + if not os.path.exists(path): + continue + basename = os.path.basename(path.rstrip(os.path.sep)) + if os.path.isfile(path) and zipfile.is_zipfile(path): + if os.path.dirname(path) not in self.paths(): + logger.notify('Zipped egg: %s' % display_path(path)) + continue + if (basename != 'site-packages' and basename != 'dist-packages' + and not path.replace('\\', '/').endswith('lib/python')): + continue + logger.notify('In %s:' % display_path(path)) + logger.indent += 2 + zipped = [] + unzipped = [] + try: + for filename in sorted(os.listdir(path)): + ext = os.path.splitext(filename)[1].lower() + if ext in ('.pth', '.egg-info', '.egg-link'): + continue + if ext == '.py': + logger.info('Not displaying %s: not a package' % display_path(filename)) + continue + full = os.path.join(path, filename) + if os.path.isdir(full): + unzipped.append((filename, self.count_package(full))) + elif zipfile.is_zipfile(full): + zipped.append(filename) + else: + logger.info('Unknown file: %s' % display_path(filename)) + if zipped: + logger.notify('Zipped packages:') + logger.indent += 2 + try: + for filename in zipped: + logger.notify(filename) + finally: + logger.indent -= 2 + else: + logger.notify('No zipped packages.') + if unzipped: + if options.sort_files: + unzipped.sort(key=lambda x: -x[1]) + logger.notify('Unzipped packages:') + logger.indent += 2 + try: + for filename, count in unzipped: + logger.notify('%s (%i files)' % (filename, count)) + finally: + logger.indent -= 2 + else: + logger.notify('No unzipped packages.') + finally: + logger.indent -= 2 + + def count_package(self, path): + total = 0 + for dirpath, dirnames, filenames in os.walk(path): + filenames = [f for f in filenames + if not f.lower().endswith('.pyc')] + total += len(filenames) + return total diff --git a/lib/python3.4/site-packages/pip/download.py b/lib/python3.4/site-packages/pip/download.py new file mode 100644 index 0000000..adff2ec --- /dev/null +++ b/lib/python3.4/site-packages/pip/download.py @@ -0,0 +1,644 @@ +import cgi +import email.utils +import hashlib +import getpass +import mimetypes +import os +import platform +import re +import shutil +import sys +import tempfile + +import pip + +from 
pip.backwardcompat import urllib, urlparse, raw_input +from pip.exceptions import InstallationError, HashMismatch +from pip.util import (splitext, rmtree, format_size, display_path, + backup_dir, ask_path_exists, unpack_file, + create_download_cache_folder, cache_download) +from pip.vcs import vcs +from pip.log import logger +import requests, six +from requests.adapters import BaseAdapter +from requests.auth import AuthBase, HTTPBasicAuth +from requests.compat import IncompleteRead +from requests.exceptions import InvalidURL, ChunkedEncodingError +from requests.models import Response +from requests.structures import CaseInsensitiveDict + +__all__ = ['get_file_content', + 'is_url', 'url_to_path', 'path_to_url', + 'is_archive_file', 'unpack_vcs_link', + 'unpack_file_url', 'is_vcs_url', 'is_file_url', 'unpack_http_url'] + + +def user_agent(): + """Return a string representing the user agent.""" + _implementation = platform.python_implementation() + + if _implementation == 'CPython': + _implementation_version = platform.python_version() + elif _implementation == 'PyPy': + _implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro) + if sys.pypy_version_info.releaselevel != 'final': + _implementation_version = ''.join([ + _implementation_version, + sys.pypy_version_info.releaselevel, + ]) + elif _implementation == 'Jython': + _implementation_version = platform.python_version() # Complete Guess + elif _implementation == 'IronPython': + _implementation_version = platform.python_version() # Complete Guess + else: + _implementation_version = 'Unknown' + + try: + p_system = platform.system() + p_release = platform.release() + except IOError: + p_system = 'Unknown' + p_release = 'Unknown' + + return " ".join(['pip/%s' % pip.__version__, + '%s/%s' % (_implementation, _implementation_version), + '%s/%s' % (p_system, p_release)]) + + +class MultiDomainBasicAuth(AuthBase): + + def __init__(self, prompting=True): + self.prompting = prompting + self.passwords = {} + + def __call__(self, req): + parsed = urlparse.urlparse(req.url) + + # Get the netloc without any embedded credentials + netloc = parsed.netloc.split("@", 1)[-1] + + # Set the url of the request to the url without any credentials + req.url = urlparse.urlunparse(parsed[:1] + (netloc,) + parsed[2:]) + + # Use any stored credentials that we have for this netloc + username, password = self.passwords.get(netloc, (None, None)) + + # Extract credentials embedded in the url if we have none stored + if username is None: + username, password = self.parse_credentials(parsed.netloc) + + if username or password: + # Store the username and password + self.passwords[netloc] = (username, password) + + # Send the basic auth with this request + req = HTTPBasicAuth(username or "", password or "")(req) + + # Attach a hook to handle 401 responses + req.register_hook("response", self.handle_401) + + return req + + def handle_401(self, resp, **kwargs): + # We only care about 401 responses, anything else we want to just + # pass through the actual response + if resp.status_code != 401: + return resp + + # We are not able to prompt the user so simply return the response + if not self.prompting: + return resp + + parsed = urlparse.urlparse(resp.url) + + # Prompt the user for a new username and password + username = raw_input("User for %s: " % parsed.netloc) + password = getpass.getpass("Password: ") + + # Store the new username and password to use for future requests + if username or password: +
self.passwords[parsed.netloc] = (username, password) + + # Consume content and release the original connection to allow our new + # request to reuse the same one. + resp.content + resp.raw.release_conn() + + # Add our new username and password to the request + req = HTTPBasicAuth(username or "", password or "")(resp.request) + + # Send our new request + new_resp = resp.connection.send(req, **kwargs) + new_resp.history.append(resp) + + return new_resp + + def parse_credentials(self, netloc): + if "@" in netloc: + userinfo = netloc.rsplit("@", 1)[0] + if ":" in userinfo: + return userinfo.split(":", 1) + return userinfo, None + return None, None + + +class LocalFSResponse(object): + + def __init__(self, fileobj): + self.fileobj = fileobj + + def __getattr__(self, name): + return getattr(self.fileobj, name) + + def read(self, amt=None, decode_content=None, cache_content=False): + return self.fileobj.read(amt) + + # Insert Hacks to Make Cookie Jar work w/ Requests + @property + def _original_response(self): + class FakeMessage(object): + def getheaders(self, header): + return [] + + def get_all(self, header, default): + return [] + + class FakeResponse(object): + @property + def msg(self): + return FakeMessage() + + return FakeResponse() + + +class LocalFSAdapter(BaseAdapter): + + def send(self, request, stream=None, timeout=None, verify=None, cert=None, + proxies=None): + parsed_url = urlparse.urlparse(request.url) + + # We only work for requests with a host of localhost + if parsed_url.netloc.lower() != "localhost": + raise InvalidURL("Invalid URL %r: Only localhost is allowed" % + request.url) + + real_url = urlparse.urlunparse(parsed_url[:1] + ("",) + parsed_url[2:]) + pathname = url_to_path(real_url) + + resp = Response() + resp.status_code = 200 + resp.url = real_url + + stats = os.stat(pathname) + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + resp.headers = CaseInsensitiveDict({ + "Content-Type": mimetypes.guess_type(pathname)[0] or "text/plain", + "Content-Length": stats.st_size, + "Last-Modified": modified, + }) + + resp.raw = LocalFSResponse(open(pathname, "rb")) + resp.close = resp.raw.close + + return resp + + def close(self): + pass + + +class PipSession(requests.Session): + + timeout = None + + def __init__(self, *args, **kwargs): + super(PipSession, self).__init__(*args, **kwargs) + + # Attach our User Agent to the request + self.headers["User-Agent"] = user_agent() + + # Attach our Authentication handler to the session + self.auth = MultiDomainBasicAuth() + + # Enable file:// urls + self.mount("file://", LocalFSAdapter()) + + def request(self, method, url, *args, **kwargs): + # Make file:// urls not fail due to lack of a hostname + parsed = urlparse.urlparse(url) + if parsed.scheme == "file": + url = urlparse.urlunparse(parsed[:1] + ("localhost",) + parsed[2:]) + + # Allow setting a default timeout on a session + kwargs.setdefault("timeout", self.timeout) + + # Dispatch the actual request + return super(PipSession, self).request(method, url, *args, **kwargs) + + +def get_file_content(url, comes_from=None, session=None): + """Gets the content of a file; it may be a filename, file: URL, or + http: URL. Returns (location, content). 
Content is unicode.""" + if session is None: + session = PipSession() + + match = _scheme_re.search(url) + if match: + scheme = match.group(1).lower() + if (scheme == 'file' and comes_from + and comes_from.startswith('http')): + raise InstallationError( + 'Requirements file %s references URL %s, which is local' + % (comes_from, url)) + if scheme == 'file': + path = url.split(':', 1)[1] + path = path.replace('\\', '/') + match = _url_slash_drive_re.match(path) + if match: + path = match.group(1) + ':' + path.split('|', 1)[1] + path = urllib.unquote(path) + if path.startswith('/'): + path = '/' + path.lstrip('/') + url = path + else: + ## FIXME: catch some errors + resp = session.get(url) + resp.raise_for_status() + + if six.PY3: + return resp.url, resp.text + else: + return resp.url, resp.content + try: + f = open(url) + content = f.read() + except IOError: + e = sys.exc_info()[1] + raise InstallationError('Could not open requirements file: %s' % str(e)) + else: + f.close() + return url, content + + +_scheme_re = re.compile(r'^(http|https|file):', re.I) +_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) + + +def is_url(name): + """Returns true if the name looks like a URL""" + if ':' not in name: + return False + scheme = name.split(':', 1)[0].lower() + return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes + + +def url_to_path(url): + """ + Convert a file: URL to a path. + """ + assert url.startswith('file:'), ( + "You can only turn file: urls into filenames (not %r)" % url) + path = url[len('file:'):].lstrip('/') + path = urllib.unquote(path) + if _url_drive_re.match(path): + path = path[0] + ':' + path[2:] + else: + path = '/' + path + return path + + +_drive_re = re.compile('^([a-z]):', re.I) +_url_drive_re = re.compile('^([a-z])[:|]', re.I) + + +def path_to_url(path): + """ + Convert a path to a file: URL. The path will be made absolute and have + quoted path parts. + """ + path = os.path.normpath(os.path.abspath(path)) + drive, path = os.path.splitdrive(path) + filepath = path.split(os.path.sep) + url = '/'.join([urllib.quote(part) for part in filepath]) + if not drive: + url = url.lstrip('/') + return 'file:///' + drive + url + + +def is_archive_file(name): + """Return True if `name` is considered an archive file.""" + archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle', + '.whl') + ext = splitext(name)[1].lower() + if ext in archives: + return True + return False + + +def unpack_vcs_link(link, location, only_download=False): + vcs_backend = _get_used_vcs_backend(link) + if only_download: + vcs_backend.export(location) + else: + vcs_backend.unpack(location) + + +def _get_used_vcs_backend(link): + for backend in vcs.backends: + if link.scheme in backend.schemes: + vcs_backend = backend(link.url) + return vcs_backend + + +def is_vcs_url(link): + return bool(_get_used_vcs_backend(link)) + + +def is_file_url(link): + return link.url.lower().startswith('file:') + + +def _check_hash(download_hash, link): + if download_hash.digest_size != hashlib.new(link.hash_name).digest_size: + logger.fatal("Hash digest size of the package %d (%s) doesn't match the expected hash name %s!" + % (download_hash.digest_size, link, link.hash_name)) + raise HashMismatch('Hash name mismatch for package %s' % link) + if download_hash.hexdigest() != link.hash: + logger.fatal("Hash of the package %s (%s) doesn't match the expected hash %s!" 
+ % (link, download_hash.hexdigest(), link.hash)) + raise HashMismatch('Bad %s hash for package %s' % (link.hash_name, link)) + + +def _get_hash_from_file(target_file, link): + try: + download_hash = hashlib.new(link.hash_name) + except (ValueError, TypeError): + logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link)) + return None + + fp = open(target_file, 'rb') + while True: + chunk = fp.read(4096) + if not chunk: + break + download_hash.update(chunk) + fp.close() + return download_hash + + +def _download_url(resp, link, temp_location): + fp = open(temp_location, 'wb') + download_hash = None + if link.hash and link.hash_name: + try: + download_hash = hashlib.new(link.hash_name) + except ValueError: + logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link)) + try: + total_length = int(resp.headers['content-length']) + except (ValueError, KeyError, TypeError): + total_length = 0 + downloaded = 0 + show_progress = total_length > 40 * 1000 or not total_length + show_url = link.show_url + try: + if show_progress: + ## FIXME: the URL can get really long in this message: + if total_length: + logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length))) + else: + logger.start_progress('Downloading %s (unknown size): ' % show_url) + else: + logger.notify('Downloading %s' % show_url) + logger.info('Downloading from URL %s' % link) + + def resp_read(chunk_size): + try: + # Special case for urllib3. + try: + for chunk in resp.raw.stream( + chunk_size, decode_content=False): + yield chunk + except IncompleteRead as e: + raise ChunkedEncodingError(e) + except AttributeError: + # Standard file-like object. + while True: + chunk = resp.raw.read(chunk_size) + if not chunk: + break + yield chunk + + for chunk in resp_read(4096): + downloaded += len(chunk) + if show_progress: + if not total_length: + logger.show_progress('%s' % format_size(downloaded)) + else: + logger.show_progress('%3i%% %s' % (100 * downloaded / total_length, format_size(downloaded))) + if download_hash is not None: + download_hash.update(chunk) + fp.write(chunk) + fp.close() + finally: + if show_progress: + logger.end_progress('%s downloaded' % format_size(downloaded)) + return download_hash + + +def _copy_file(filename, location, content_type, link): + copy = True + download_location = os.path.join(location, link.filename) + if os.path.exists(download_location): + response = ask_path_exists( + 'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' % + display_path(download_location), ('i', 'w', 'b')) + if response == 'i': + copy = False + elif response == 'w': + logger.warn('Deleting %s' % display_path(download_location)) + os.remove(download_location) + elif response == 'b': + dest_file = backup_dir(download_location) + logger.warn('Backing up %s to %s' + % (display_path(download_location), display_path(dest_file))) + shutil.move(download_location, dest_file) + if copy: + shutil.copy(filename, download_location) + logger.notify('Saved %s' % display_path(download_location)) + + +def unpack_http_url(link, location, download_cache, download_dir=None, + session=None): + if session is None: + session = PipSession() + + temp_dir = tempfile.mkdtemp('-unpack', 'pip-') + temp_location = None + target_url = link.url.split('#', 1)[0] + already_cached = False + cache_file = None + cache_content_type_file = None + download_hash = None + + # If a download cache is specified, is the file cached there? 
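+    # The cache key is the target URL quoted with no safe characters; e.g.
+    # (illustrative) 'https://example.com/p-1.0.tar.gz' would be stored as
+    #     <download_cache>/https%3A%2F%2Fexample.com%2Fp-1.0.tar.gz
+    # alongside a '.content-type' file recording its Content-Type.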
+ if download_cache: + cache_file = os.path.join(download_cache, + urllib.quote(target_url, '')) + cache_content_type_file = cache_file + '.content-type' + already_cached = ( + os.path.exists(cache_file) and + os.path.exists(cache_content_type_file) + ) + if not os.path.isdir(download_cache): + create_download_cache_folder(download_cache) + + # If a download dir is specified, is the file already downloaded there? + already_downloaded = None + if download_dir: + already_downloaded = os.path.join(download_dir, link.filename) + if not os.path.exists(already_downloaded): + already_downloaded = None + + # If already downloaded, does it's hash match? + if already_downloaded: + temp_location = already_downloaded + content_type = mimetypes.guess_type(already_downloaded)[0] + logger.notify('File was already downloaded %s' % already_downloaded) + if link.hash: + download_hash = _get_hash_from_file(temp_location, link) + try: + _check_hash(download_hash, link) + except HashMismatch: + logger.warn( + 'Previously-downloaded file %s has bad hash, ' + 're-downloading.' % temp_location + ) + temp_location = None + os.unlink(already_downloaded) + already_downloaded = None + + # If not a valid download, let's confirm the cached file is valid + if already_cached and not temp_location: + with open(cache_content_type_file) as fp: + content_type = fp.read().strip() + temp_location = cache_file + logger.notify('Using download cache from %s' % cache_file) + if link.hash and link.hash_name: + download_hash = _get_hash_from_file(cache_file, link) + try: + _check_hash(download_hash, link) + except HashMismatch: + logger.warn( + 'Cached file %s has bad hash, ' + 're-downloading.' % temp_location + ) + temp_location = None + os.unlink(cache_file) + os.unlink(cache_content_type_file) + already_cached = False + + # We don't have either a cached or a downloaded copy + # let's download to a tmp dir + if not temp_location: + try: + resp = session.get(target_url, stream=True) + resp.raise_for_status() + except requests.HTTPError as exc: + logger.fatal("HTTP error %s while getting %s" % + (exc.response.status_code, link)) + raise + + content_type = resp.headers.get('content-type', '') + filename = link.filename # fallback + # Have a look at the Content-Disposition header for a better guess + content_disposition = resp.headers.get('content-disposition') + if content_disposition: + type, params = cgi.parse_header(content_disposition) + # We use ``or`` here because we don't want to use an "empty" value + # from the filename param. + filename = params.get('filename') or filename + ext = splitext(filename)[1] + if not ext: + ext = mimetypes.guess_extension(content_type) + if ext: + filename += ext + if not ext and link.url != resp.url: + ext = os.path.splitext(resp.url)[1] + if ext: + filename += ext + temp_location = os.path.join(temp_dir, filename) + download_hash = _download_url(resp, link, temp_location) + if link.hash and link.hash_name: + _check_hash(download_hash, link) + + # a download dir is specified; let's copy the archive there + if download_dir and not already_downloaded: + _copy_file(temp_location, download_dir, content_type, link) + + # unpack the archive to the build dir location. 
even when only downloading + # archives, they have to be unpacked to parse dependencies + unpack_file(temp_location, location, content_type, link) + + # if using a download cache, cache it, if needed + if cache_file and not already_cached: + cache_download(cache_file, temp_location, content_type) + + if not (already_cached or already_downloaded): + os.unlink(temp_location) + + os.rmdir(temp_dir) + + +def unpack_file_url(link, location, download_dir=None): + + link_path = url_to_path(link.url_without_fragment) + already_downloaded = False + + # If it's a url to a local directory + if os.path.isdir(link_path): + if os.path.isdir(location): + rmtree(location) + shutil.copytree(link_path, location, symlinks=True) + return + + # if link has a hash, let's confirm it matches + if link.hash: + link_path_hash = _get_hash_from_file(link_path, link) + _check_hash(link_path_hash, link) + + # If a download dir is specified, is the file already there and valid? + if download_dir: + download_path = os.path.join(download_dir, link.filename) + if os.path.exists(download_path): + content_type = mimetypes.guess_type(download_path)[0] + logger.notify('File was already downloaded %s' % download_path) + if link.hash: + download_hash = _get_hash_from_file(download_path, link) + try: + _check_hash(download_hash, link) + already_downloaded = True + except HashMismatch: + logger.warn( + 'Previously-downloaded file %s has bad hash, ' + 're-downloading.' % link_path + ) + os.unlink(download_path) + else: + already_downloaded = True + + if already_downloaded: + from_path = download_path + else: + from_path = link_path + + content_type = mimetypes.guess_type(from_path)[0] + + # unpack the archive to the build dir location. even when only downloading + # archives, they have to be unpacked to parse dependencies + unpack_file(from_path, location, content_type, link) + + # a download dir is specified and not already downloaded + if download_dir and not already_downloaded: + _copy_file(from_path, download_dir, content_type, link) diff --git a/lib/python3.4/site-packages/pip/exceptions.py b/lib/python3.4/site-packages/pip/exceptions.py new file mode 100644 index 0000000..febebfb --- /dev/null +++ b/lib/python3.4/site-packages/pip/exceptions.py @@ -0,0 +1,46 @@ +"""Exceptions used throughout package""" + + +class PipError(Exception): + """Base pip exception""" + + +class InstallationError(PipError): + """General exception during installation""" + + +class UninstallationError(PipError): + """General exception during uninstallation""" + + +class DistributionNotFound(InstallationError): + """Raised when a distribution cannot be found to satisfy a requirement""" + + +class BestVersionAlreadyInstalled(PipError): + """Raised when the most up-to-date version of a package is already + installed. 
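+
+    For example, ``pip install --upgrade SomePackage`` when the newest
+    SomePackage is already on the system (the package name is illustrative).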
""" + + +class BadCommand(PipError): + """Raised when virtualenv or a command is not found""" + + +class CommandError(PipError): + """Raised when there is an error in command-line arguments""" + + +class PreviousBuildDirError(PipError): + """Raised when there's a previous conflicting build directory""" + + +class HashMismatch(InstallationError): + """Distribution file hash values don't match.""" + + +class InvalidWheelFilename(InstallationError): + """Invalid wheel filename.""" + + +class UnsupportedWheel(InstallationError): + """Unsupported wheel.""" diff --git a/lib/python3.4/site-packages/pip/index.py b/lib/python3.4/site-packages/pip/index.py new file mode 100644 index 0000000..90fccf3 --- /dev/null +++ b/lib/python3.4/site-packages/pip/index.py @@ -0,0 +1,990 @@ +"""Routines related to PyPI, indexes""" + +import sys +import os +import re +import mimetypes +import posixpath + +from pip.log import logger +from pip.util import Inf, normalize_name, splitext, is_prerelease +from pip.exceptions import (DistributionNotFound, BestVersionAlreadyInstalled, + InstallationError, InvalidWheelFilename, UnsupportedWheel) +from pip.backwardcompat import urlparse, url2pathname +from pip.download import PipSession, url_to_path, path_to_url +from pip.wheel import Wheel, wheel_ext +from pip.pep425tags import supported_tags, supported_tags_noarch, get_platform +import html5lib, requests, pkg_resources +from requests.exceptions import SSLError + + +__all__ = ['PackageFinder'] + + +DEFAULT_MIRROR_HOSTNAME = "last.pypi.python.org" + +INSECURE_SCHEMES = { + "http": ["https"], +} + + +class PackageFinder(object): + """This finds packages. + + This is meant to match easy_install's technique for looking for + packages, by reading pages and looking for appropriate links + """ + + def __init__(self, find_links, index_urls, + use_wheel=True, allow_external=[], allow_unverified=[], + allow_all_external=False, allow_all_prereleases=False, + process_dependency_links=False, session=None): + self.find_links = find_links + self.index_urls = index_urls + self.dependency_links = [] + self.cache = PageCache() + # These are boring links that have already been logged somehow: + self.logged_links = set() + + self.use_wheel = use_wheel + + # Do we allow (safe and verifiable) externally hosted files? + self.allow_external = set(normalize_name(n) for n in allow_external) + + # Which names are allowed to install insecure and unverifiable files? + self.allow_unverified = set( + normalize_name(n) for n in allow_unverified + ) + + # Anything that is allowed unverified is also allowed external + self.allow_external |= self.allow_unverified + + # Do we allow all (safe and verifiable) externally hosted files? + self.allow_all_external = allow_all_external + + # Stores if we ignored any external links so that we can instruct + # end users how to install them if no distributions are available + self.need_warn_external = False + + # Stores if we ignored any unsafe links so that we can instruct + # end users how to install them if no distributions are available + self.need_warn_unverified = False + + # Do we want to allow _all_ pre-releases? + self.allow_all_prereleases = allow_all_prereleases + + # Do we process dependency links? 
+ self.process_dependency_links = process_dependency_links + self._have_warned_dependency_links = False + + # The Session we'll use to make requests + self.session = session or PipSession() + + def add_dependency_links(self, links): + ## FIXME: this shouldn't be global list this, it should only + ## apply to requirements of the package that specifies the + ## dependency_links value + ## FIXME: also, we should track comes_from (i.e., use Link) + if self.process_dependency_links: + if not self._have_warned_dependency_links: + logger.deprecated( + "1.6", + "Dependency Links processing has been deprecated with an " + "accelerated time schedule and will be removed in pip 1.6", + ) + self._have_warned_dependency_links = True + self.dependency_links.extend(links) + + def _sort_locations(self, locations): + """ + Sort locations into "files" (archives) and "urls", and return + a pair of lists (files,urls) + """ + files = [] + urls = [] + + # puts the url for the given file path into the appropriate list + def sort_path(path): + url = path_to_url(path) + if mimetypes.guess_type(url, strict=False)[0] == 'text/html': + urls.append(url) + else: + files.append(url) + + for url in locations: + + is_local_path = os.path.exists(url) + is_file_url = url.startswith('file:') + is_find_link = url in self.find_links + + if is_local_path or is_file_url: + if is_local_path: + path = url + else: + path = url_to_path(url) + if is_find_link and os.path.isdir(path): + path = os.path.realpath(path) + for item in os.listdir(path): + sort_path(os.path.join(path, item)) + elif is_file_url and os.path.isdir(path): + urls.append(url) + elif os.path.isfile(path): + sort_path(path) + else: + urls.append(url) + + return files, urls + + def _link_sort_key(self, link_tuple): + """ + Function used to generate link sort key for link tuples. + The greater the return value, the more preferred it is. + If not finding wheels, then sorted by version only. + If finding wheels, then the sort order is by version, then: + 1. existing installs + 2. wheels ordered via Wheel.support_index_min() + 3. source archives + Note: it was considered to embed this logic into the Link + comparison operators, but then different sdist links + with the same version, would have to be considered equal + """ + parsed_version, link, _ = link_tuple + if self.use_wheel: + support_num = len(supported_tags) + if link == INSTALLED_VERSION: + pri = 1 + elif link.ext == wheel_ext: + wheel = Wheel(link.filename) # can raise InvalidWheelFilename + if not wheel.supported(): + raise UnsupportedWheel("%s is not a supported wheel for this platform. It can't be sorted." % wheel.filename) + pri = -(wheel.support_index_min()) + else: # sdist + pri = -(support_num) + return (parsed_version, pri) + else: + return parsed_version + + def _sort_versions(self, applicable_versions): + """ + Bring the latest version (and wheels) to the front, but maintain the existing ordering as secondary. + See the docstring for `_link_sort_key` for details. + This function is isolated for easier unit testing. + """ + return sorted(applicable_versions, key=self._link_sort_key, reverse=True) + + def find_requirement(self, req, upgrade): + + def mkurl_pypi_url(url): + loc = posixpath.join(url, url_name) + # For maximum compatibility with easy_install, ensure the path + # ends in a trailing slash. Although this isn't in the spec + # (and PyPI can handle it without the slash) some other index + # implementations might break if they relied on easy_install's behavior. 
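+            # e.g. (illustrative):
+            #   'https://pypi.python.org/simple' + 'foo'
+            #       -> 'https://pypi.python.org/simple/foo/'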
+ if not loc.endswith('/'): + loc = loc + '/' + return loc + + url_name = req.url_name + # Only check main index if index URL is given: + main_index_url = None + if self.index_urls: + # Check that we have the url_name correctly spelled: + main_index_url = Link(mkurl_pypi_url(self.index_urls[0]), trusted=True) + # This will also cache the page, so it's okay that we get it again later: + page = self._get_page(main_index_url, req) + if page is None: + url_name = self._find_url_name(Link(self.index_urls[0], trusted=True), url_name, req) or req.url_name + + if url_name is not None: + locations = [ + mkurl_pypi_url(url) + for url in self.index_urls] + self.find_links + else: + locations = list(self.find_links) + for version in req.absolute_versions: + if url_name is not None and main_index_url is not None: + locations = [ + posixpath.join(main_index_url.url, version)] + locations + + file_locations, url_locations = self._sort_locations(locations) + _flocations, _ulocations = self._sort_locations(self.dependency_links) + file_locations.extend(_flocations) + + # We trust every url that the user has given us whether it was given + # via --index-url or --find-links + locations = [Link(url, trusted=True) for url in url_locations] + + # We explicitly do not trust links that came from dependency_links + locations.extend([Link(url) for url in _ulocations]) + + logger.debug('URLs to search for versions for %s:' % req) + for location in locations: + logger.debug('* %s' % location) + + # Determine if this url used a secure transport mechanism + parsed = urlparse.urlparse(str(location)) + if parsed.scheme in INSECURE_SCHEMES: + secure_schemes = INSECURE_SCHEMES[parsed.scheme] + + if len(secure_schemes) == 1: + ctx = (location, parsed.scheme, secure_schemes[0], + parsed.netloc) + logger.warn("%s uses an insecure transport scheme (%s). " + "Consider using %s if %s has it available" % + ctx) + elif len(secure_schemes) > 1: + ctx = (location, parsed.scheme, ", ".join(secure_schemes), + parsed.netloc) + logger.warn("%s uses an insecure transport scheme (%s). " + "Consider using one of %s if %s has any of " + "them available" % ctx) + else: + ctx = (location, parsed.scheme) + logger.warn("%s uses an insecure transport scheme (%s)." % + ctx) + + found_versions = [] + found_versions.extend( + self._package_versions( + # We trust every directly linked archive in find_links + [Link(url, '-f', trusted=True) for url in self.find_links], req.name.lower())) + page_versions = [] + for page in self._get_pages(locations, req): + logger.debug('Analyzing links from page %s' % page.url) + logger.indent += 2 + try: + page_versions.extend(self._package_versions(page.links, req.name.lower())) + finally: + logger.indent -= 2 + dependency_versions = list(self._package_versions( + [Link(url) for url in self.dependency_links], req.name.lower())) + if dependency_versions: + logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions])) + file_versions = list(self._package_versions( + [Link(url) for url in file_locations], req.name.lower())) + if not found_versions and not page_versions and not dependency_versions and not file_versions: + logger.fatal('Could not find any downloads that satisfy the requirement %s' % req) + + if self.need_warn_external: + logger.warn("Some externally hosted files were ignored (use " + "--allow-external %s to allow)." 
% req.name)
+
+            if self.need_warn_unverified:
+                logger.warn("Some insecure and unverifiable files were ignored"
+                            " (use --allow-unverified %s to allow)." %
+                            req.name)
+
+            raise DistributionNotFound('No distributions at all found for %s' % req)
+        installed_version = []
+        if req.satisfied_by is not None:
+            installed_version = [(req.satisfied_by.parsed_version, INSTALLED_VERSION, req.satisfied_by.version)]
+        if file_versions:
+            file_versions.sort(reverse=True)
+            logger.info('Local files found: %s' % ', '.join([url_to_path(link.url) for parsed, link, version in file_versions]))
+        # this is an intentional priority ordering
+        all_versions = installed_version + file_versions + found_versions + page_versions + dependency_versions
+        applicable_versions = []
+        for (parsed_version, link, version) in all_versions:
+            if version not in req.req:
+                logger.info("Ignoring link %s, version %s doesn't match %s"
+                            % (link, version, ','.join([''.join(s) for s in req.req.specs])))
+                continue
+            elif is_prerelease(version) and not (self.allow_all_prereleases or req.prereleases):
+                # If this version isn't the already installed one, then
+                # ignore it if it's a pre-release.
+                if link is not INSTALLED_VERSION:
+                    logger.info("Ignoring link %s, version %s is a pre-release (use --pre to allow)." % (link, version))
+                    continue
+            applicable_versions.append((parsed_version, link, version))
+        applicable_versions = self._sort_versions(applicable_versions)
+        existing_applicable = bool([link for parsed_version, link, version in applicable_versions if link is INSTALLED_VERSION])
+        if not upgrade and existing_applicable:
+            if applicable_versions[0][1] is INSTALLED_VERSION:
+                logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement'
+                            % req.satisfied_by.version)
+            else:
+                logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
+                            % (req.satisfied_by.version, applicable_versions[0][2]))
+            return None
+        if not applicable_versions:
+            logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)'
+                         % (req, ', '.join([version for parsed_version, link, version in all_versions])))
+
+            if self.need_warn_external:
+                logger.warn("Some externally hosted files were ignored (use "
+                            "--allow-external to allow).")
+
+            if self.need_warn_unverified:
+                logger.warn("Some insecure and unverifiable files were ignored"
+                            " (use --allow-unverified %s to allow)." %
+                            req.name)
+
+            raise DistributionNotFound('No distributions matching the version for %s' % req)
+        if applicable_versions[0][1] is INSTALLED_VERSION:
+            # We have an existing version, and it's the best version
+            logger.info('Installed version (%s) is most up-to-date (past versions: %s)'
+                        % (req.satisfied_by.version, ', '.join([version for parsed_version, link, version in applicable_versions[1:]]) or 'none'))
+            raise BestVersionAlreadyInstalled
+        if len(applicable_versions) > 1:
+            logger.info('Using version %s (newest of versions: %s)' %
+                        (applicable_versions[0][2], ', '.join([version for parsed_version, link, version in applicable_versions])))
+
+        selected_version = applicable_versions[0][1]
+
+        if (selected_version.internal is not None
+                and not selected_version.internal):
+            logger.warn("%s is an externally hosted file and may be "
+                        "unreliable" % req.name)
+
+        if (selected_version.verifiable is not None
+                and not selected_version.verifiable):
+            logger.warn("%s is potentially insecure and "
+                        "unverifiable."
% req.name) + + if selected_version._deprecated_regex: + logger.deprecated( + "1.7", + "%s discovered using a deprecated method of parsing, " + "in the future it will no longer be discovered" % req.name + ) + + return selected_version + + + def _find_url_name(self, index_url, url_name, req): + """Finds the true URL name of a package, when the given name isn't quite correct. + This is usually used to implement case-insensitivity.""" + if not index_url.url.endswith('/'): + # Vaguely part of the PyPI API... weird but true. + ## FIXME: bad to modify this? + index_url.url += '/' + page = self._get_page(index_url, req) + if page is None: + logger.fatal('Cannot fetch index base URL %s' % index_url) + return + norm_name = normalize_name(req.url_name) + for link in page.links: + base = posixpath.basename(link.path.rstrip('/')) + if norm_name == normalize_name(base): + logger.notify('Real name of requirement %s is %s' % (url_name, base)) + return base + return None + + def _get_pages(self, locations, req): + """ + Yields (page, page_url) from the given locations, skipping + locations that have errors, and adding download/homepage links + """ + all_locations = list(locations) + seen = set() + + while all_locations: + location = all_locations.pop(0) + if location in seen: + continue + seen.add(location) + + page = self._get_page(location, req) + if page is None: + continue + + yield page + + for link in page.rel_links(): + normalized = normalize_name(req.name).lower() + + if (not normalized in self.allow_external + and not self.allow_all_external): + self.need_warn_external = True + logger.debug("Not searching %s for files because external " + "urls are disallowed." % link) + continue + + if (link.trusted is not None + and not link.trusted + and not normalized in self.allow_unverified): + logger.debug("Not searching %s for urls, it is an " + "untrusted link and cannot produce safe or " + "verifiable files." % link) + self.need_warn_unverified = True + continue + + all_locations.append(link) + + _egg_fragment_re = re.compile(r'#egg=([^&]*)') + _egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I) + _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') + + def _sort_links(self, links): + "Returns elements of links in order, non-egg links first, egg links second, while eliminating duplicates" + eggs, no_eggs = [], [] + seen = set() + for link in links: + if link not in seen: + seen.add(link) + if link.egg_fragment: + eggs.append(link) + else: + no_eggs.append(link) + return no_eggs + eggs + + def _package_versions(self, links, search_name): + for link in self._sort_links(links): + for v in self._link_package_versions(link, search_name): + yield v + + def _known_extensions(self): + extensions = ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip') + if self.use_wheel: + return extensions + (wheel_ext,) + return extensions + + def _link_package_versions(self, link, search_name): + """ + Return an iterable of triples (pkg_resources_version_key, + link, python_version) that can be extracted from the given + link. + + Meant to be overridden by subclasses, not called by clients. 
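+
+        For example (illustrative), a link to ``foo-1.0.tar.gz`` yields
+        ``[(parse_version('1.0'), link, '1.0')]`` when searching for ``foo``.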
+ """ + platform = get_platform() + + version = None + if link.egg_fragment: + egg_info = link.egg_fragment + else: + egg_info, ext = link.splitext() + if not ext: + if link not in self.logged_links: + logger.debug('Skipping link %s; not a file' % link) + self.logged_links.add(link) + return [] + if egg_info.endswith('.tar'): + # Special double-extension case: + egg_info = egg_info[:-4] + ext = '.tar' + ext + if ext not in self._known_extensions(): + if link not in self.logged_links: + logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext)) + self.logged_links.add(link) + return [] + if "macosx10" in link.path and ext == '.zip': + if link not in self.logged_links: + logger.debug('Skipping link %s; macosx10 one' % (link)) + self.logged_links.add(link) + return [] + if ext == wheel_ext: + try: + wheel = Wheel(link.filename) + except InvalidWheelFilename: + logger.debug('Skipping %s because the wheel filename is invalid' % link) + return [] + if wheel.name.lower() != search_name.lower(): + logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name)) + return [] + if not wheel.supported(): + logger.debug('Skipping %s because it is not compatible with this Python' % link) + return [] + # This is a dirty hack to prevent installing Binary Wheels from + # PyPI unless it is a Windows or Mac Binary Wheel. This is + # paired with a change to PyPI disabling uploads for the + # same. Once we have a mechanism for enabling support for binary + # wheels on linux that deals with the inherent problems of + # binary distribution this can be removed. + comes_from = getattr(link, "comes_from", None) + if (( + not platform.startswith('win') + and not platform.startswith('macosx') + ) + and comes_from is not None + and urlparse.urlparse(comes_from.url).netloc.endswith( + "pypi.python.org")): + if not wheel.supported(tags=supported_tags_noarch): + logger.debug( + "Skipping %s because it is a pypi-hosted binary " + "Wheel on an unsupported platform" % link + ) + return [] + version = wheel.version + + if not version: + version = self._egg_info_matches(egg_info, search_name, link) + if version is None: + logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name)) + return [] + + if (link.internal is not None + and not link.internal + and not normalize_name(search_name).lower() in self.allow_external + and not self.allow_all_external): + # We have a link that we are sure is external, so we should skip + # it unless we are allowing externals + logger.debug("Skipping %s because it is externally hosted." % link) + self.need_warn_external = True + return [] + + if (link.verifiable is not None + and not link.verifiable + and not (normalize_name(search_name).lower() + in self.allow_unverified)): + # We have a link that we are sure we cannot verify it's integrity, + # so we should skip it unless we are allowing unsafe installs + # for this requirement. + logger.debug("Skipping %s because it is an insecure and " + "unverifiable file." 
% link) + self.need_warn_unverified = True + return [] + + match = self._py_version_re.search(version) + if match: + version = version[:match.start()] + py_version = match.group(1) + if py_version != sys.version[:3]: + logger.debug('Skipping %s because Python version is incorrect' % link) + return [] + logger.debug('Found link %s, version: %s' % (link, version)) + return [(pkg_resources.parse_version(version), + link, + version)] + + def _egg_info_matches(self, egg_info, search_name, link): + match = self._egg_info_re.search(egg_info) + if not match: + logger.debug('Could not parse version from link: %s' % link) + return None + name = match.group(0).lower() + # To match the "safe" name that pkg_resources creates: + name = name.replace('_', '-') + # project name and version must be separated by a dash + look_for = search_name.lower() + "-" + if name.startswith(look_for): + return match.group(0)[len(look_for):] + else: + return None + + def _get_page(self, link, req): + return HTMLPage.get_page(link, req, + cache=self.cache, + session=self.session, + ) + + +class PageCache(object): + """Cache of HTML pages""" + + failure_limit = 3 + + def __init__(self): + self._failures = {} + self._pages = {} + self._archives = {} + + def too_many_failures(self, url): + return self._failures.get(url, 0) >= self.failure_limit + + def get_page(self, url): + return self._pages.get(url) + + def is_archive(self, url): + return self._archives.get(url, False) + + def set_is_archive(self, url, value=True): + self._archives[url] = value + + def add_page_failure(self, url, level): + self._failures[url] = self._failures.get(url, 0)+level + + def add_page(self, urls, page): + for url in urls: + self._pages[url] = page + + +class HTMLPage(object): + """Represents one page, along with its URL""" + + ## FIXME: these regexes are horrible hacks: + _homepage_re = re.compile(r'\s*home\s*page', re.I) + _download_re = re.compile(r'\s*download\s+url', re.I) + _href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S) + + def __init__(self, content, url, headers=None, trusted=None): + self.content = content + self.parsed = html5lib.parse(self.content, namespaceHTMLElements=False) + self.url = url + self.headers = headers + self.trusted = trusted + + def __str__(self): + return self.url + + @classmethod + def get_page(cls, link, req, cache=None, skip_archives=True, session=None): + if session is None: + session = PipSession() + + url = link.url + url = url.split('#', 1)[0] + if cache.too_many_failures(url): + return None + + # Check for VCS schemes that do not support lookup as web pages. 
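+        # e.g. (illustrative) 'git+https://example.com/repo.git' matches a
+        #   VCS scheme below and is skipped rather than fetched as HTML.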
+ from pip.vcs import VcsSupport + for scheme in VcsSupport.schemes: + if url.lower().startswith(scheme) and url[len(scheme)] in '+:': + logger.debug('Cannot look at %(scheme)s URL %(link)s' % locals()) + return None + + if cache is not None: + inst = cache.get_page(url) + if inst is not None: + return inst + try: + if skip_archives: + if cache is not None: + if cache.is_archive(url): + return None + filename = link.filename + for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']: + if filename.endswith(bad_ext): + content_type = cls._get_content_type(url, + session=session, + ) + if content_type.lower().startswith('text/html'): + break + else: + logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type)) + if cache is not None: + cache.set_is_archive(url) + return None + logger.debug('Getting page %s' % url) + + # Tack index.html onto file:// URLs that point to directories + (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url) + if scheme == 'file' and os.path.isdir(url2pathname(path)): + # add trailing slash if not present so urljoin doesn't trim final segment + if not url.endswith('/'): + url += '/' + url = urlparse.urljoin(url, 'index.html') + logger.debug(' file: URL is directory, getting %s' % url) + + resp = session.get(url, headers={"Accept": "text/html"}) + resp.raise_for_status() + + # The check for archives above only works if the url ends with + # something that looks like an archive. However that is not a + # requirement. For instance http://sourceforge.net/projects/docutils/files/docutils/0.8.1/docutils-0.8.1.tar.gz/download + # redirects to http://superb-dca3.dl.sourceforge.net/project/docutils/docutils/0.8.1/docutils-0.8.1.tar.gz + # Unless we issue a HEAD request on every url we cannot know + # ahead of time for sure if something is HTML or not. However we + # can check after we've downloaded it. 
+ content_type = resp.headers.get('Content-Type', 'unknown') + if not content_type.lower().startswith("text/html"): + logger.debug('Skipping page %s because of Content-Type: %s' % + (link, content_type)) + if cache is not None: + cache.set_is_archive(url) + return None + + inst = cls(resp.text, resp.url, resp.headers, trusted=link.trusted) + except requests.HTTPError as exc: + level = 2 if exc.response.status_code == 404 else 1 + cls._handle_fail(req, link, exc, url, cache=cache, level=level) + except requests.ConnectionError as exc: + cls._handle_fail( + req, link, "connection error: %s" % exc, url, + cache=cache, + ) + except requests.Timeout: + cls._handle_fail(req, link, "timed out", url, cache=cache) + except SSLError as exc: + reason = ("There was a problem confirming the ssl certificate: " + "%s" % exc) + cls._handle_fail(req, link, reason, url, + cache=cache, + level=2, + meth=logger.notify, + ) + else: + if cache is not None: + cache.add_page([url, resp.url], inst) + return inst + + @staticmethod + def _handle_fail(req, link, reason, url, cache=None, level=1, meth=None): + if meth is None: + meth = logger.info + + meth("Could not fetch URL %s: %s", link, reason) + meth("Will skip URL %s when looking for download links for %s" % + (link.url, req)) + + if cache is not None: + cache.add_page_failure(url, level) + + @staticmethod + def _get_content_type(url, session=None): + """Get the Content-Type of the given url, using a HEAD request""" + if session is None: + session = PipSession() + + scheme, netloc, path, query, fragment = urlparse.urlsplit(url) + if not scheme in ('http', 'https', 'ftp', 'ftps'): + ## FIXME: some warning or something? + ## assertion error? + return '' + + resp = session.head(url, allow_redirects=True) + resp.raise_for_status() + + return resp.headers.get("Content-Type", "") + + @property + def api_version(self): + if not hasattr(self, "_api_version"): + _api_version = None + + metas = [x for x in self.parsed.findall(".//meta") + if x.get("name", "").lower() == "api-version"] + if metas: + try: + _api_version = int(metas[0].get("value", None)) + except (TypeError, ValueError): + _api_version = None + self._api_version = _api_version + return self._api_version + + @property + def base_url(self): + if not hasattr(self, "_base_url"): + base = self.parsed.find(".//base") + if base is not None and base.get("href"): + self._base_url = base.get("href") + else: + self._base_url = self.url + return self._base_url + + @property + def links(self): + """Yields all links in the page""" + for anchor in self.parsed.findall(".//a"): + if anchor.get("href"): + href = anchor.get("href") + url = self.clean_link(urlparse.urljoin(self.base_url, href)) + + # Determine if this link is internal. If that distinction + # doesn't make sense in this context, then we don't make + # any distinction. 
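+                # e.g. (illustrative) <a href="p-1.0.tar.gz" rel="internal">
+                #   on an api-version >= 2 index page yields internal=True.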
internal = None
+                if self.api_version and self.api_version >= 2:
+                    # Only api_versions >= 2 have a distinction between
+                    # external and internal links
+                    internal = bool(anchor.get("rel")
+                                    and "internal" in anchor.get("rel").split())
+
+                yield Link(url, self, internal=internal)
+
+    def rel_links(self):
+        for url in self.explicit_rel_links():
+            yield url
+        for url in self.scraped_rel_links():
+            yield url
+
+    def explicit_rel_links(self, rels=('homepage', 'download')):
+        """Yields all links with the given relations"""
+        rels = set(rels)
+
+        for anchor in self.parsed.findall(".//a"):
+            if anchor.get("rel") and anchor.get("href"):
+                found_rels = set(anchor.get("rel").split())
+                # Determine the intersection between what rels were found and
+                # what rels were being looked for
+                if found_rels & rels:
+                    href = anchor.get("href")
+                    url = self.clean_link(urlparse.urljoin(self.base_url, href))
+                    yield Link(url, self, trusted=False)
+
+    def scraped_rel_links(self):
+        # Can we get rid of this horrible horrible method?
+        for regex in (self._homepage_re, self._download_re):
+            match = regex.search(self.content)
+            if not match:
+                continue
+            href_match = self._href_re.search(self.content, pos=match.end())
+            if not href_match:
+                continue
+            url = href_match.group(1) or href_match.group(2) or href_match.group(3)
+            if not url:
+                continue
+            url = self.clean_link(urlparse.urljoin(self.base_url, url))
+            yield Link(url, self, trusted=False, _deprecated_regex=True)
+
+    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
+
+    def clean_link(self, url):
+        """Makes sure a link is fully encoded. That is, if a ' ' shows up in
+        the link, it will be rewritten to %20 (while not over-quoting
+        % or other characters)."""
+        return self._clean_re.sub(
+            lambda match: '%%%2x' % ord(match.group(0)), url)
+
+
+class Link(object):
+
+    def __init__(self, url, comes_from=None, internal=None, trusted=None,
+                 _deprecated_regex=False):
+        self.url = url
+        self.comes_from = comes_from
+        self.internal = internal
+        self.trusted = trusted
+        self._deprecated_regex = _deprecated_regex
+
+    def __str__(self):
+        if self.comes_from:
+            return '%s (from %s)' % (self.url, self.comes_from)
+        else:
+            return str(self.url)
+
+    def __repr__(self):
+        return '<Link %s>' % self
+
+    def __eq__(self, other):
+        return self.url == other.url
+
+    def __ne__(self, other):
+        return self.url != other.url
+
+    def __lt__(self, other):
+        return self.url < other.url
+
+    def __le__(self, other):
+        return self.url <= other.url
+
+    def __gt__(self, other):
+        return self.url > other.url
+
+    def __ge__(self, other):
+        return self.url >= other.url
+
+    def __hash__(self):
+        return hash(self.url)
+
+    @property
+    def filename(self):
+        _, netloc, path, _, _ = urlparse.urlsplit(self.url)
+        name = posixpath.basename(path.rstrip('/')) or netloc
+        assert name, ('URL %r produced no filename' % self.url)
+        return name
+
+    @property
+    def scheme(self):
+        return urlparse.urlsplit(self.url)[0]
+
+    @property
+    def path(self):
+        return urlparse.urlsplit(self.url)[2]
+
+    def splitext(self):
+        return splitext(posixpath.basename(self.path.rstrip('/')))
+
+    @property
+    def ext(self):
+        return self.splitext()[1]
+
+    @property
+    def url_without_fragment(self):
+        scheme, netloc, path, query, fragment = urlparse.urlsplit(self.url)
+        return urlparse.urlunsplit((scheme, netloc, path, query, None))
+
+    _egg_fragment_re = re.compile(r'#egg=([^&]*)')
+
+    @property
+    def egg_fragment(self):
+        match = self._egg_fragment_re.search(self.url)
+        if not match:
+            return None
+        return match.group(1)
+
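+    # Example fragments (illustrative):
+    #   https://example.com/p-1.0.tar.gz#egg=p      -> egg_fragment == 'p'
+    #   https://example.com/p-1.0.tar.gz#md5=0a12bc -> hash_name == 'md5',
+    #                                                  hash == '0a12bc'
+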
_hash_re = re.compile(r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)') + + @property + def hash(self): + match = self._hash_re.search(self.url) + if match: + return match.group(2) + return None + + @property + def hash_name(self): + match = self._hash_re.search(self.url) + if match: + return match.group(1) + return None + + @property + def show_url(self): + return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0]) + + @property + def verifiable(self): + """ + Returns True if this link can be verified after download, False if it + cannot, and None if we cannot determine. + """ + trusted = self.trusted or getattr(self.comes_from, "trusted", None) + if trusted is not None and trusted: + # This link came from a trusted source. It *may* be verifiable but + # first we need to see if this page is operating under the new + # API version. + try: + api_version = getattr(self.comes_from, "api_version", None) + api_version = int(api_version) + except (ValueError, TypeError): + api_version = None + + if api_version is None or api_version <= 1: + # This link is either trusted, or it came from a trusted, + # however it is not operating under the API version 2 so + # we can't make any claims about if it's safe or not + return + + if self.hash: + # This link came from a trusted source and it has a hash, so we + # can consider it safe. + return True + else: + # This link came from a trusted source, using the new API + # version, and it does not have a hash. It is NOT verifiable + return False + elif trusted is not None: + # This link came from an untrusted source and we cannot trust it + return False + + +# An object to represent the "link" for the installed version of a requirement. +# Using Inf as the url makes it sort higher. +INSTALLED_VERSION = Link(Inf) + + +def get_requirement_from_url(url): + """Get a requirement from the URL, if possible. This looks for #egg + in the URL""" + link = Link(url) + egg_info = link.egg_fragment + if not egg_info: + egg_info = splitext(link.filename)[0] + return package_to_requirement(egg_info) + + +def package_to_requirement(package_name): + """Translate a name like Foo-1.2 to Foo==1.3""" + match = re.search(r'^(.*?)-(dev|\d.*)', package_name) + if match: + name = match.group(1) + version = match.group(2) + else: + name = package_name + version = '' + if version: + return '%s==%s' % (name, version) + else: + return name diff --git a/lib/python3.4/site-packages/pip/locations.py b/lib/python3.4/site-packages/pip/locations.py new file mode 100644 index 0000000..1d40265 --- /dev/null +++ b/lib/python3.4/site-packages/pip/locations.py @@ -0,0 +1,172 @@ +"""Locations where we look for configs, install stuff, etc""" + +import sys +import site +import os +import tempfile +from distutils.command.install import install, SCHEME_KEYS +import getpass +from pip.backwardcompat import get_python_lib, get_path_uid, user_site +import pip.exceptions + + +DELETE_MARKER_MESSAGE = '''\ +This file is placed here by pip to indicate the source was put +here by pip. + +Once this package is successfully installed this source code will be +deleted (unless you remove this file). +''' +PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt' + +def write_delete_marker_file(directory): + """ + Write the pip delete marker file into this directory. 
+ """ + filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME) + marker_fp = open(filepath, 'w') + marker_fp.write(DELETE_MARKER_MESSAGE) + marker_fp.close() + + +def running_under_virtualenv(): + """ + Return True if we're running inside a virtualenv, False otherwise. + + """ + if hasattr(sys, 'real_prefix'): + return True + elif sys.prefix != getattr(sys, "base_prefix", sys.prefix): + return True + + return False + + +def virtualenv_no_global(): + """ + Return True if in a venv and no system site packages. + """ + #this mirrors the logic in virtualenv.py for locating the no-global-site-packages.txt file + site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) + no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt') + if running_under_virtualenv() and os.path.isfile(no_global_file): + return True + +def __get_username(): + """ Returns the effective username of the current process. """ + if sys.platform == 'win32': + return getpass.getuser() + import pwd + return pwd.getpwuid(os.geteuid()).pw_name + +def _get_build_prefix(): + """ Returns a safe build_prefix """ + path = os.path.join(tempfile.gettempdir(), 'pip_build_%s' % + __get_username()) + if sys.platform == 'win32': + """ on windows(tested on 7) temp dirs are isolated """ + return path + try: + os.mkdir(path) + write_delete_marker_file(path) + except OSError: + file_uid = None + try: + # raises OSError for symlinks + # https://github.com/pypa/pip/pull/935#discussion_r5307003 + file_uid = get_path_uid(path) + except OSError: + file_uid = None + + if file_uid != os.geteuid(): + msg = "The temporary folder for building (%s) is either not owned by you, or is a symlink." \ + % path + print (msg) + print("pip will not work until the temporary folder is " + \ + "either deleted or is a real directory owned by your user account.") + raise pip.exceptions.InstallationError(msg) + return path + +if running_under_virtualenv(): + build_prefix = os.path.join(sys.prefix, 'build') + src_prefix = os.path.join(sys.prefix, 'src') +else: + # Note: intentionally NOT using mkdtemp + # See https://github.com/pypa/pip/issues/906 for plan to move to mkdtemp + build_prefix = _get_build_prefix() + + ## FIXME: keep src in cwd for now (it is not a temporary folder) + try: + src_prefix = os.path.join(os.getcwd(), 'src') + except OSError: + # In case the current working directory has been renamed or deleted + sys.exit("The folder you are executing pip from can no longer be found.") + +# under Mac OS X + virtualenv sys.prefix is not properly resolved +# it is something like /path/to/python/bin/.. +# Note: using realpath due to tmp dirs on OSX being symlinks +build_prefix = os.path.abspath(os.path.realpath(build_prefix)) +src_prefix = os.path.abspath(src_prefix) + +# FIXME doesn't account for venv linked to global site-packages + +site_packages = get_python_lib() +user_dir = os.path.expanduser('~') +if sys.platform == 'win32': + bin_py = os.path.join(sys.prefix, 'Scripts') + bin_user = os.path.join(user_site, 'Scripts') if user_site else None + # buildout uses 'bin' on Windows too? 
+ if not os.path.exists(bin_py): + bin_py = os.path.join(sys.prefix, 'bin') + bin_user = os.path.join(user_site, 'bin') if user_site else None + default_storage_dir = os.path.join(user_dir, 'pip') + default_config_file = os.path.join(default_storage_dir, 'pip.ini') + default_log_file = os.path.join(default_storage_dir, 'pip.log') +else: + bin_py = os.path.join(sys.prefix, 'bin') + bin_user = os.path.join(user_site, 'bin') if user_site else None + default_storage_dir = os.path.join(user_dir, '.pip') + default_config_file = os.path.join(default_storage_dir, 'pip.conf') + default_log_file = os.path.join(default_storage_dir, 'pip.log') + + # Forcing to use /usr/local/bin for standard Mac OS X framework installs + # Also log to ~/Library/Logs/ for use with the Console.app log viewer + if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': + bin_py = '/usr/local/bin' + default_log_file = os.path.join(user_dir, 'Library/Logs/pip.log') + + +def distutils_scheme(dist_name, user=False, home=None, root=None): + """ + Return a distutils install scheme + """ + from distutils.dist import Distribution + + scheme = {} + d = Distribution({'name': dist_name}) + d.parse_config_files() + i = d.get_command_obj('install', create=True) + # NOTE: setting user or home has the side-effect of creating the home dir or + # user base for installations during finalize_options() + # ideally, we'd prefer a scheme class that has no side-effects. + i.user = user or i.user + i.home = home or i.home + i.root = root or i.root + i.finalize_options() + for key in SCHEME_KEYS: + scheme[key] = getattr(i, 'install_'+key) + + if running_under_virtualenv(): + scheme['headers'] = os.path.join(sys.prefix, + 'include', + 'site', + 'python' + sys.version[:3], + dist_name) + + if root is not None: + scheme["headers"] = os.path.join( + root, + os.path.abspath(scheme["headers"])[1:], + ) + + return scheme diff --git a/lib/python3.4/site-packages/pip/log.py b/lib/python3.4/site-packages/pip/log.py new file mode 100644 index 0000000..a17ae4b --- /dev/null +++ b/lib/python3.4/site-packages/pip/log.py @@ -0,0 +1,276 @@ +"""Logging +""" + +import sys +import os +import logging + +from pip import backwardcompat +import colorama, pkg_resources + + +def _color_wrap(*colors): + def wrapped(inp): + return "".join(list(colors) + [inp, colorama.Style.RESET_ALL]) + return wrapped + + +def should_color(consumer, environ, std=(sys.stdout, sys.stderr)): + real_consumer = (consumer if not isinstance(consumer, colorama.AnsiToWin32) + else consumer.wrapped) + + # If consumer isn't stdout or stderr we shouldn't colorize it + if real_consumer not in std: + return False + + # If consumer is a tty we should color it + if hasattr(real_consumer, "isatty") and real_consumer.isatty(): + return True + + # If we have an ASNI term we should color it + if environ.get("TERM") == "ANSI": + return True + + # If anything else we should not color it + return False + + +def should_warn(current_version, removal_version): + # Our Significant digits on versions is 2, so remove everything but the + # first two places. 
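+    # e.g. should_warn('1.5.6', '1.7') compares '1.5' < '1.6' -> True (warn),
+    # while should_warn('1.6', '1.7') compares '1.6' < '1.6' -> False
+    # (logged as an error by Logger.deprecated below).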
+ current_version = ".".join(current_version.split(".")[:2]) + removal_version = ".".join(removal_version.split(".")[:2]) + + # Our warning threshold is one minor version before removal, so we + # decrement the minor version by one + major, minor = removal_version.split(".") + minor = str(int(minor) - 1) + warn_version = ".".join([major, minor]) + + # Test if our current_version should be a warn + return (pkg_resources.parse_version(current_version) + < pkg_resources.parse_version(warn_version)) + + +class Logger(object): + """ + Logging object for use in command-line script. Allows ranges of + levels, to avoid some redundancy of displayed information. + """ + VERBOSE_DEBUG = logging.DEBUG - 1 + DEBUG = logging.DEBUG + INFO = logging.INFO + NOTIFY = (logging.INFO + logging.WARN) / 2 + WARN = WARNING = logging.WARN + ERROR = logging.ERROR + FATAL = logging.FATAL + + LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL] + + COLORS = { + WARN: _color_wrap(colorama.Fore.YELLOW), + ERROR: _color_wrap(colorama.Fore.RED), + FATAL: _color_wrap(colorama.Fore.RED), + } + + def __init__(self): + self.consumers = [] + self.indent = 0 + self.explicit_levels = False + self.in_progress = None + self.in_progress_hanging = False + + def add_consumers(self, *consumers): + if sys.platform.startswith("win"): + for level, consumer in consumers: + if hasattr(consumer, "write"): + self.consumers.append( + (level, colorama.AnsiToWin32(consumer)), + ) + else: + self.consumers.append((level, consumer)) + else: + self.consumers.extend(consumers) + + def debug(self, msg, *args, **kw): + self.log(self.DEBUG, msg, *args, **kw) + + def info(self, msg, *args, **kw): + self.log(self.INFO, msg, *args, **kw) + + def notify(self, msg, *args, **kw): + self.log(self.NOTIFY, msg, *args, **kw) + + def warn(self, msg, *args, **kw): + self.log(self.WARN, msg, *args, **kw) + + def error(self, msg, *args, **kw): + self.log(self.ERROR, msg, *args, **kw) + + def fatal(self, msg, *args, **kw): + self.log(self.FATAL, msg, *args, **kw) + + def deprecated(self, removal_version, msg, *args, **kwargs): + """ + Logs deprecation message which is log level WARN if the + ``removal_version`` is > 1 minor release away and log level ERROR + otherwise. + + removal_version should be the version that the deprecated feature is + expected to be removed in, so something that will not exist in + version 1.7, but will in 1.6 would have a removal_version of 1.7. + """ + from pip import __version__ + + if should_warn(__version__, removal_version): + self.warn(msg, *args, **kwargs) + else: + self.error(msg, *args, **kwargs) + + def log(self, level, msg, *args, **kw): + if args: + if kw: + raise TypeError( + "You may give positional or keyword arguments, not both") + args = args or kw + + # render + if args: + rendered = msg % args + else: + rendered = msg + rendered = ' ' * self.indent + rendered + if self.explicit_levels: + ## FIXME: should this be a name, not a level number? 
rendered = '%02i %s' % (level, rendered)
+
+        for consumer_level, consumer in self.consumers:
+            if self.level_matches(level, consumer_level):
+                if (self.in_progress_hanging
+                        and consumer in (sys.stdout, sys.stderr)):
+                    self.in_progress_hanging = False
+                    sys.stdout.write('\n')
+                    sys.stdout.flush()
+                if hasattr(consumer, 'write'):
+                    write_content = rendered + '\n'
+                    if should_color(consumer, os.environ):
+                        # We are printing to stdout or stderr and it supports
+                        # colors so render our text colored
+                        colorizer = self.COLORS.get(level, lambda x: x)
+                        write_content = colorizer(write_content)
+
+                    consumer.write(write_content)
+                    if hasattr(consumer, 'flush'):
+                        consumer.flush()
+                else:
+                    consumer(rendered)
+
+    def _show_progress(self):
+        """Should we display download progress?"""
+        return (self.stdout_level_matches(self.NOTIFY) and sys.stdout.isatty())
+
+    def start_progress(self, msg):
+        assert not self.in_progress, (
+            "Tried to start_progress(%r) while in_progress %r"
+            % (msg, self.in_progress))
+        if self._show_progress():
+            sys.stdout.write(' ' * self.indent + msg)
+            sys.stdout.flush()
+            self.in_progress_hanging = True
+        else:
+            self.in_progress_hanging = False
+        self.in_progress = msg
+        self.last_message = None
+
+    def end_progress(self, msg='done.'):
+        assert self.in_progress, (
+            "Tried to end_progress without start_progress")
+        if self._show_progress():
+            if not self.in_progress_hanging:
+                # Some message has been printed out since start_progress
+                sys.stdout.write('...' + self.in_progress + msg + '\n')
+                sys.stdout.flush()
+            else:
+                # These erase any messages shown with show_progress (besides .'s)
+                logger.show_progress('')
+                logger.show_progress('')
+                sys.stdout.write(msg + '\n')
+                sys.stdout.flush()
+        self.in_progress = None
+        self.in_progress_hanging = False
+
+    def show_progress(self, message=None):
+        """If we are in a progress scope, and no log messages have been
+        shown, write out another '.'"""
+        if self.in_progress_hanging:
+            if message is None:
+                sys.stdout.write('.')
+                sys.stdout.flush()
+            else:
+                if self.last_message:
+                    padding = ' ' * max(0, len(self.last_message) - len(message))
+                else:
+                    padding = ''
+                sys.stdout.write('\r%s%s%s%s' %
+                                 (' ' * self.indent, self.in_progress, message, padding))
+                sys.stdout.flush()
+                self.last_message = message
+
+    def stdout_level_matches(self, level):
+        """Returns true if a message at this level will go to stdout"""
+        return self.level_matches(level, self._stdout_level())
+
+    def _stdout_level(self):
+        """Returns the level that stdout runs at"""
+        for level, consumer in self.consumers:
+            if consumer is sys.stdout:
+                return level
+        return self.FATAL
+
+    def level_matches(self, level, consumer_level):
+        """
+        >>> l = Logger()
+        >>> l.level_matches(3, 4)
+        False
+        >>> l.level_matches(3, 2)
+        True
+        >>> l.level_matches(slice(None, 3), 3)
+        False
+        >>> l.level_matches(slice(None, 3), 2)
+        True
+        >>> l.level_matches(slice(1, 3), 1)
+        True
+        >>> l.level_matches(slice(2, 3), 1)
+        False
+        """
+        if isinstance(level, slice):
+            start, stop = level.start, level.stop
+            if start is not None and start > consumer_level:
+                return False
+            if stop is not None and stop <= consumer_level:
+                return False
+            return True
+        else:
+            return level >= consumer_level
+
+    @classmethod
+    def level_for_integer(cls, level):
+        levels = cls.LEVELS
+        if level < 0:
+            return levels[0]
+        if level >= len(levels):
+            return levels[-1]
+        return levels[level]
+
+    def move_stdout_to_stderr(self):
+        to_remove = []
+        to_add = []
+        for consumer_level, consumer in self.consumers:
+            if
consumer == sys.stdout: + to_remove.append((consumer_level, consumer)) + to_add.append((consumer_level, sys.stderr)) + for item in to_remove: + self.consumers.remove(item) + self.consumers.extend(to_add) + +logger = Logger() diff --git a/lib/python3.4/site-packages/pip/pep425tags.py b/lib/python3.4/site-packages/pip/pep425tags.py new file mode 100644 index 0000000..95d3753 --- /dev/null +++ b/lib/python3.4/site-packages/pip/pep425tags.py @@ -0,0 +1,102 @@ +"""Generate and work with PEP 425 Compatibility Tags.""" + +import sys +import warnings + +try: + import sysconfig +except ImportError: # pragma nocover + # Python < 2.7 + import distutils.sysconfig as sysconfig +import distutils.util + + +def get_abbr_impl(): + """Return abbreviated implementation name.""" + if hasattr(sys, 'pypy_version_info'): + pyimpl = 'pp' + elif sys.platform.startswith('java'): + pyimpl = 'jy' + elif sys.platform == 'cli': + pyimpl = 'ip' + else: + pyimpl = 'cp' + return pyimpl + + +def get_impl_ver(): + """Return implementation version.""" + return ''.join(map(str, sys.version_info[:2])) + + +def get_platform(): + """Return our platform name 'win32', 'linux_x86_64'""" + # XXX remove distutils dependency + return distutils.util.get_platform().replace('.', '_').replace('-', '_') + + +def get_supported(versions=None, noarch=False): + """Return a list of supported tags for each version specified in + `versions`. + + :param versions: a list of string versions, of the form ["33", "32"], + or None. The first version will be assumed to support our ABI. + """ + supported = [] + + # Versions must be given with respect to the preference + if versions is None: + versions = [] + major = sys.version_info[0] + # Support all previous minor Python versions. + for minor in range(sys.version_info[1], -1, -1): + versions.append(''.join(map(str, (major, minor)))) + + impl = get_abbr_impl() + + abis = [] + + try: + soabi = sysconfig.get_config_var('SOABI') + except IOError as e: # Issue #1074 + warnings.warn("{0}".format(e), RuntimeWarning) + soabi = None + + if soabi and soabi.startswith('cpython-'): + abis[0:0] = ['cp' + soabi.split('-', 1)[-1]] + + abi3s = set() + import imp + for suffix in imp.get_suffixes(): + if suffix[0].startswith('.abi'): + abi3s.add(suffix[0].split('.', 2)[1]) + + abis.extend(sorted(list(abi3s))) + + abis.append('none') + + if not noarch: + arch = get_platform() + + # Current version, current API (built specifically for our Python): + for abi in abis: + supported.append(('%s%s' % (impl, versions[0]), abi, arch)) + + # No abi / arch, but requires our implementation: + for i, version in enumerate(versions): + supported.append(('%s%s' % (impl, version), 'none', 'any')) + if i == 0: + # Tagged specifically as being cross-version compatible + # (with just the major version specified) + supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) + + # No abi / arch, generic Python + for i, version in enumerate(versions): + supported.append(('py%s' % (version,), 'none', 'any')) + if i == 0: + supported.append(('py%s' % (version[0]), 'none', 'any')) + + return supported + +supported_tags = get_supported() +supported_tags_noarch = get_supported(noarch=True) diff --git a/lib/python3.4/site-packages/pip/req.py b/lib/python3.4/site-packages/pip/req.py new file mode 100644 index 0000000..3b72a16 --- /dev/null +++ b/lib/python3.4/site-packages/pip/req.py @@ -0,0 +1,1940 @@ +from email.parser import FeedParser +import os +import imp +import locale +import re +import sys +import shutil +import tempfile +import 
textwrap
+import zipfile
+
+from distutils.util import change_root
+from pip.locations import (bin_py, running_under_virtualenv,
+                           PIP_DELETE_MARKER_FILENAME,
+                           write_delete_marker_file, bin_user)
+from pip.exceptions import (InstallationError, UninstallationError,
+                            UnsupportedWheel, BestVersionAlreadyInstalled,
+                            InvalidWheelFilename, DistributionNotFound,
+                            PreviousBuildDirError)
+from pip.vcs import vcs
+from pip.log import logger
+from pip.locations import running_under_virtualenv
+from pip.util import (display_path, rmtree, ask, ask_path_exists, backup_dir,
+                      is_installable_dir, is_local, dist_is_local,
+                      dist_in_usersite, dist_in_site_packages, renames,
+                      normalize_path, egg_link_path, make_path_relative,
+                      call_subprocess, is_prerelease, normalize_name)
+from pip.backwardcompat import (urlparse, urllib, uses_pycache,
+                                ConfigParser, string_types, HTTPError,
+                                get_python_version, b)
+from pip.index import Link
+from pip.locations import build_prefix
+from pip.download import (PipSession, get_file_content, is_url, url_to_path,
+                          path_to_url, is_archive_file,
+                          unpack_vcs_link, is_vcs_url, is_file_url,
+                          unpack_file_url, unpack_http_url)
+import pip.wheel
+from pip.wheel import move_wheel_files, Wheel, wheel_ext
+import pkg_resources, six
+
+
+def read_text_file(filename):
+    """Return the contents of *filename*.
+
+    Try to decode the file contents with utf-8, the preferred system encoding
+    (e.g., cp1252 on some Windows machines) and latin1, in that order. Decoding
+    a byte string with latin1 will never raise an error. In the worst case, the
+    returned string will contain some garbage characters.
+
+    """
+    with open(filename, 'rb') as fp:
+        data = fp.read()
+
+    encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
+    for enc in encodings:
+        try:
+            data = data.decode(enc)
+        except UnicodeDecodeError:
+            continue
+        break
+
+    assert type(data) != bytes  # Latin1 should have worked.
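# ---------------------------------------------------------------------------
# [Editor's aside -- illustrative sketch, not part of this commit.]
# The decode fallback in read_text_file() above relies on latin1 mapping
# every possible byte, so the last attempt can never raise. A self-contained
# demonstration (the byte string is an assumed example):
raw = b'caf\xe9'                    # valid latin1/cp1252, invalid UTF-8
try:
    text = raw.decode('utf-8')
except UnicodeDecodeError:
    text = raw.decode('latin1')     # never fails; may yield garbage chars
print(text)                         # 'café' for this input
# [End of aside; the vendored listing resumes below.]
# ---------------------------------------------------------------------------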
+ return data + + +class InstallRequirement(object): + + def __init__(self, req, comes_from, source_dir=None, editable=False, + url=None, as_egg=False, update=True, prereleases=None, + editable_options=None, from_bundle=False, pycompile=True): + self.extras = () + if isinstance(req, string_types): + req = pkg_resources.Requirement.parse(req) + self.extras = req.extras + self.req = req + self.comes_from = comes_from + self.source_dir = source_dir + self.editable = editable + + if editable_options is None: + editable_options = {} + + self.editable_options = editable_options + self.url = url + self.as_egg = as_egg + self._egg_info_path = None + # This holds the pkg_resources.Distribution object if this requirement + # is already available: + self.satisfied_by = None + # This hold the pkg_resources.Distribution object if this requirement + # conflicts with another installed distribution: + self.conflicts_with = None + self._temp_build_dir = None + self._is_bundle = None + # True if the editable should be updated: + self.update = update + # Set to True after successful installation + self.install_succeeded = None + # UninstallPathSet of uninstalled distribution (for possible rollback) + self.uninstalled = None + self.use_user_site = False + self.target_dir = None + self.from_bundle = from_bundle + + self.pycompile = pycompile + + # True if pre-releases are acceptable + if prereleases: + self.prereleases = True + elif self.req is not None: + self.prereleases = any([is_prerelease(x[1]) and x[0] != "!=" for x in self.req.specs]) + else: + self.prereleases = False + + @classmethod + def from_editable(cls, editable_req, comes_from=None, default_vcs=None): + name, url, extras_override = parse_editable(editable_req, default_vcs) + if url.startswith('file:'): + source_dir = url_to_path(url) + else: + source_dir = None + + res = cls(name, comes_from, source_dir=source_dir, + editable=True, + url=url, + editable_options=extras_override, + prereleases=True) + + if extras_override is not None: + res.extras = extras_override + + return res + + @classmethod + def from_line(cls, name, comes_from=None, prereleases=None): + """Creates an InstallRequirement from a name, which might be a + requirement, directory containing 'setup.py', filename, or URL. + """ + url = None + name = name.strip() + req = None + path = os.path.normpath(os.path.abspath(name)) + link = None + + if is_url(name): + link = Link(name) + elif os.path.isdir(path) and (os.path.sep in name or name.startswith('.')): + if not is_installable_dir(path): + raise InstallationError("Directory %r is not installable. File 'setup.py' not found." % name) + link = Link(path_to_url(name)) + elif is_archive_file(path): + if not os.path.isfile(path): + logger.warn('Requirement %r looks like a filename, but the file does not exist', name) + link = Link(path_to_url(name)) + + # If the line has an egg= definition, but isn't editable, pull the requirement out. + # Otherwise, assume the name is the req for the non URL/path/archive case. 
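# ---------------------------------------------------------------------------
# [Editor's aside -- illustrative sketch, not part of this commit.]
# The '#egg=' fragment mentioned in the comments above is how a URL names
# its requirement. A rough standalone equivalent of pulling it out (the URL
# is an assumed example; pip itself uses Link.egg_fragment for this):
url = 'git+https://example.org/repo.git#egg=SomePackage'
frag = url.split('#', 1)[1]                         # 'egg=SomePackage'
opts = dict(p.split('=', 1) for p in frag.split('&'))
print(opts['egg'])                                  # SomePackage
# [End of aside; the vendored listing resumes below.]
# ---------------------------------------------------------------------------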
+ if link and req is None: + url = link.url_without_fragment + req = link.egg_fragment #when fragment is None, this will become an 'unnamed' requirement + + # Handle relative file URLs + if link.scheme == 'file' and re.search(r'\.\./', url): + url = path_to_url(os.path.normpath(os.path.abspath(link.path))) + + # fail early for invalid or unsupported wheels + if link.ext == wheel_ext: + wheel = Wheel(link.filename) # can raise InvalidWheelFilename + if not wheel.supported(): + raise UnsupportedWheel("%s is not a supported wheel on this platform." % wheel.filename) + + else: + req = name + + return cls(req, comes_from, url=url, prereleases=prereleases) + + def __str__(self): + if self.req: + s = str(self.req) + if self.url: + s += ' from %s' % self.url + else: + s = self.url + if self.satisfied_by is not None: + s += ' in %s' % display_path(self.satisfied_by.location) + if self.comes_from: + if isinstance(self.comes_from, string_types): + comes_from = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += ' (from %s)' % comes_from + return s + + def from_path(self): + if self.req is None: + return None + s = str(self.req) + if self.comes_from: + if isinstance(self.comes_from, string_types): + comes_from = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += '->' + comes_from + return s + + def build_location(self, build_dir, unpack=True): + if self._temp_build_dir is not None: + return self._temp_build_dir + if self.req is None: + self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-') + self._ideal_build_dir = build_dir + return self._temp_build_dir + if self.editable: + name = self.name.lower() + else: + name = self.name + # FIXME: Is there a better place to create the build_dir? 
(hg and bzr need this) + if not os.path.exists(build_dir): + _make_build_dir(build_dir) + return os.path.join(build_dir, name) + + def correct_build_location(self): + """If the build location was a temporary directory, this will move it + to a new more permanent location""" + if self.source_dir is not None: + return + assert self.req is not None + assert self._temp_build_dir + old_location = self._temp_build_dir + new_build_dir = self._ideal_build_dir + del self._ideal_build_dir + if self.editable: + name = self.name.lower() + else: + name = self.name + new_location = os.path.join(new_build_dir, name) + if not os.path.exists(new_build_dir): + logger.debug('Creating directory %s' % new_build_dir) + _make_build_dir(new_build_dir) + if os.path.exists(new_location): + raise InstallationError( + 'A package already exists in %s; please remove it to continue' + % display_path(new_location)) + logger.debug('Moving package %s from %s to new location %s' + % (self, display_path(old_location), display_path(new_location))) + shutil.move(old_location, new_location) + self._temp_build_dir = new_location + self.source_dir = new_location + self._egg_info_path = None + + @property + def name(self): + if self.req is None: + return None + return self.req.project_name + + @property + def url_name(self): + if self.req is None: + return None + return urllib.quote(self.req.unsafe_name) + + @property + def setup_py(self): + try: + import setuptools + except ImportError: + # Setuptools is not available + raise InstallationError( + "setuptools must be installed to install from a source " + "distribution" + ) + + setup_file = 'setup.py' + + if self.editable_options and 'subdirectory' in self.editable_options: + setup_py = os.path.join(self.source_dir, + self.editable_options['subdirectory'], + setup_file) + + else: + setup_py = os.path.join(self.source_dir, setup_file) + + # Python2 __file__ should not be unicode + if six.PY2 and isinstance(setup_py, six.text_type): + setup_py = setup_py.encode(sys.getfilesystemencoding()) + + return setup_py + + def run_egg_info(self, force_root_egg_info=False): + assert self.source_dir + if self.name: + logger.notify('Running setup.py (path:%s) egg_info for package %s' % (self.setup_py, self.name)) + else: + logger.notify('Running setup.py (path:%s) egg_info for package from %s' % (self.setup_py, self.url)) + logger.indent += 2 + try: + + # if it's distribute>=0.7, it won't contain an importable + # setuptools, and having an egg-info dir blocks the ability of + # setup.py to find setuptools plugins, so delete the egg-info dir if + # no setuptools. 
it will get recreated by the run of egg_info + # NOTE: this self.name check only works when installing from a specifier + # (not archive path/urls) + # TODO: take this out later + if self.name == 'distribute' and not os.path.isdir(os.path.join(self.source_dir, 'setuptools')): + rmtree(os.path.join(self.source_dir, 'distribute.egg-info')) + + script = self._run_setup_py + script = script.replace('__SETUP_PY__', repr(self.setup_py)) + script = script.replace('__PKG_NAME__', repr(self.name)) + egg_info_cmd = [sys.executable, '-c', script, 'egg_info'] + # We can't put the .egg-info files at the root, because then the source code will be mistaken + # for an installed egg, causing problems + if self.editable or force_root_egg_info: + egg_base_option = [] + else: + egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info') + if not os.path.exists(egg_info_dir): + os.makedirs(egg_info_dir) + egg_base_option = ['--egg-base', 'pip-egg-info'] + call_subprocess( + egg_info_cmd + egg_base_option, + cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False, + command_level=logger.VERBOSE_DEBUG, + command_desc='python setup.py egg_info') + finally: + logger.indent -= 2 + if not self.req: + self.req = pkg_resources.Requirement.parse( + "%(Name)s==%(Version)s" % self.pkg_info()) + self.correct_build_location() + + ## FIXME: This is a lame hack, entirely for PasteScript which has + ## a self-provided entry point that causes this awkwardness + _run_setup_py = """ +__file__ = __SETUP_PY__ +from setuptools.command import egg_info +import pkg_resources +import os +import tokenize +def replacement_run(self): + self.mkpath(self.egg_info) + installer = self.distribution.fetch_build_egg + for ep in pkg_resources.iter_entry_points('egg_info.writers'): + # require=False is the change we're making: + writer = ep.load(require=False) + if writer: + writer(self, ep.name, os.path.join(self.egg_info,ep.name)) + self.find_sources() +egg_info.egg_info.run = replacement_run +exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec')) +""" + + def egg_info_data(self, filename): + if self.satisfied_by is not None: + if not self.satisfied_by.has_metadata(filename): + return None + return self.satisfied_by.get_metadata(filename) + assert self.source_dir + filename = self.egg_info_path(filename) + if not os.path.exists(filename): + return None + data = read_text_file(filename) + return data + + def egg_info_path(self, filename): + if self._egg_info_path is None: + if self.editable: + base = self.source_dir + else: + base = os.path.join(self.source_dir, 'pip-egg-info') + filenames = os.listdir(base) + if self.editable: + filenames = [] + for root, dirs, files in os.walk(base): + for dir in vcs.dirnames: + if dir in dirs: + dirs.remove(dir) + # Iterate over a copy of ``dirs``, since mutating + # a list while iterating over it can cause trouble. + # (See https://github.com/pypa/pip/pull/462.) 
+ for dir in list(dirs): + # Don't search in anything that looks like a virtualenv environment + if (os.path.exists(os.path.join(root, dir, 'bin', 'python')) + or os.path.exists(os.path.join(root, dir, 'Scripts', 'Python.exe'))): + dirs.remove(dir) + # Also don't search through tests + if dir == 'test' or dir == 'tests': + dirs.remove(dir) + filenames.extend([os.path.join(root, dir) + for dir in dirs]) + filenames = [f for f in filenames if f.endswith('.egg-info')] + + if not filenames: + raise InstallationError('No files/directories in %s (from %s)' % (base, filename)) + assert filenames, "No files/directories in %s (from %s)" % (base, filename) + + # if we have more than one match, we pick the toplevel one. This can + # easily be the case if there is a dist folder which contains an + # extracted tarball for testing purposes. + if len(filenames) > 1: + filenames.sort(key=lambda x: x.count(os.path.sep) + + (os.path.altsep and + x.count(os.path.altsep) or 0)) + self._egg_info_path = os.path.join(base, filenames[0]) + return os.path.join(self._egg_info_path, filename) + + def egg_info_lines(self, filename): + data = self.egg_info_data(filename) + if not data: + return [] + result = [] + for line in data.splitlines(): + line = line.strip() + if not line or line.startswith('#'): + continue + result.append(line) + return result + + def pkg_info(self): + p = FeedParser() + data = self.egg_info_data('PKG-INFO') + if not data: + logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO'))) + p.feed(data or '') + return p.close() + + @property + def dependency_links(self): + return self.egg_info_lines('dependency_links.txt') + + _requirements_section_re = re.compile(r'\[(.*?)\]') + + def requirements(self, extras=()): + in_extra = None + for line in self.egg_info_lines('requires.txt'): + match = self._requirements_section_re.match(line.lower()) + if match: + in_extra = match.group(1) + continue + if in_extra and in_extra not in extras: + logger.debug('skipping extra %s' % in_extra) + # Skip requirement for an extra we aren't requiring + continue + yield line + + @property + def absolute_versions(self): + for qualifier, version in self.req.specs: + if qualifier == '==': + yield version + + @property + def installed_version(self): + return self.pkg_info()['version'] + + def assert_source_matches_version(self): + assert self.source_dir + version = self.installed_version + if version not in self.req: + logger.warn('Requested %s, but installing version %s' % (self, self.installed_version)) + else: + logger.debug('Source in %s has version %s, which satisfies requirement %s' + % (display_path(self.source_dir), version, self)) + + def update_editable(self, obtain=True): + if not self.url: + logger.info("Cannot update repository at %s; repository location is unknown" % self.source_dir) + return + assert self.editable + assert self.source_dir + if self.url.startswith('file:'): + # Static paths don't get updated + return + assert '+' in self.url, "bad url: %r" % self.url + if not self.update: + return + vc_type, url = self.url.split('+', 1) + backend = vcs.get_backend(vc_type) + if backend: + vcs_backend = backend(self.url) + if obtain: + vcs_backend.obtain(self.source_dir) + else: + vcs_backend.export(self.source_dir) + else: + assert 0, ( + 'Unexpected version control type (in %s): %s' + % (self.url, vc_type)) + + def uninstall(self, auto_confirm=False): + """ + Uninstall the distribution currently satisfying this requirement. 
+ + Prompts before removing or modifying files unless + ``auto_confirm`` is True. + + Refuses to delete or modify files outside of ``sys.prefix`` - + thus uninstallation within a virtual environment can only + modify that virtual environment, even if the virtualenv is + linked to global site-packages. + + """ + if not self.check_if_exists(): + raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,)) + dist = self.satisfied_by or self.conflicts_with + + paths_to_remove = UninstallPathSet(dist) + + pip_egg_info_path = os.path.join(dist.location, + dist.egg_name()) + '.egg-info' + dist_info_path = os.path.join(dist.location, + '-'.join(dist.egg_name().split('-')[:2]) + ) + '.dist-info' + # workaround for http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=618367 + debian_egg_info_path = pip_egg_info_path.replace( + '-py%s' % pkg_resources.PY_MAJOR, '') + easy_install_egg = dist.egg_name() + '.egg' + develop_egg_link = egg_link_path(dist) + + pip_egg_info_exists = os.path.exists(pip_egg_info_path) + debian_egg_info_exists = os.path.exists(debian_egg_info_path) + dist_info_exists = os.path.exists(dist_info_path) + if pip_egg_info_exists or debian_egg_info_exists: + # package installed by pip + if pip_egg_info_exists: + egg_info_path = pip_egg_info_path + else: + egg_info_path = debian_egg_info_path + paths_to_remove.add(egg_info_path) + if dist.has_metadata('installed-files.txt'): + for installed_file in dist.get_metadata('installed-files.txt').splitlines(): + path = os.path.normpath(os.path.join(egg_info_path, installed_file)) + paths_to_remove.add(path) + #FIXME: need a test for this elif block + #occurs with --single-version-externally-managed/--record outside of pip + elif dist.has_metadata('top_level.txt'): + if dist.has_metadata('namespace_packages.txt'): + namespaces = dist.get_metadata('namespace_packages.txt') + else: + namespaces = [] + for top_level_pkg in [p for p + in dist.get_metadata('top_level.txt').splitlines() + if p and p not in namespaces]: + path = os.path.join(dist.location, top_level_pkg) + paths_to_remove.add(path) + paths_to_remove.add(path + '.py') + paths_to_remove.add(path + '.pyc') + + elif dist.location.endswith(easy_install_egg): + # package installed by easy_install + paths_to_remove.add(dist.location) + easy_install_pth = os.path.join(os.path.dirname(dist.location), + 'easy-install.pth') + paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg) + + elif develop_egg_link: + # develop egg + fh = open(develop_egg_link, 'r') + link_pointer = os.path.normcase(fh.readline().strip()) + fh.close() + assert (link_pointer == dist.location), 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location) + paths_to_remove.add(develop_egg_link) + easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), + 'easy-install.pth') + paths_to_remove.add_pth(easy_install_pth, dist.location) + elif dist_info_exists: + for path in pip.wheel.uninstallation_paths(dist): + paths_to_remove.add(path) + + # find distutils scripts= scripts + if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'): + for script in dist.metadata_listdir('scripts'): + if dist_in_usersite(dist): + bin_dir = bin_user + else: + bin_dir = bin_py + paths_to_remove.add(os.path.join(bin_dir, script)) + if sys.platform == 'win32': + paths_to_remove.add(os.path.join(bin_dir, script) + '.bat') + + # find console_scripts + if dist.has_metadata('entry_points.txt'): + config = ConfigParser.SafeConfigParser() + 
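# ---------------------------------------------------------------------------
# [Editor's aside -- illustrative sketch, not part of this commit.]
# How the *.egg-info / *.dist-info names collected above are derived from a
# distribution's egg name (package name and version are assumed examples):
egg_name = 'SomePackage-1.0-py3.4'
print(egg_name + '.egg-info')                            # pip-installed sdist
print('-'.join(egg_name.split('-')[:2]) + '.dist-info')  # wheel: SomePackage-1.0.dist-info
print(egg_name.replace('-py3.4', '') + '.egg-info')      # Debian-style, no -pyX.Y suffix
# [End of aside; the vendored listing resumes below.]
# ---------------------------------------------------------------------------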
config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt'))) + if config.has_section('console_scripts'): + for name, value in config.items('console_scripts'): + if dist_in_usersite(dist): + bin_dir = bin_user + else: + bin_dir = bin_py + paths_to_remove.add(os.path.join(bin_dir, name)) + if sys.platform == 'win32': + paths_to_remove.add(os.path.join(bin_dir, name) + '.exe') + paths_to_remove.add(os.path.join(bin_dir, name) + '.exe.manifest') + paths_to_remove.add(os.path.join(bin_dir, name) + '-script.py') + + paths_to_remove.remove(auto_confirm) + self.uninstalled = paths_to_remove + + def rollback_uninstall(self): + if self.uninstalled: + self.uninstalled.rollback() + else: + logger.error("Can't rollback %s, nothing uninstalled." + % (self.project_name,)) + + def commit_uninstall(self): + if self.uninstalled: + self.uninstalled.commit() + else: + logger.error("Can't commit %s, nothing uninstalled." + % (self.project_name,)) + + def archive(self, build_dir): + assert self.source_dir + create_archive = True + archive_name = '%s-%s.zip' % (self.name, self.installed_version) + archive_path = os.path.join(build_dir, archive_name) + if os.path.exists(archive_path): + response = ask_path_exists( + 'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' % + display_path(archive_path), ('i', 'w', 'b')) + if response == 'i': + create_archive = False + elif response == 'w': + logger.warn('Deleting %s' % display_path(archive_path)) + os.remove(archive_path) + elif response == 'b': + dest_file = backup_dir(archive_path) + logger.warn('Backing up %s to %s' + % (display_path(archive_path), display_path(dest_file))) + shutil.move(archive_path, dest_file) + if create_archive: + zip = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED) + dir = os.path.normcase(os.path.abspath(self.source_dir)) + for dirpath, dirnames, filenames in os.walk(dir): + if 'pip-egg-info' in dirnames: + dirnames.remove('pip-egg-info') + for dirname in dirnames: + dirname = os.path.join(dirpath, dirname) + name = self._clean_zip_name(dirname, dir) + zipdir = zipfile.ZipInfo(self.name + '/' + name + '/') + zipdir.external_attr = 0x1ED << 16 # 0o755 + zip.writestr(zipdir, '') + for filename in filenames: + if filename == PIP_DELETE_MARKER_FILENAME: + continue + filename = os.path.join(dirpath, filename) + name = self._clean_zip_name(filename, dir) + zip.write(filename, self.name + '/' + name) + zip.close() + logger.indent -= 2 + logger.notify('Saved %s' % display_path(archive_path)) + + def _clean_zip_name(self, name, prefix): + assert name.startswith(prefix+os.path.sep), ( + "name %r doesn't start with prefix %r" % (name, prefix)) + name = name[len(prefix)+1:] + name = name.replace(os.path.sep, '/') + return name + + def install(self, install_options, global_options=(), root=None): + if self.editable: + self.install_editable(install_options, global_options) + return + if self.is_wheel: + version = pip.wheel.wheel_version(self.source_dir) + pip.wheel.check_compatibility(version, self.name) + + self.move_wheel_files(self.source_dir, root=root) + self.install_succeeded = True + return + + temp_location = tempfile.mkdtemp('-record', 'pip-') + record_filename = os.path.join(temp_location, 'install-record.txt') + try: + install_args = [sys.executable] + install_args.append('-c') + install_args.append( + "import setuptools, tokenize;__file__=%r;"\ + "exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py) + install_args += list(global_options) + 
['install','--record', record_filename] + + if not self.as_egg: + install_args += ['--single-version-externally-managed'] + + if root is not None: + install_args += ['--root', root] + + if self.pycompile: + install_args += ["--compile"] + else: + install_args += ["--no-compile"] + + if running_under_virtualenv(): + ## FIXME: I'm not sure if this is a reasonable location; probably not + ## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable + install_args += ['--install-headers', + os.path.join(sys.prefix, 'include', 'site', + 'python' + get_python_version())] + logger.notify('Running setup.py install for %s' % self.name) + logger.indent += 2 + try: + call_subprocess(install_args + install_options, + cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False) + finally: + logger.indent -= 2 + if not os.path.exists(record_filename): + logger.notify('Record file %s not found' % record_filename) + return + self.install_succeeded = True + if self.as_egg: + # there's no --always-unzip option we can pass to install command + # so we unable to save the installed-files.txt + return + + def prepend_root(path): + if root is None or not os.path.isabs(path): + return path + else: + return change_root(root, path) + + f = open(record_filename) + for line in f: + line = line.strip() + if line.endswith('.egg-info'): + egg_info_dir = prepend_root(line) + break + else: + logger.warn('Could not find .egg-info directory in install record for %s' % self) + ## FIXME: put the record somewhere + ## FIXME: should this be an error? + return + f.close() + new_lines = [] + f = open(record_filename) + for line in f: + filename = line.strip() + if os.path.isdir(filename): + filename += os.path.sep + new_lines.append(make_path_relative(prepend_root(filename), egg_info_dir)) + f.close() + f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w') + f.write('\n'.join(new_lines)+'\n') + f.close() + finally: + if os.path.exists(record_filename): + os.remove(record_filename) + os.rmdir(temp_location) + + def remove_temporary_source(self): + """Remove the source files from this requirement, if they are marked + for deletion""" + if self.is_bundle or os.path.exists(self.delete_marker_filename): + logger.info('Removing source in %s' % self.source_dir) + if self.source_dir: + rmtree(self.source_dir) + self.source_dir = None + if self._temp_build_dir and os.path.exists(self._temp_build_dir): + rmtree(self._temp_build_dir) + self._temp_build_dir = None + + def install_editable(self, install_options, global_options=()): + logger.notify('Running setup.py develop for %s' % self.name) + logger.indent += 2 + try: + ## FIXME: should we do --install-headers here too? 
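# ---------------------------------------------------------------------------
# [Editor's aside -- illustrative sketch, not part of this commit.]
# Both install() above and install_editable() below shell out through a tiny
# 'python -c' shim so setup.py runs under setuptools even when it only
# imports distutils. The assembled command is roughly (paths are
# placeholders, not values from the source):
shim = ("import setuptools, tokenize; __file__='/build/pkg/setup.py'; "
        "exec(compile(getattr(tokenize, 'open', open)(__file__).read()"
        ".replace('\\r\\n', '\\n'), __file__, 'exec'))")
cmd = ['python', '-c', shim, 'install', '--record', '/tmp/record.txt',
       '--single-version-externally-managed']
print(' '.join(cmd))
# [End of aside; the vendored listing resumes below.]
# ---------------------------------------------------------------------------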
+ call_subprocess( + [sys.executable, '-c', + "import setuptools, tokenize; __file__=%r; exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py] + + list(global_options) + ['develop', '--no-deps'] + list(install_options), + + cwd=self.source_dir, filter_stdout=self._filter_install, + show_stdout=False) + finally: + logger.indent -= 2 + self.install_succeeded = True + + def _filter_install(self, line): + level = logger.NOTIFY + for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*', + r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$", + r'^byte-compiling ', + # Not sure what this warning is, but it seems harmless: + r"^warning: manifest_maker: standard file '-c' not found$"]: + if re.search(regex, line.strip()): + level = logger.INFO + break + return (level, line) + + def check_if_exists(self): + """Find an installed distribution that satisfies or conflicts + with this requirement, and set self.satisfied_by or + self.conflicts_with appropriately.""" + + if self.req is None: + return False + try: + # DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (1 of 3 parts) + # if we've already set distribute as a conflict to setuptools + # then this check has already run before. we don't want it to + # run again, and return False, since it would block the uninstall + # TODO: remove this later + if (self.req.project_name == 'setuptools' + and self.conflicts_with + and self.conflicts_with.project_name == 'distribute'): + return True + else: + self.satisfied_by = pkg_resources.get_distribution(self.req) + except pkg_resources.DistributionNotFound: + return False + except pkg_resources.VersionConflict: + existing_dist = pkg_resources.get_distribution(self.req.project_name) + if self.use_user_site: + if dist_in_usersite(existing_dist): + self.conflicts_with = existing_dist + elif running_under_virtualenv() and dist_in_site_packages(existing_dist): + raise InstallationError("Will not install to the user site because it will lack sys.path precedence to %s in %s" + %(existing_dist.project_name, existing_dist.location)) + else: + self.conflicts_with = existing_dist + return True + + @property + def is_wheel(self): + return self.url and '.whl' in self.url + + @property + def is_bundle(self): + if self._is_bundle is not None: + return self._is_bundle + base = self._temp_build_dir + if not base: + ## FIXME: this doesn't seem right: + return False + self._is_bundle = (os.path.exists(os.path.join(base, 'pip-manifest.txt')) + or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt'))) + return self._is_bundle + + def bundle_requirements(self): + for dest_dir in self._bundle_editable_dirs: + package = os.path.basename(dest_dir) + ## FIXME: svnism: + for vcs_backend in vcs.backends: + url = rev = None + vcs_bundle_file = os.path.join( + dest_dir, vcs_backend.bundle_file) + if os.path.exists(vcs_bundle_file): + vc_type = vcs_backend.name + fp = open(vcs_bundle_file) + content = fp.read() + fp.close() + url, rev = vcs_backend().parse_vcs_bundle_file(content) + break + if url: + url = '%s+%s@%s' % (vc_type, url, rev) + else: + url = None + yield InstallRequirement( + package, self, editable=True, url=url, + update=False, source_dir=dest_dir, from_bundle=True) + for dest_dir in self._bundle_build_dirs: + package = os.path.basename(dest_dir) + yield InstallRequirement(package, self,source_dir=dest_dir, from_bundle=True) + + def move_bundle_files(self, dest_build_dir, dest_src_dir): + base = self._temp_build_dir + 
assert base + src_dir = os.path.join(base, 'src') + build_dir = os.path.join(base, 'build') + bundle_build_dirs = [] + bundle_editable_dirs = [] + for source_dir, dest_dir, dir_collection in [ + (src_dir, dest_src_dir, bundle_editable_dirs), + (build_dir, dest_build_dir, bundle_build_dirs)]: + if os.path.exists(source_dir): + for dirname in os.listdir(source_dir): + dest = os.path.join(dest_dir, dirname) + dir_collection.append(dest) + if os.path.exists(dest): + logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s' + % (dest, dirname, self)) + continue + if not os.path.exists(dest_dir): + logger.info('Creating directory %s' % dest_dir) + os.makedirs(dest_dir) + shutil.move(os.path.join(source_dir, dirname), dest) + if not os.listdir(source_dir): + os.rmdir(source_dir) + self._temp_build_dir = None + self._bundle_build_dirs = bundle_build_dirs + self._bundle_editable_dirs = bundle_editable_dirs + + def move_wheel_files(self, wheeldir, root=None): + move_wheel_files( + self.name, self.req, wheeldir, + user=self.use_user_site, + home=self.target_dir, + root=root, + pycompile=self.pycompile, + ) + + @property + def delete_marker_filename(self): + assert self.source_dir + return os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME) + + +class Requirements(object): + + def __init__(self): + self._keys = [] + self._dict = {} + + def keys(self): + return self._keys + + def values(self): + return [self._dict[key] for key in self._keys] + + def __contains__(self, item): + return item in self._keys + + def __setitem__(self, key, value): + if key not in self._keys: + self._keys.append(key) + self._dict[key] = value + + def __getitem__(self, key): + return self._dict[key] + + def __repr__(self): + values = ['%s: %s' % (repr(k), repr(self[k])) for k in self.keys()] + return 'Requirements({%s})' % ', '.join(values) + + +class RequirementSet(object): + + def __init__(self, build_dir, src_dir, download_dir, download_cache=None, + upgrade=False, ignore_installed=False, as_egg=False, + target_dir=None, ignore_dependencies=False, + force_reinstall=False, use_user_site=False, session=None, + pycompile=True, wheel_download_dir=None): + self.build_dir = build_dir + self.src_dir = src_dir + self.download_dir = download_dir + if download_cache: + download_cache = os.path.expanduser(download_cache) + self.download_cache = download_cache + self.upgrade = upgrade + self.ignore_installed = ignore_installed + self.force_reinstall = force_reinstall + self.requirements = Requirements() + # Mapping of alias: real_name + self.requirement_aliases = {} + self.unnamed_requirements = [] + self.ignore_dependencies = ignore_dependencies + self.successfully_downloaded = [] + self.successfully_installed = [] + self.reqs_to_cleanup = [] + self.as_egg = as_egg + self.use_user_site = use_user_site + self.target_dir = target_dir #set from --target option + self.session = session or PipSession() + self.pycompile = pycompile + self.wheel_download_dir = wheel_download_dir + + def __str__(self): + reqs = [req for req in self.requirements.values() + if not req.comes_from] + reqs.sort(key=lambda req: req.name.lower()) + return ' '.join([str(req.req) for req in reqs]) + + def add_requirement(self, install_req): + name = install_req.name + install_req.as_egg = self.as_egg + install_req.use_user_site = self.use_user_site + install_req.target_dir = self.target_dir + install_req.pycompile = self.pycompile + if not name: + #url or path requirement w/o an egg fragment + 
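# ---------------------------------------------------------------------------
# [Editor's aside -- illustrative sketch, not part of this commit.]
# The Requirements container defined above is a minimal insertion-ordered
# mapping (collections.OrderedDict would serve the same purpose); the
# property it guarantees, in miniature:
keys, store = [], {}
for name, req in [('flask', 'a'), ('six', 'b'), ('flask', 'c')]:
    if name not in keys:
        keys.append(name)
    store[name] = req
print(keys)                        # ['flask', 'six'] -- first-seen order kept
print([store[k] for k in keys])    # ['c', 'b'] -- later values overwrite
# [End of aside; the vendored listing resumes below.]
# ---------------------------------------------------------------------------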
self.unnamed_requirements.append(install_req)
+        else:
+            if self.has_requirement(name):
+                raise InstallationError(
+                    'Double requirement given: %s (already in %s, name=%r)'
+                    % (install_req, self.get_requirement(name), name))
+            self.requirements[name] = install_req
+            ## FIXME: what about other normalizations?  E.g., _ vs. -?
+            if name.lower() != name:
+                self.requirement_aliases[name.lower()] = name
+
+    def has_requirement(self, project_name):
+        for name in project_name, project_name.lower():
+            if name in self.requirements or name in self.requirement_aliases:
+                return True
+        return False
+
+    @property
+    def has_requirements(self):
+        return list(self.requirements.values()) or self.unnamed_requirements
+
+    @property
+    def has_editables(self):
+        if any(req.editable for req in self.requirements.values()):
+            return True
+        if any(req.editable for req in self.unnamed_requirements):
+            return True
+        return False
+
+    @property
+    def is_download(self):
+        if self.download_dir:
+            self.download_dir = os.path.expanduser(self.download_dir)
+            if os.path.exists(self.download_dir):
+                return True
+            else:
+                logger.fatal('Could not find download directory')
+                raise InstallationError(
+                    "Could not find or access download directory '%s'"
+                    % display_path(self.download_dir))
+        return False
+
+    def get_requirement(self, project_name):
+        for name in project_name, project_name.lower():
+            if name in self.requirements:
+                return self.requirements[name]
+            if name in self.requirement_aliases:
+                return self.requirements[self.requirement_aliases[name]]
+        raise KeyError("No project with the name %r" % project_name)
+
+    def uninstall(self, auto_confirm=False):
+        for req in self.requirements.values():
+            req.uninstall(auto_confirm=auto_confirm)
+            req.commit_uninstall()
+
+    def locate_files(self):
+        ## FIXME: duplicates code from prepare_files; relevant code should
+        ## probably be factored out into a separate method
+        unnamed = list(self.unnamed_requirements)
+        reqs = list(self.requirements.values())
+        while reqs or unnamed:
+            if unnamed:
+                req_to_install = unnamed.pop(0)
+            else:
+                req_to_install = reqs.pop(0)
+            install_needed = True
+            if not self.ignore_installed and not req_to_install.editable:
+                req_to_install.check_if_exists()
+                if req_to_install.satisfied_by:
+                    if self.upgrade:
+                        #don't uninstall conflict if user install and conflict is not user install
+                        if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)):
+                            req_to_install.conflicts_with = req_to_install.satisfied_by
+                        req_to_install.satisfied_by = None
+                    else:
+                        install_needed = False
+                if req_to_install.satisfied_by:
+                    logger.notify('Requirement already satisfied '
+                                  '(use --upgrade to upgrade): %s'
+                                  % req_to_install)
+
+            if req_to_install.editable:
+                if req_to_install.source_dir is None:
+                    req_to_install.source_dir = req_to_install.build_location(self.src_dir)
+            elif install_needed:
+                req_to_install.source_dir = req_to_install.build_location(self.build_dir, not self.is_download)
+
+            if req_to_install.source_dir is not None and not os.path.isdir(req_to_install.source_dir):
+                raise InstallationError('Could not install requirement %s '
+                                        'because source folder %s does not exist '
+                                        '(perhaps --no-download was used without first running '
+                                        'an equivalent install with --no-install?)'
+                                        % (req_to_install, req_to_install.source_dir))
+
+    def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
+        """Prepare process.
Create temp directories, download and/or unpack files.""" + unnamed = list(self.unnamed_requirements) + reqs = list(self.requirements.values()) + while reqs or unnamed: + if unnamed: + req_to_install = unnamed.pop(0) + else: + req_to_install = reqs.pop(0) + install = True + best_installed = False + not_found = None + if not self.ignore_installed and not req_to_install.editable: + req_to_install.check_if_exists() + if req_to_install.satisfied_by: + if self.upgrade: + if not self.force_reinstall and not req_to_install.url: + try: + url = finder.find_requirement( + req_to_install, self.upgrade) + except BestVersionAlreadyInstalled: + best_installed = True + install = False + except DistributionNotFound: + not_found = sys.exc_info()[1] + else: + # Avoid the need to call find_requirement again + req_to_install.url = url.url + + if not best_installed: + #don't uninstall conflict if user install and conflict is not user install + if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)): + req_to_install.conflicts_with = req_to_install.satisfied_by + req_to_install.satisfied_by = None + else: + install = False + if req_to_install.satisfied_by: + if best_installed: + logger.notify('Requirement already up-to-date: %s' + % req_to_install) + else: + logger.notify('Requirement already satisfied ' + '(use --upgrade to upgrade): %s' + % req_to_install) + if req_to_install.editable: + logger.notify('Obtaining %s' % req_to_install) + elif install: + if req_to_install.url and req_to_install.url.lower().startswith('file:'): + logger.notify('Unpacking %s' % display_path(url_to_path(req_to_install.url))) + else: + logger.notify('Downloading/unpacking %s' % req_to_install) + logger.indent += 2 + try: + is_bundle = False + is_wheel = False + if req_to_install.editable: + if req_to_install.source_dir is None: + location = req_to_install.build_location(self.src_dir) + req_to_install.source_dir = location + else: + location = req_to_install.source_dir + if not os.path.exists(self.build_dir): + _make_build_dir(self.build_dir) + req_to_install.update_editable(not self.is_download) + if self.is_download: + req_to_install.run_egg_info() + req_to_install.archive(self.download_dir) + else: + req_to_install.run_egg_info() + elif install: + ##@@ if filesystem packages are not marked + ##editable in a req, a non deterministic error + ##occurs when the script attempts to unpack the + ##build directory + + # NB: This call can result in the creation of a temporary build directory + location = req_to_install.build_location(self.build_dir, not self.is_download) + unpack = True + url = None + + # In the case where the req comes from a bundle, we should + # assume a build dir exists and move on + if req_to_install.from_bundle: + pass + # If a checkout exists, it's unwise to keep going. version + # inconsistencies are logged later, but do not fail the + # installation. + elif os.path.exists(os.path.join(location, 'setup.py')): + raise PreviousBuildDirError(textwrap.dedent(""" + pip can't proceed with requirement '%s' due to a pre-existing build directory. + location: %s + This is likely due to a previous installation that failed. + pip is being responsible and not assuming it can delete this. + Please delete it and try again. 
+ """ % (req_to_install, location))) + else: + ## FIXME: this won't upgrade when there's an existing package unpacked in `location` + if req_to_install.url is None: + if not_found: + raise not_found + url = finder.find_requirement(req_to_install, upgrade=self.upgrade) + else: + ## FIXME: should req_to_install.url already be a link? + url = Link(req_to_install.url) + assert url + if url: + try: + + if ( + url.filename.endswith(wheel_ext) + and self.wheel_download_dir + ): + # when doing 'pip wheel` + download_dir = self.wheel_download_dir + do_download = True + else: + download_dir = self.download_dir + do_download = self.is_download + self.unpack_url( + url, location, download_dir, + do_download, + ) + except HTTPError as exc: + logger.fatal( + 'Could not install requirement %s because ' + 'of error %s' % (req_to_install, exc) + ) + raise InstallationError( + 'Could not install requirement %s because of HTTP error %s for URL %s' + % (req_to_install, e, url)) + else: + unpack = False + if unpack: + is_bundle = req_to_install.is_bundle + is_wheel = url and url.filename.endswith(wheel_ext) + if is_bundle: + req_to_install.move_bundle_files(self.build_dir, self.src_dir) + for subreq in req_to_install.bundle_requirements(): + reqs.append(subreq) + self.add_requirement(subreq) + elif self.is_download: + req_to_install.source_dir = location + if not is_wheel: + # FIXME: see https://github.com/pypa/pip/issues/1112 + req_to_install.run_egg_info() + if url and url.scheme in vcs.all_schemes: + req_to_install.archive(self.download_dir) + elif is_wheel: + req_to_install.source_dir = location + req_to_install.url = url.url + else: + req_to_install.source_dir = location + req_to_install.run_egg_info() + if force_root_egg_info: + # We need to run this to make sure that the .egg-info/ + # directory is created for packing in the bundle + req_to_install.run_egg_info(force_root_egg_info=True) + req_to_install.assert_source_matches_version() + #@@ sketchy way of identifying packages not grabbed from an index + if bundle and req_to_install.url: + self.copy_to_build_dir(req_to_install) + install = False + # req_to_install.req is only avail after unpack for URL pkgs + # repeat check_if_exists to uninstall-on-upgrade (#14) + if not self.ignore_installed: + req_to_install.check_if_exists() + if req_to_install.satisfied_by: + if self.upgrade or self.ignore_installed: + #don't uninstall conflict if user install and and conflict is not user install + if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)): + req_to_install.conflicts_with = req_to_install.satisfied_by + req_to_install.satisfied_by = None + else: + logger.notify( + 'Requirement already satisfied (use ' + '--upgrade to upgrade): %s' % + req_to_install + ) + install = False + if is_wheel: + dist = list( + pkg_resources.find_distributions(location) + )[0] + if not req_to_install.req: + req_to_install.req = dist.as_requirement() + self.add_requirement(req_to_install) + if not self.ignore_dependencies: + for subreq in dist.requires( + req_to_install.extras): + if self.has_requirement( + subreq.project_name): + continue + subreq = InstallRequirement(str(subreq), + req_to_install) + reqs.append(subreq) + self.add_requirement(subreq) + + # sdists + elif not is_bundle: + ## FIXME: shouldn't be globally added: + finder.add_dependency_links(req_to_install.dependency_links) + if (req_to_install.extras): + logger.notify("Installing extra requirements: %r" % ','.join(req_to_install.extras)) + if not self.ignore_dependencies: + for req in 
req_to_install.requirements(req_to_install.extras): + try: + name = pkg_resources.Requirement.parse(req).project_name + except ValueError: + e = sys.exc_info()[1] + ## FIXME: proper warning + logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, e, req_to_install)) + continue + if self.has_requirement(name): + ## FIXME: check for conflict + continue + subreq = InstallRequirement(req, req_to_install) + reqs.append(subreq) + self.add_requirement(subreq) + if not self.has_requirement(req_to_install.name): + #'unnamed' requirements will get added here + self.add_requirement(req_to_install) + + # cleanup tmp src + if not is_bundle: + if ( + self.is_download or + req_to_install._temp_build_dir is not None + ): + self.reqs_to_cleanup.append(req_to_install) + + if install: + self.successfully_downloaded.append(req_to_install) + if bundle and (req_to_install.url and req_to_install.url.startswith('file:///')): + self.copy_to_build_dir(req_to_install) + finally: + logger.indent -= 2 + + def cleanup_files(self, bundle=False): + """Clean up files, remove builds.""" + logger.notify('Cleaning up...') + logger.indent += 2 + for req in self.reqs_to_cleanup: + req.remove_temporary_source() + + remove_dir = [] + if self._pip_has_created_build_dir(): + remove_dir.append(self.build_dir) + + # The source dir of a bundle can always be removed. + # FIXME: not if it pre-existed the bundle! + if bundle: + remove_dir.append(self.src_dir) + + for dir in remove_dir: + if os.path.exists(dir): + logger.info('Removing temporary dir %s...' % dir) + rmtree(dir) + + logger.indent -= 2 + + def _pip_has_created_build_dir(self): + return (self.build_dir == build_prefix and + os.path.exists(os.path.join(self.build_dir, PIP_DELETE_MARKER_FILENAME))) + + def copy_to_build_dir(self, req_to_install): + target_dir = req_to_install.editable and self.src_dir or self.build_dir + logger.info("Copying %s to %s" % (req_to_install.name, target_dir)) + dest = os.path.join(target_dir, req_to_install.name) + shutil.copytree(req_to_install.source_dir, dest) + call_subprocess(["python", "%s/setup.py" % dest, "clean"], cwd=dest, + command_desc='python setup.py clean') + + def unpack_url(self, link, location, download_dir=None, + only_download=False): + if download_dir is None: + download_dir = self.download_dir + + # non-editable vcs urls + if is_vcs_url(link): + if only_download: + loc = download_dir + else: + loc = location + unpack_vcs_link(link, loc, only_download) + + # file urls + elif is_file_url(link): + unpack_file_url(link, location, download_dir) + if only_download: + write_delete_marker_file(location) + + # http urls + else: + unpack_http_url( + link, + location, + self.download_cache, + download_dir, + self.session, + ) + if only_download: + write_delete_marker_file(location) + + def install(self, install_options, global_options=(), *args, **kwargs): + """Install everything in this set (after having downloaded and unpacked the packages)""" + to_install = [r for r in self.requirements.values() + if not r.satisfied_by] + + # DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (1 of 3 parts) + # move the distribute-0.7.X wrapper to the end because it does not + # install a setuptools package. 
by moving it to the end, we ensure its
+        # setuptools dependency is handled first, which will provide the
+        # setuptools package
+        # TODO: take this out later
+        distribute_req = pkg_resources.Requirement.parse("distribute>=0.7")
+        for req in to_install:
+            if req.name == 'distribute' and req.installed_version in distribute_req:
+                to_install.remove(req)
+                to_install.append(req)
+
+        if to_install:
+            logger.notify('Installing collected packages: %s' % ', '.join([req.name for req in to_install]))
+        logger.indent += 2
+        try:
+            for requirement in to_install:
+
+                # DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (1 of 3 parts)
+                # when upgrading from distribute-0.6.X to the new merged
+                # setuptools in py2, we need to force setuptools to uninstall
+                # distribute. In py3, which is always using distribute, this
+                # conversion is already happening in distribute's pkg_resources.
+                # It's ok *not* to check if setuptools>=0.7 because if someone
+                # were actually trying to upgrade from distribute to setuptools
+                # 0.6.X, then all this could do is actually help, although that
+                # upgrade path was certainly never "supported"
+                # TODO: remove this later
+                if requirement.name == 'setuptools':
+                    try:
+                        # only uninstall distribute<0.7. For >=0.7, setuptools
+                        # will also be present, and that's what we need to
+                        # uninstall
+                        distribute_requirement = pkg_resources.Requirement.parse("distribute<0.7")
+                        existing_distribute = pkg_resources.get_distribution("distribute")
+                        if existing_distribute in distribute_requirement:
+                            requirement.conflicts_with = existing_distribute
+                    except pkg_resources.DistributionNotFound:
+                        # distribute wasn't installed, so nothing to do
+                        pass
+
+                if requirement.conflicts_with:
+                    logger.notify('Found existing installation: %s'
+                                  % requirement.conflicts_with)
+                    logger.indent += 2
+                    try:
+                        requirement.uninstall(auto_confirm=True)
+                    finally:
+                        logger.indent -= 2
+                try:
+                    requirement.install(install_options, global_options, *args, **kwargs)
+                except:
+                    # if install did not succeed, rollback previous uninstall
+                    if requirement.conflicts_with and not requirement.install_succeeded:
+                        requirement.rollback_uninstall()
+                    raise
+                else:
+                    if requirement.conflicts_with and requirement.install_succeeded:
+                        requirement.commit_uninstall()
+                requirement.remove_temporary_source()
+        finally:
+            logger.indent -= 2
+        self.successfully_installed = to_install
+
+    def create_bundle(self, bundle_filename):
+        ## FIXME: can't decide which is better; zip is easier to read
+        ## random files from, but tar.bz2 is smaller and not as lame a
+        ## format.
+
+        ## FIXME: this file should really include a manifest of the
+        ## packages, maybe some other metadata files.  It would make
+        ## it easier to detect as well.
+ zip = zipfile.ZipFile(bundle_filename, 'w', zipfile.ZIP_DEFLATED) + vcs_dirs = [] + for dir, basename in (self.build_dir, 'build'), (self.src_dir, 'src'): + dir = os.path.normcase(os.path.abspath(dir)) + for dirpath, dirnames, filenames in os.walk(dir): + for backend in vcs.backends: + vcs_backend = backend() + vcs_url = vcs_rev = None + if vcs_backend.dirname in dirnames: + for vcs_dir in vcs_dirs: + if dirpath.startswith(vcs_dir): + # vcs bundle file already in parent directory + break + else: + vcs_url, vcs_rev = vcs_backend.get_info( + os.path.join(dir, dirpath)) + vcs_dirs.append(dirpath) + vcs_bundle_file = vcs_backend.bundle_file + vcs_guide = vcs_backend.guide % {'url': vcs_url, + 'rev': vcs_rev} + dirnames.remove(vcs_backend.dirname) + break + if 'pip-egg-info' in dirnames: + dirnames.remove('pip-egg-info') + for dirname in dirnames: + dirname = os.path.join(dirpath, dirname) + name = self._clean_zip_name(dirname, dir) + zip.writestr(basename + '/' + name + '/', '') + for filename in filenames: + if filename == PIP_DELETE_MARKER_FILENAME: + continue + filename = os.path.join(dirpath, filename) + name = self._clean_zip_name(filename, dir) + zip.write(filename, basename + '/' + name) + if vcs_url: + name = os.path.join(dirpath, vcs_bundle_file) + name = self._clean_zip_name(name, dir) + zip.writestr(basename + '/' + name, vcs_guide) + + zip.writestr('pip-manifest.txt', self.bundle_requirements()) + zip.close() + + BUNDLE_HEADER = '''\ +# This is a pip bundle file, that contains many source packages +# that can be installed as a group. You can install this like: +# pip this_file.zip +# The rest of the file contains a list of all the packages included: +''' + + def bundle_requirements(self): + parts = [self.BUNDLE_HEADER] + for req in [req for req in self.requirements.values() + if not req.comes_from]: + parts.append('%s==%s\n' % (req.name, req.installed_version)) + parts.append('# These packages were installed to satisfy the above requirements:\n') + for req in [req for req in self.requirements.values() + if req.comes_from]: + parts.append('%s==%s\n' % (req.name, req.installed_version)) + ## FIXME: should we do something with self.unnamed_requirements? 
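# ---------------------------------------------------------------------------
# [Editor's aside -- illustrative sketch, not part of this commit.]
# The pip-manifest.txt written by create_bundle() above is plain text: the
# BUNDLE_HEADER, then one 'name==version' line per requirement, assembled
# exactly like the parts list in bundle_requirements(). A sketch with
# assumed package names:
parts = ['# This is a pip bundle file (header abbreviated here)...\n',
         'SomePackage==1.0\n',
         '# These packages were installed to satisfy the above requirements:\n',
         'six==1.5.2\n']
print(''.join(parts))
# [End of aside; the vendored listing resumes below.]
# ---------------------------------------------------------------------------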
+ return ''.join(parts) + + def _clean_zip_name(self, name, prefix): + assert name.startswith(prefix+os.path.sep), ( + "name %r doesn't start with prefix %r" % (name, prefix)) + name = name[len(prefix)+1:] + name = name.replace(os.path.sep, '/') + return name + + +def _make_build_dir(build_dir): + os.makedirs(build_dir) + write_delete_marker_file(build_dir) + + +_scheme_re = re.compile(r'^(http|https|file):', re.I) + + +def parse_requirements(filename, finder=None, comes_from=None, options=None, + session=None): + if session is None: + session = PipSession() + + skip_match = None + skip_regex = options.skip_requirements_regex if options else None + if skip_regex: + skip_match = re.compile(skip_regex) + reqs_file_dir = os.path.dirname(os.path.abspath(filename)) + filename, content = get_file_content(filename, + comes_from=comes_from, + session=session, + ) + for line_number, line in enumerate(content.splitlines()): + line_number += 1 + line = line.strip() + + # Remove comments from file + line = re.sub(r"(^|\s)#.*$", "", line) + + if not line or line.startswith('#'): + continue + if skip_match and skip_match.search(line): + continue + if line.startswith('-r') or line.startswith('--requirement'): + if line.startswith('-r'): + req_url = line[2:].strip() + else: + req_url = line[len('--requirement'):].strip().strip('=') + if _scheme_re.search(filename): + # Relative to a URL + req_url = urlparse.urljoin(filename, req_url) + elif not _scheme_re.search(req_url): + req_url = os.path.join(os.path.dirname(filename), req_url) + for item in parse_requirements(req_url, finder, comes_from=filename, options=options, session=session): + yield item + elif line.startswith('-Z') or line.startswith('--always-unzip'): + # No longer used, but previously these were used in + # requirement files, so we'll ignore. 
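# ---------------------------------------------------------------------------
# [Editor's aside -- illustrative sketch, not part of this commit.]
# How parse_requirements() above resolves a nested '-r extra.txt' relative
# to its parent file: URLs go through urljoin, local paths through os.path
# (the parent names here are assumed examples):
import os
try:
    from urllib.parse import urljoin    # Python 3
except ImportError:
    from urlparse import urljoin        # Python 2
print(urljoin('https://example.org/reqs/base.txt', 'extra.txt'))
# -> https://example.org/reqs/extra.txt
print(os.path.join(os.path.dirname('/srv/reqs/base.txt'), 'extra.txt'))
# -> /srv/reqs/extra.txt
# [End of aside; the vendored listing resumes below.]
# ---------------------------------------------------------------------------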
+            pass
+        elif line.startswith('-f') or line.startswith('--find-links'):
+            if line.startswith('-f'):
+                line = line[2:].strip()
+            else:
+                line = line[len('--find-links'):].strip().lstrip('=')
+            ## FIXME: it would be nice to keep track of the source of
+            ## the find_links:
+            # support a find-links local path relative to a requirements file
+            relative_to_reqs_file = os.path.join(reqs_file_dir, line)
+            if os.path.exists(relative_to_reqs_file):
+                line = relative_to_reqs_file
+            if finder:
+                finder.find_links.append(line)
+        elif line.startswith('-i') or line.startswith('--index-url'):
+            if line.startswith('-i'):
+                line = line[2:].strip()
+            else:
+                line = line[len('--index-url'):].strip().lstrip('=')
+            if finder:
+                finder.index_urls = [line]
+        elif line.startswith('--extra-index-url'):
+            line = line[len('--extra-index-url'):].strip().lstrip('=')
+            if finder:
+                finder.index_urls.append(line)
+        elif line.startswith('--use-wheel'):
+            finder.use_wheel = True
+        elif line.startswith('--no-index'):
+            finder.index_urls = []
+        elif line.startswith("--allow-external"):
+            line = line[len("--allow-external"):].strip().lstrip("=")
+            finder.allow_external |= set([normalize_name(line).lower()])
+        elif line.startswith("--allow-all-external"):
+            finder.allow_all_external = True
+        # Remove in 1.7
+        elif line.startswith("--no-allow-external"):
+            pass
+        # Remove in 1.7
+        elif line.startswith("--no-allow-insecure"):
+            pass
+        # Remove after 1.7
+        elif line.startswith("--allow-insecure"):
+            line = line[len("--allow-insecure"):].strip().lstrip("=")
+            finder.allow_unverified |= set([normalize_name(line).lower()])
+        elif line.startswith("--allow-unverified"):
+            line = line[len("--allow-unverified"):].strip().lstrip("=")
+            finder.allow_unverified |= set([normalize_name(line).lower()])
+        else:
+            comes_from = '-r %s (line %s)' % (filename, line_number)
+            if line.startswith('-e') or line.startswith('--editable'):
+                if line.startswith('-e'):
+                    line = line[2:].strip()
+                else:
+                    line = line[len('--editable'):].strip().lstrip('=')
+                req = InstallRequirement.from_editable(
+                    line, comes_from=comes_from, default_vcs=options.default_vcs if options else None)
+            else:
+                req = InstallRequirement.from_line(line, comes_from, prereleases=getattr(options, "pre", None))
+            yield req
+
+def _strip_postfix(req):
+    """
+    Strip req postfix ( -dev, 0.2, etc )
+    """
+    ## FIXME: use package_to_requirement?
+    match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)
+    if match:
+        # Strip off -dev, -0.2, etc.
+        req = match.group(1)
+    return req
+
+def _build_req_from_url(url):
+
+    parts = [p for p in url.split('#', 1)[0].split('/') if p]
+
+    req = None
+    if parts[-2] in ('tags', 'branches', 'tag', 'branch'):
+        req = parts[-3]
+    elif parts[-1] == 'trunk':
+        req = parts[-2]
+    return req
+
+def _build_editable_options(req):
+
+    """
+    This method generates a dictionary of the query string
+    parameters contained in a given editable URL.
+    """
+    regexp = re.compile(r"[\?#&](?P<name>[^&=]+)=(?P<value>[^&=]+)")
+    matched = regexp.findall(req)
+
+    if matched:
+        ret = dict()
+        for option in matched:
+            (name, value) = option
+            if name in ret:
+                raise Exception("%s option already defined" % name)
+            ret[name] = value
+        return ret
+    return None
+
+
+def parse_editable(editable_req, default_vcs=None):
+    """Parses svn+http://blahblah@rev#egg=Foobar into a requirement
+    (Foobar) and a URL"""
+
+    url = editable_req
+    extras = None
+
+    # If a file path is specified with extras, strip off the extras.
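# ---------------------------------------------------------------------------
# [Editor's aside -- illustrative sketch, not part of this commit.]
# _build_editable_options() above turns the fragment/query parameters of an
# editable URL into a dict; the same effect with unnamed groups (the URL is
# an assumed example):
import re
url = 'git+https://example.org/repo.git#egg=SomePackage&subdirectory=pkg'
print(dict(re.findall(r"[\?#&]([^&=]+)=([^&=]+)", url)))
# -> {'egg': 'SomePackage', 'subdirectory': 'pkg'}
# [End of aside; the vendored listing resumes below.]
# ---------------------------------------------------------------------------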
+ m = re.match(r'^(.+)(\[[^\]]+\])$', url) + if m: + url_no_extras = m.group(1) + extras = m.group(2) + else: + url_no_extras = url + + if os.path.isdir(url_no_extras): + if not os.path.exists(os.path.join(url_no_extras, 'setup.py')): + raise InstallationError("Directory %r is not installable. File 'setup.py' not found." % url_no_extras) + # Treating it as code that has already been checked out + url_no_extras = path_to_url(url_no_extras) + + if url_no_extras.lower().startswith('file:'): + if extras: + return None, url_no_extras, pkg_resources.Requirement.parse('__placeholder__' + extras).extras + else: + return None, url_no_extras, None + + for version_control in vcs: + if url.lower().startswith('%s:' % version_control): + url = '%s+%s' % (version_control, url) + break + + if '+' not in url: + if default_vcs: + url = default_vcs + '+' + url + else: + raise InstallationError( + '%s should either be a path to a local project or a VCS url beginning with svn+, git+, hg+, or bzr+' % editable_req) + + vc_type = url.split('+', 1)[0].lower() + + if not vcs.get_backend(vc_type): + error_message = 'For --editable=%s only ' % editable_req + \ + ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \ + ' is currently supported' + raise InstallationError(error_message) + + try: + options = _build_editable_options(editable_req) + except Exception: + message = sys.exc_info()[1] + raise InstallationError( + '--editable=%s error in editable options:%s' % (editable_req, message)) + + if not options or 'egg' not in options: + req = _build_req_from_url(editable_req) + if not req: + raise InstallationError('--editable=%s is not the right format; it must have #egg=Package' % editable_req) + else: + req = options['egg'] + + package = _strip_postfix(req) + return package, url, options + + +class UninstallPathSet(object): + """A set of file paths to be removed in the uninstallation of a + requirement.""" + def __init__(self, dist): + self.paths = set() + self._refuse = set() + self.pth = {} + self.dist = dist + self.save_dir = None + self._moved_paths = [] + + def _permitted(self, path): + """ + Return True if the given path is one we are permitted to + remove/modify, False otherwise. + + """ + return is_local(path) + + def _can_uninstall(self): + if not dist_is_local(self.dist): + if running_under_virtualenv(): + reason = "outside environment %s" % (sys.prefix,) + else: + reason = "owned by OS" + logger.notify( + "Not uninstalling %s at %s, %s", + self.dist.project_name, + normalize_path(self.dist.location), + reason + ) + return False + return True + + def add(self, path): + path = normalize_path(path) + if not os.path.exists(path): + return + if self._permitted(path): + self.paths.add(path) + else: + self._refuse.add(path) + + # __pycache__ files can show up after 'installed-files.txt' is created, due to imports + if os.path.splitext(path)[1] == '.py' and uses_pycache: + self.add(imp.cache_from_source(path)) + + + def add_pth(self, pth_file, entry): + pth_file = normalize_path(pth_file) + if self._permitted(pth_file): + if pth_file not in self.pth: + self.pth[pth_file] = UninstallPthEntries(pth_file) + self.pth[pth_file].add(entry) + else: + self._refuse.add(pth_file) + + def compact(self, paths): + """Compact a path set to contain the minimal number of paths + necessary to contain all paths in the set. 
If /a/path/ and
+        /a/path/to/a/file.txt are both in the set, leave only the
+        shorter path."""
+        short_paths = set()
+        for path in sorted(paths, key=len):
+            if not any([(path.startswith(shortpath) and
+                         path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
+                        for shortpath in short_paths]):
+                short_paths.add(path)
+        return short_paths
+
+    def _stash(self, path):
+        return os.path.join(
+            self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))
+
+    def remove(self, auto_confirm=False):
+        """Remove paths in ``self.paths`` with confirmation (unless
+        ``auto_confirm`` is True)."""
+        if not self._can_uninstall():
+            return
+        if not self.paths:
+            logger.notify("Can't uninstall '%s'. No files were found to uninstall." % self.dist.project_name)
+            return
+        logger.notify('Uninstalling %s:' % self.dist.project_name)
+        logger.indent += 2
+        paths = sorted(self.compact(self.paths))
+        try:
+            if auto_confirm:
+                response = 'y'
+            else:
+                for path in paths:
+                    logger.notify(path)
+                response = ask('Proceed (y/n)? ', ('y', 'n'))
+            if self._refuse:
+                logger.notify('Not removing or modifying (outside of prefix):')
+                for path in self.compact(self._refuse):
+                    logger.notify(path)
+            if response == 'y':
+                self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
+                                                 prefix='pip-')
+                for path in paths:
+                    new_path = self._stash(path)
+                    logger.info('Removing file or directory %s' % path)
+                    self._moved_paths.append(path)
+                    renames(path, new_path)
+                for pth in self.pth.values():
+                    pth.remove()
+                logger.notify('Successfully uninstalled %s' % self.dist.project_name)
+
+        finally:
+            logger.indent -= 2
+
+    def rollback(self):
+        """Rollback the changes previously made by remove()."""
+        if self.save_dir is None:
+            logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name)
+            return False
+        logger.notify('Rolling back uninstall of %s' % self.dist.project_name)
+        for path in self._moved_paths:
+            tmp_path = self._stash(path)
+            logger.info('Replacing %s' % path)
+            renames(tmp_path, path)
+        # self.pth maps pth file paths to UninstallPthEntries objects, so
+        # iterate over the values, not the keys (iterating the dict itself
+        # would hand back strings, which have no rollback()).
+        for pth in self.pth.values():
+            pth.rollback()
+
+    def commit(self):
+        """Remove temporary save dir: rollback will no longer be possible."""
+        if self.save_dir is not None:
+            rmtree(self.save_dir)
+            self.save_dir = None
+            self._moved_paths = []
+
+
+class UninstallPthEntries(object):
+    def __init__(self, pth_file):
+        if not os.path.isfile(pth_file):
+            raise UninstallationError("Cannot remove entries from nonexistent file %s" % pth_file)
+        self.file = pth_file
+        self.entries = set()
+        self._saved_lines = None
+
+    def add(self, entry):
+        entry = os.path.normcase(entry)
+        # On Windows, os.path.normcase converts the entry to use
+        # backslashes. This is correct for entries that describe absolute
+        # paths outside of site-packages, but all the others use forward
+        # slashes.
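+        # Illustrative (hypothetical values): on Windows,
+        # os.path.normcase('C:\\Tools\\Pkg') stays 'c:\\tools\\pkg' (it has a
+        # drive), while a drive-less 'pkgs\\name' is rewritten to 'pkgs/name'
+        # by the branch below.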
+ if sys.platform == 'win32' and not os.path.splitdrive(entry)[0]: + entry = entry.replace('\\', '/') + self.entries.add(entry) + + def remove(self): + logger.info('Removing pth entries from %s:' % self.file) + fh = open(self.file, 'rb') + # windows uses '\r\n' with py3k, but uses '\n' with py2.x + lines = fh.readlines() + self._saved_lines = lines + fh.close() + if any(b('\r\n') in line for line in lines): + endline = '\r\n' + else: + endline = '\n' + for entry in self.entries: + try: + logger.info('Removing entry: %s' % entry) + lines.remove(b(entry + endline)) + except ValueError: + pass + fh = open(self.file, 'wb') + fh.writelines(lines) + fh.close() + + def rollback(self): + if self._saved_lines is None: + logger.error('Cannot roll back changes to %s, none were made' % self.file) + return False + logger.info('Rolling %s back to previous state' % self.file) + fh = open(self.file, 'wb') + fh.writelines(self._saved_lines) + fh.close() + return True + + +class FakeFile(object): + """Wrap a list of lines in an object with readline() to make + ConfigParser happy.""" + def __init__(self, lines): + self._gen = (l for l in lines) + + def readline(self): + try: + try: + return next(self._gen) + except NameError: + return self._gen.next() + except StopIteration: + return '' + + def __iter__(self): + return self._gen diff --git a/lib/python3.4/site-packages/pip/runner.py b/lib/python3.4/site-packages/pip/runner.py new file mode 100644 index 0000000..be830ad --- /dev/null +++ b/lib/python3.4/site-packages/pip/runner.py @@ -0,0 +1,18 @@ +import sys +import os + + +def run(): + base = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + ## FIXME: this is kind of crude; if we could create a fake pip + ## module, then exec into it and update pip.__path__ properly, we + ## wouldn't have to update sys.path: + sys.path.insert(0, base) + import pip + return pip.main() + + +if __name__ == '__main__': + exit = run() + if exit: + sys.exit(exit) diff --git a/lib/python3.4/site-packages/pip/status_codes.py b/lib/python3.4/site-packages/pip/status_codes.py new file mode 100644 index 0000000..5e29502 --- /dev/null +++ b/lib/python3.4/site-packages/pip/status_codes.py @@ -0,0 +1,6 @@ +SUCCESS = 0 +ERROR = 1 +UNKNOWN_ERROR = 2 +VIRTUALENV_NOT_FOUND = 3 +PREVIOUS_BUILD_DIR_ERROR = 4 +NO_MATCHES_FOUND = 23 diff --git a/lib/python3.4/site-packages/pip/util.py b/lib/python3.4/site-packages/pip/util.py new file mode 100644 index 0000000..7deb87b --- /dev/null +++ b/lib/python3.4/site-packages/pip/util.py @@ -0,0 +1,771 @@ +import sys +import shutil +import os +import stat +import re +import posixpath +import zipfile +import tarfile +import subprocess +import textwrap +import tempfile + +from pip.exceptions import InstallationError, BadCommand, PipError +from pip.backwardcompat import(WindowsError, string_types, raw_input, + console_to_str, user_site, PermissionError) +from pip.locations import (site_packages, running_under_virtualenv, virtualenv_no_global, + distutils_scheme) +from pip.log import logger +import pkg_resources +from distlib import version + +__all__ = ['rmtree', 'display_path', 'backup_dir', + 'find_command', 'ask', 'Inf', + 'normalize_name', 'splitext', + 'format_size', 'is_installable_dir', + 'is_svn_page', 'file_contents', + 'split_leading_dir', 'has_leading_dir', + 'make_path_relative', 'normalize_path', + 'renames', 'get_terminal_size', 'get_prog', + 'unzip_file', 'untar_file', 'create_download_cache_folder', + 'cache_download', 'unpack_file', 'call_subprocess'] + + +def get_prog(): + try: 
+ if os.path.basename(sys.argv[0]) in ('__main__.py', '-c'): + return "%s -m pip" % sys.executable + except (AttributeError, TypeError, IndexError): + pass + return 'pip' + + +def rmtree(dir, ignore_errors=False): + shutil.rmtree(dir, ignore_errors=ignore_errors, + onerror=rmtree_errorhandler) + + +def rmtree_errorhandler(func, path, exc_info): + """On Windows, the files in .svn are read-only, so when rmtree() tries to + remove them, an exception is thrown. We catch that here, remove the + read-only attribute, and hopefully continue without problems.""" + exctype, value = exc_info[:2] + if not ((exctype is WindowsError and value.args[0] == 5) or #others + (exctype is OSError and value.args[0] == 13) or #python2.4 + (exctype is PermissionError and value.args[3] == 5) #python3.3 + ): + raise + # file type should currently be read only + if ((os.stat(path).st_mode & stat.S_IREAD) != stat.S_IREAD): + raise + # convert to read/write + os.chmod(path, stat.S_IWRITE) + # use the original function to repeat the operation + func(path) + + +def display_path(path): + """Gives the display value for a given path, making it relative to cwd + if possible.""" + path = os.path.normcase(os.path.abspath(path)) + if path.startswith(os.getcwd() + os.path.sep): + path = '.' + path[len(os.getcwd()):] + return path + + +def backup_dir(dir, ext='.bak'): + """Figure out the name of a directory to back up the given dir to + (adding .bak, .bak2, etc)""" + n = 1 + extension = ext + while os.path.exists(dir + extension): + n += 1 + extension = ext + str(n) + return dir + extension + + +def find_command(cmd, paths=None, pathext=None): + """Searches the PATH for the given command and returns its path""" + if paths is None: + paths = os.environ.get('PATH', '').split(os.pathsep) + if isinstance(paths, string_types): + paths = [paths] + # check if there are funny path extensions for executables, e.g. 
Windows + if pathext is None: + pathext = get_pathext() + pathext = [ext for ext in pathext.lower().split(os.pathsep) if len(ext)] + # don't use extensions if the command ends with one of them + if os.path.splitext(cmd)[1].lower() in pathext: + pathext = [''] + # check if we find the command on PATH + for path in paths: + # try without extension first + cmd_path = os.path.join(path, cmd) + for ext in pathext: + # then including the extension + cmd_path_ext = cmd_path + ext + if os.path.isfile(cmd_path_ext): + return cmd_path_ext + if os.path.isfile(cmd_path): + return cmd_path + raise BadCommand('Cannot find command %r' % cmd) + + +def get_pathext(default_pathext=None): + """Returns the path extensions from environment or a default""" + if default_pathext is None: + default_pathext = os.pathsep.join(['.COM', '.EXE', '.BAT', '.CMD']) + pathext = os.environ.get('PATHEXT', default_pathext) + return pathext + + +def ask_path_exists(message, options): + for action in os.environ.get('PIP_EXISTS_ACTION', '').split(): + if action in options: + return action + return ask(message, options) + + +def ask(message, options): + """Ask the message interactively, with the given possible responses""" + while 1: + if os.environ.get('PIP_NO_INPUT'): + raise Exception('No input was expected ($PIP_NO_INPUT set); question: %s' % message) + response = raw_input(message) + response = response.strip().lower() + if response not in options: + print('Your response (%r) was not one of the expected responses: %s' % ( + response, ', '.join(options))) + else: + return response + + +class _Inf(object): + """I am bigger than everything!""" + + def __eq__(self, other): + if self is other: + return True + else: + return False + + def __ne__(self, other): + return not self.__eq__(other) + + def __lt__(self, other): + return False + + def __le__(self, other): + return False + + def __gt__(self, other): + return True + + def __ge__(self, other): + return True + + def __repr__(self): + return 'Inf' + + +Inf = _Inf() #this object is not currently used as a sortable in our code +del _Inf + + +_normalize_re = re.compile(r'[^a-z]', re.I) + + +def normalize_name(name): + return _normalize_re.sub('-', name.lower()) + + +def format_size(bytes): + if bytes > 1000*1000: + return '%.1fMB' % (bytes/1000.0/1000) + elif bytes > 10*1000: + return '%ikB' % (bytes/1000) + elif bytes > 1000: + return '%.1fkB' % (bytes/1000.0) + else: + return '%ibytes' % bytes + + +def is_installable_dir(path): + """Return True if `path` is a directory containing a setup.py file.""" + if not os.path.isdir(path): + return False + setup_py = os.path.join(path, 'setup.py') + if os.path.isfile(setup_py): + return True + return False + + +def is_svn_page(html): + """Returns true if the page appears to be the index page of an svn repository""" + return (re.search(r'[^<]*Revision \d+:', html) + and re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)) + + +def file_contents(filename): + fp = open(filename, 'rb') + try: + return fp.read().decode('utf-8') + finally: + fp.close() + + +def split_leading_dir(path): + path = str(path) + path = path.lstrip('/').lstrip('\\') + if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) + or '\\' not in path): + return path.split('/', 1) + elif '\\' in path: + return path.split('\\', 1) + else: + return path, '' + + +def has_leading_dir(paths): + """Returns true if all the paths have the same leading path name + (i.e., everything is in one subdirectory in an archive)""" + common_prefix = None + for path in 
paths: + prefix, rest = split_leading_dir(path) + if not prefix: + return False + elif common_prefix is None: + common_prefix = prefix + elif prefix != common_prefix: + return False + return True + + +def make_path_relative(path, rel_to): + """ + Make a filename relative, where the filename path, and it is + relative to rel_to + + >>> make_relative_path('/usr/share/something/a-file.pth', + ... '/usr/share/another-place/src/Directory') + '../../../something/a-file.pth' + >>> make_relative_path('/usr/share/something/a-file.pth', + ... '/home/user/src/Directory') + '../../../usr/share/something/a-file.pth' + >>> make_relative_path('/usr/share/a-file.pth', '/usr/share/') + 'a-file.pth' + """ + path_filename = os.path.basename(path) + path = os.path.dirname(path) + path = os.path.normpath(os.path.abspath(path)) + rel_to = os.path.normpath(os.path.abspath(rel_to)) + path_parts = path.strip(os.path.sep).split(os.path.sep) + rel_to_parts = rel_to.strip(os.path.sep).split(os.path.sep) + while path_parts and rel_to_parts and path_parts[0] == rel_to_parts[0]: + path_parts.pop(0) + rel_to_parts.pop(0) + full_parts = ['..']*len(rel_to_parts) + path_parts + [path_filename] + if full_parts == ['']: + return '.' + os.path.sep + return os.path.sep.join(full_parts) + + +def normalize_path(path): + """ + Convert a path to its canonical, case-normalized, absolute version. + + """ + return os.path.normcase(os.path.realpath(os.path.expanduser(path))) + + +def splitext(path): + """Like os.path.splitext, but take off .tar too""" + base, ext = posixpath.splitext(path) + if base.lower().endswith('.tar'): + ext = base[-4:] + ext + base = base[:-4] + return base, ext + + +def renames(old, new): + """Like os.renames(), but handles renaming across devices.""" + # Implementation borrowed from os.renames(). + head, tail = os.path.split(new) + if head and tail and not os.path.exists(head): + os.makedirs(head) + + shutil.move(old, new) + + head, tail = os.path.split(old) + if head and tail: + try: + os.removedirs(head) + except OSError: + pass + + +def is_local(path): + """ + Return True if this is a path pip is allowed to modify. + + If we're in a virtualenv, sys.prefix points to the virtualenv's + prefix; only sys.prefix is considered local. + + If we're not in a virtualenv, in general we can modify anything. + However, if the OS vendor has configured distutils to install + somewhere other than sys.prefix (which could be a subdirectory of + sys.prefix, e.g. /usr/local), we consider sys.prefix itself nonlocal + and the domain of the OS vendor. (In other words, everything _other + than_ sys.prefix is considered local.) + + """ + + path = normalize_path(path) + prefix = normalize_path(sys.prefix) + + if running_under_virtualenv(): + return path.startswith(normalize_path(sys.prefix)) + else: + if path.startswith(prefix): + for local_path in distutils_scheme("").values(): + if path.startswith(normalize_path(local_path)): + return True + return False + else: + return True + + +def dist_is_local(dist): + """ + Return True if given Distribution object is installed somewhere pip + is allowed to modify. + + """ + return is_local(dist_location(dist)) + + +def dist_in_usersite(dist): + """ + Return True if given Distribution is installed in user site. + """ + if user_site: + return normalize_path(dist_location(dist)).startswith(normalize_path(user_site)) + else: + return False + +def dist_in_site_packages(dist): + """ + Return True if given Distribution is installed in distutils.sysconfig.get_python_lib(). 
+ """ + return normalize_path(dist_location(dist)).startswith(normalize_path(site_packages)) + + +def dist_is_editable(dist): + """Is distribution an editable install?""" + #TODO: factor out determining editableness out of FrozenRequirement + from pip import FrozenRequirement + req = FrozenRequirement.from_dist(dist, []) + return req.editable + +def get_installed_distributions(local_only=True, + skip=('setuptools', 'pip', 'python', 'distribute'), + include_editables=True, + editables_only=False): + """ + Return a list of installed Distribution objects. + + If ``local_only`` is True (default), only return installations + local to the current virtualenv, if in a virtualenv. + + ``skip`` argument is an iterable of lower-case project names to + ignore; defaults to ('setuptools', 'pip', 'python'). [FIXME also + skip virtualenv?] + + If ``editables`` is False, don't report editables. + + If ``editables_only`` is True , only report editables. + + """ + if local_only: + local_test = dist_is_local + else: + local_test = lambda d: True + + if include_editables: + editable_test = lambda d: True + else: + editable_test = lambda d: not dist_is_editable(d) + + if editables_only: + editables_only_test = lambda d: dist_is_editable(d) + else: + editables_only_test = lambda d: True + + return [d for d in pkg_resources.working_set + if local_test(d) + and d.key not in skip + and editable_test(d) + and editables_only_test(d) + ] + + +def egg_link_path(dist): + """ + Return the path for the .egg-link file if it exists, otherwise, None. + + There's 3 scenarios: + 1) not in a virtualenv + try to find in site.USER_SITE, then site_packages + 2) in a no-global virtualenv + try to find in site_packages + 3) in a yes-global virtualenv + try to find in site_packages, then site.USER_SITE (don't look in global location) + + For #1 and #3, there could be odd cases, where there's an egg-link in 2 locations. + This method will just return the first one found. + """ + sites = [] + if running_under_virtualenv(): + if virtualenv_no_global(): + sites.append(site_packages) + else: + sites.append(site_packages) + if user_site: + sites.append(user_site) + else: + if user_site: + sites.append(user_site) + sites.append(site_packages) + + for site in sites: + egglink = os.path.join(site, dist.project_name) + '.egg-link' + if os.path.isfile(egglink): + return egglink + + +def dist_location(dist): + """ + Get the site-packages location of this distribution. Generally + this is dist.location, except in the case of develop-installed + packages, where dist.location is the source code location, and we + want to know where the egg-link file is. 
+ + """ + egg_link = egg_link_path(dist) + if egg_link: + return egg_link + return dist.location + + +def get_terminal_size(): + """Returns a tuple (x, y) representing the width(x) and the height(x) + in characters of the terminal window.""" + def ioctl_GWINSZ(fd): + try: + import fcntl + import termios + import struct + cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, + '1234')) + except: + return None + if cr == (0, 0): + return None + if cr == (0, 0): + return None + return cr + cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) + if not cr: + try: + fd = os.open(os.ctermid(), os.O_RDONLY) + cr = ioctl_GWINSZ(fd) + os.close(fd) + except: + pass + if not cr: + cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) + return int(cr[1]), int(cr[0]) + + +def current_umask(): + """Get the current umask which involves having to set it temporarily.""" + mask = os.umask(0) + os.umask(mask) + return mask + + +def unzip_file(filename, location, flatten=True): + """ + Unzip the file (with path `filename`) to the destination `location`. All + files are written based on system defaults and umask (i.e. permissions are + not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. + """ + if not os.path.exists(location): + os.makedirs(location) + zipfp = open(filename, 'rb') + try: + zip = zipfile.ZipFile(zipfp) + leading = has_leading_dir(zip.namelist()) and flatten + for info in zip.infolist(): + name = info.filename + data = zip.read(name) + fn = name + if leading: + fn = split_leading_dir(name)[1] + fn = os.path.join(location, fn) + dir = os.path.dirname(fn) + if not os.path.exists(dir): + os.makedirs(dir) + if fn.endswith('/') or fn.endswith('\\'): + # A directory + if not os.path.exists(fn): + os.makedirs(fn) + else: + fp = open(fn, 'wb') + try: + fp.write(data) + finally: + fp.close() + mode = info.external_attr >> 16 + # if mode and regular file and any execute permissions for user/group/world? + if mode and stat.S_ISREG(mode) and mode & 0o111: + # make dest file have execute for user/group/world (chmod +x) + # no-op on windows per python docs + os.chmod(fn, (0o777-current_umask() | 0o111)) + finally: + zipfp.close() + + +def untar_file(filename, location): + """ + Untar the file (with path `filename`) to the destination `location`. + All files are written based on system defaults and umask (i.e. permissions + are not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. 
+ """ + if not os.path.exists(location): + os.makedirs(location) + if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): + mode = 'r:gz' + elif filename.lower().endswith('.bz2') or filename.lower().endswith('.tbz'): + mode = 'r:bz2' + elif filename.lower().endswith('.tar'): + mode = 'r' + else: + logger.warn('Cannot determine compression type for file %s' % filename) + mode = 'r:*' + tar = tarfile.open(filename, mode) + try: + # note: python<=2.5 doesnt seem to know about pax headers, filter them + leading = has_leading_dir([ + member.name for member in tar.getmembers() + if member.name != 'pax_global_header' + ]) + for member in tar.getmembers(): + fn = member.name + if fn == 'pax_global_header': + continue + if leading: + fn = split_leading_dir(fn)[1] + path = os.path.join(location, fn) + if member.isdir(): + if not os.path.exists(path): + os.makedirs(path) + elif member.issym(): + try: + tar._extract_member(member, path) + except: + e = sys.exc_info()[1] + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warn( + 'In the tar file %s the member %s is invalid: %s' + % (filename, member.name, e)) + continue + else: + try: + fp = tar.extractfile(member) + except (KeyError, AttributeError): + e = sys.exc_info()[1] + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warn( + 'In the tar file %s the member %s is invalid: %s' + % (filename, member.name, e)) + continue + if not os.path.exists(os.path.dirname(path)): + os.makedirs(os.path.dirname(path)) + destfp = open(path, 'wb') + try: + shutil.copyfileobj(fp, destfp) + finally: + destfp.close() + fp.close() + # member have any execute permissions for user/group/world? + if member.mode & 0o111: + # make dest file have execute for user/group/world + # no-op on windows per python docs + os.chmod(path, (0o777-current_umask() | 0o111)) + finally: + tar.close() + + +def create_download_cache_folder(folder): + logger.indent -= 2 + logger.notify('Creating supposed download cache at %s' % folder) + logger.indent += 2 + os.makedirs(folder) + + +def cache_download(target_file, temp_location, content_type): + logger.notify('Storing download in cache at %s' % display_path(target_file)) + shutil.copyfile(temp_location, target_file) + fp = open(target_file+'.content-type', 'w') + fp.write(content_type) + fp.close() + + +def unpack_file(filename, location, content_type, link): + filename = os.path.realpath(filename) + if (content_type == 'application/zip' + or filename.endswith('.zip') + or filename.endswith('.pybundle') + or filename.endswith('.whl') + or zipfile.is_zipfile(filename)): + unzip_file(filename, location, flatten=not filename.endswith(('.pybundle', '.whl'))) + elif (content_type == 'application/x-gzip' + or tarfile.is_tarfile(filename) + or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')): + untar_file(filename, location) + elif (content_type and content_type.startswith('text/html') + and is_svn_page(file_contents(filename))): + # We don't really care about this + from pip.vcs.subversion import Subversion + Subversion('svn+' + link.url).unpack(location) + else: + ## FIXME: handle? + ## FIXME: magic signatures? 
+ logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format' + % (filename, location, content_type)) + raise InstallationError('Cannot determine archive format of %s' % location) + + +def call_subprocess(cmd, show_stdout=True, + filter_stdout=None, cwd=None, + raise_on_returncode=True, + command_level=logger.DEBUG, command_desc=None, + extra_environ=None): + if command_desc is None: + cmd_parts = [] + for part in cmd: + if ' ' in part or '\n' in part or '"' in part or "'" in part: + part = '"%s"' % part.replace('"', '\\"') + cmd_parts.append(part) + command_desc = ' '.join(cmd_parts) + if show_stdout: + stdout = None + else: + stdout = subprocess.PIPE + logger.log(command_level, "Running command %s" % command_desc) + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + try: + proc = subprocess.Popen( + cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout, + cwd=cwd, env=env) + except Exception: + e = sys.exc_info()[1] + logger.fatal( + "Error %s while executing command %s" % (e, command_desc)) + raise + all_output = [] + if stdout is not None: + stdout = proc.stdout + while 1: + line = console_to_str(stdout.readline()) + if not line: + break + line = line.rstrip() + all_output.append(line + '\n') + if filter_stdout: + level = filter_stdout(line) + if isinstance(level, tuple): + level, line = level + logger.log(level, line) + if not logger.stdout_level_matches(level): + logger.show_progress() + else: + logger.info(line) + else: + returned_stdout, returned_stderr = proc.communicate() + all_output = [returned_stdout or ''] + proc.wait() + if proc.returncode: + if raise_on_returncode: + if all_output: + logger.notify('Complete output from command %s:' % command_desc) + logger.notify('\n'.join(all_output) + '\n----------------------------------------') + raise InstallationError( + "Command %s failed with error code %s in %s" + % (command_desc, proc.returncode, cwd)) + else: + logger.warn( + "Command %s had error code %s in %s" + % (command_desc, proc.returncode, cwd)) + if stdout is not None: + return ''.join(all_output) + + +def is_prerelease(vers): + """ + Attempt to determine if this is a pre-release using PEP386/PEP426 rules. + + Will return True if it is a pre-release and False if not. Versions are + assumed to be a pre-release if they cannot be parsed. + """ + normalized = version._suggest_normalized_version(vers) + + if normalized is None: + # Cannot normalize, assume it is a pre-release + return True + + parsed = version._normalized_key(normalized) + return any([any([y in set(["a", "b", "c", "rc", "dev"]) for y in x]) for x in parsed]) + + +class BuildDirectory(object): + + def __init__(self, name=None, delete=None): + # If we were not given an explicit directory, and we were not given an + # explicit delete option, then we'll default to deleting. + if name is None and delete is None: + delete = True + + if name is None: + name = tempfile.mkdtemp(prefix="pip-build-") + # If we were not given an explicit directory, and we were not given + # an explicit delete option, then we'll default to deleting. 
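+            # Sketch of the resulting behaviour (with a hypothetical path):
+            # BuildDirectory() builds in a fresh temp dir and removes it in
+            # cleanup(); BuildDirectory(name='/some/build/dir') keeps the
+            # directory unless delete=True is passed explicitly.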
+ if delete is None: + delete = True + + self.name = name + self.delete = delete + + def __repr__(self): + return "<{} {!r}>".format(self.__class__.__name__, self.name) + + def __enter__(self): + return self.name + + def __exit__(self, exc, value, tb): + self.cleanup() + + def cleanup(self): + if self.delete: + rmtree(self.name) diff --git a/lib/python3.4/site-packages/pip/vcs/__init__.py b/lib/python3.4/site-packages/pip/vcs/__init__.py new file mode 100644 index 0000000..a56dd20 --- /dev/null +++ b/lib/python3.4/site-packages/pip/vcs/__init__.py @@ -0,0 +1,251 @@ +"""Handles all VCS (version control) support""" + +import os +import shutil + +from pip.backwardcompat import urlparse, urllib +from pip.log import logger +from pip.util import (display_path, backup_dir, find_command, + rmtree, ask_path_exists) + + +__all__ = ['vcs', 'get_src_requirement'] + + +class VcsSupport(object): + _registry = {} + schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] + + def __init__(self): + # Register more schemes with urlparse for various version control systems + urlparse.uses_netloc.extend(self.schemes) + # Python >= 2.7.4, 3.3 doesn't have uses_fragment + if getattr(urlparse, 'uses_fragment', None): + urlparse.uses_fragment.extend(self.schemes) + super(VcsSupport, self).__init__() + + def __iter__(self): + return self._registry.__iter__() + + @property + def backends(self): + return list(self._registry.values()) + + @property + def dirnames(self): + return [backend.dirname for backend in self.backends] + + @property + def all_schemes(self): + schemes = [] + for backend in self.backends: + schemes.extend(backend.schemes) + return schemes + + def register(self, cls): + if not hasattr(cls, 'name'): + logger.warn('Cannot register VCS %s' % cls.__name__) + return + if cls.name not in self._registry: + self._registry[cls.name] = cls + + def unregister(self, cls=None, name=None): + if name in self._registry: + del self._registry[name] + elif cls in self._registry.values(): + del self._registry[cls.name] + else: + logger.warn('Cannot unregister because no class or name given') + + def get_backend_name(self, location): + """ + Return the name of the version control backend if found at given + location, e.g. 
vcs.get_backend_name('/path/to/vcs/checkout')
+        """
+        for vc_type in self._registry.values():
+            path = os.path.join(location, vc_type.dirname)
+            if os.path.exists(path):
+                return vc_type.name
+        return None
+
+    def get_backend(self, name):
+        name = name.lower()
+        if name in self._registry:
+            return self._registry[name]
+
+    def get_backend_from_location(self, location):
+        vc_type = self.get_backend_name(location)
+        if vc_type:
+            return self.get_backend(vc_type)
+        return None
+
+
+vcs = VcsSupport()
+
+
+class VersionControl(object):
+    name = ''
+    dirname = ''
+
+    def __init__(self, url=None, *args, **kwargs):
+        self.url = url
+        self._cmd = None
+        super(VersionControl, self).__init__(*args, **kwargs)
+
+    def _filter(self, line):
+        return (logger.INFO, line)
+
+    def _is_local_repository(self, repo):
+        """
+        posix absolute paths start with os.path.sep,
+        win32 ones start with a drive (like c:\\folder)
+        """
+        drive, tail = os.path.splitdrive(repo)
+        return repo.startswith(os.path.sep) or drive
+
+    @property
+    def cmd(self):
+        if self._cmd is not None:
+            return self._cmd
+        command = find_command(self.name)
+        logger.info('Found command %r at %r' % (self.name, command))
+        self._cmd = command
+        return command
+
+    def get_url_rev(self):
+        """
+        Returns the correct repository URL and revision by parsing the given
+        repository URL
+        """
+        error_message = (
+            "Sorry, '%s' is a malformed VCS url. "
+            "The format is <vcs>+<protocol>://<url>, "
+            "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp")
+        assert '+' in self.url, error_message % self.url
+        url = self.url.split('+', 1)[1]
+        scheme, netloc, path, query, frag = urlparse.urlsplit(url)
+        rev = None
+        if '@' in path:
+            path, rev = path.rsplit('@', 1)
+        url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
+        return url, rev
+
+    def get_info(self, location):
+        """
+        Returns (url, revision), where both are strings
+        """
+        assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
+        return self.get_url(location), self.get_revision(location)
+
+    def normalize_url(self, url):
+        """
+        Normalize a URL for comparison by unquoting it and removing any trailing slash.
+        """
+        return urllib.unquote(url).rstrip('/')
+
+    def compare_urls(self, url1, url2):
+        """
+        Compare two repo URLs for identity, ignoring incidental differences.
+        """
+        return (self.normalize_url(url1) == self.normalize_url(url2))
+
+    def parse_vcs_bundle_file(self, content):
+        """
+        Takes the contents of the bundled text file that explains how to revert
+        the stripped off version control data of the given package and returns
+        the URL and revision of it.
+        """
+        raise NotImplementedError
+
+    def obtain(self, dest):
+        """
+        Called when installing or updating an editable package, takes the
+        source path of the checkout.
+        """
+        raise NotImplementedError
+
+    def switch(self, dest, url, rev_options):
+        """
+        Switch the repo at ``dest`` to point to ``URL``.
+        """
+        raise NotImplementedError
+
+    def update(self, dest, rev_options):
+        """
+        Update an already-existing repo to the given ``rev_options``.
+        """
+        raise NotImplementedError
+
+    def check_destination(self, dest, url, rev_options, rev_display):
+        """
+        Prepare a location to receive a checkout/clone.
+
+        Return True if the location is ready for (and requires) a
+        checkout/clone, False otherwise.
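+
+        For example, an existing checkout at ``dest`` whose URL matches is
+        simply updated in place, while a mismatched URL leads to the
+        (s)witch/(i)gnore/(w)ipe/(b)ackup prompt handled below.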
+ """ + checkout = True + prompt = False + if os.path.exists(dest): + checkout = False + if os.path.exists(os.path.join(dest, self.dirname)): + existing_url = self.get_url(dest) + if self.compare_urls(existing_url, url): + logger.info('%s in %s exists, and has correct URL (%s)' % + (self.repo_name.title(), display_path(dest), + url)) + logger.notify('Updating %s %s%s' % + (display_path(dest), self.repo_name, + rev_display)) + self.update(dest, rev_options) + else: + logger.warn('%s %s in %s exists with URL %s' % + (self.name, self.repo_name, + display_path(dest), existing_url)) + prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', + ('s', 'i', 'w', 'b')) + else: + logger.warn('Directory %s already exists, ' + 'and is not a %s %s.' % + (dest, self.name, self.repo_name)) + prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b')) + if prompt: + logger.warn('The plan is to install the %s repository %s' % + (self.name, url)) + response = ask_path_exists('What to do? %s' % prompt[0], + prompt[1]) + + if response == 's': + logger.notify('Switching %s %s to %s%s' % + (self.repo_name, display_path(dest), url, + rev_display)) + self.switch(dest, url, rev_options) + elif response == 'i': + # do nothing + pass + elif response == 'w': + logger.warn('Deleting %s' % display_path(dest)) + rmtree(dest) + checkout = True + elif response == 'b': + dest_dir = backup_dir(dest) + logger.warn('Backing up %s to %s' + % (display_path(dest), dest_dir)) + shutil.move(dest, dest_dir) + checkout = True + return checkout + + def unpack(self, location): + if os.path.exists(location): + rmtree(location) + self.obtain(location) + + def get_src_requirement(self, dist, location, find_tags=False): + raise NotImplementedError + + +def get_src_requirement(dist, location, find_tags): + version_control = vcs.get_backend_from_location(location) + if version_control: + return version_control().get_src_requirement(dist, location, find_tags) + logger.warn('cannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location) + return dist.as_requirement() diff --git a/lib/python3.4/site-packages/pip/vcs/bazaar.py b/lib/python3.4/site-packages/pip/vcs/bazaar.py new file mode 100644 index 0000000..c62c9c8 --- /dev/null +++ b/lib/python3.4/site-packages/pip/vcs/bazaar.py @@ -0,0 +1,131 @@ +import os +import tempfile +import re +from pip.backwardcompat import urlparse +from pip.log import logger +from pip.util import rmtree, display_path, call_subprocess +from pip.vcs import vcs, VersionControl +from pip.download import path_to_url + + +class Bazaar(VersionControl): + name = 'bzr' + dirname = '.bzr' + repo_name = 'branch' + bundle_file = 'bzr-branch.txt' + schemes = ('bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp', 'bzr+lp') + guide = ('# This was a Bazaar branch; to make it a branch again run:\n' + 'bzr branch -r %(rev)s %(url)s .\n') + + def __init__(self, url=None, *args, **kwargs): + super(Bazaar, self).__init__(url, *args, **kwargs) + # Python >= 2.7.4, 3.3 doesn't have uses_fragment or non_hierarchical + # Register lp but do not expose as a scheme to support bzr+lp. 
+ if getattr(urlparse, 'uses_fragment', None): + urlparse.uses_fragment.extend(['lp']) + urlparse.non_hierarchical.extend(['lp']) + + def parse_vcs_bundle_file(self, content): + url = rev = None + for line in content.splitlines(): + if not line.strip() or line.strip().startswith('#'): + continue + match = re.search(r'^bzr\s*branch\s*-r\s*(\d*)', line) + if match: + rev = match.group(1).strip() + url = line[match.end():].strip().split(None, 1)[0] + if url and rev: + return url, rev + return None, None + + def export(self, location): + """Export the Bazaar repository at the url to the destination location""" + temp_dir = tempfile.mkdtemp('-export', 'pip-') + self.unpack(temp_dir) + if os.path.exists(location): + # Remove the location to make sure Bazaar can export it correctly + rmtree(location) + try: + call_subprocess([self.cmd, 'export', location], cwd=temp_dir, + filter_stdout=self._filter, show_stdout=False) + finally: + rmtree(temp_dir) + + def switch(self, dest, url, rev_options): + call_subprocess([self.cmd, 'switch', url], cwd=dest) + + def update(self, dest, rev_options): + call_subprocess( + [self.cmd, 'pull', '-q'] + rev_options, cwd=dest) + + def obtain(self, dest): + url, rev = self.get_url_rev() + if rev: + rev_options = ['-r', rev] + rev_display = ' (to revision %s)' % rev + else: + rev_options = [] + rev_display = '' + if self.check_destination(dest, url, rev_options, rev_display): + logger.notify('Checking out %s%s to %s' + % (url, rev_display, display_path(dest))) + call_subprocess( + [self.cmd, 'branch', '-q'] + rev_options + [url, dest]) + + def get_url_rev(self): + # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it + url, rev = super(Bazaar, self).get_url_rev() + if url.startswith('ssh://'): + url = 'bzr+' + url + return url, rev + + def get_url(self, location): + urls = call_subprocess( + [self.cmd, 'info'], show_stdout=False, cwd=location) + for line in urls.splitlines(): + line = line.strip() + for x in ('checkout of branch: ', + 'parent branch: '): + if line.startswith(x): + repo = line.split(x)[1] + if self._is_local_repository(repo): + return path_to_url(repo) + return repo + return None + + def get_revision(self, location): + revision = call_subprocess( + [self.cmd, 'revno'], show_stdout=False, cwd=location) + return revision.splitlines()[-1] + + def get_tag_revs(self, location): + tags = call_subprocess( + [self.cmd, 'tags'], show_stdout=False, cwd=location) + tag_revs = [] + for line in tags.splitlines(): + tags_match = re.search(r'([.\w-]+)\s*(.*)$', line) + if tags_match: + tag = tags_match.group(1) + rev = tags_match.group(2) + tag_revs.append((rev.strip(), tag.strip())) + return dict(tag_revs) + + def get_src_requirement(self, dist, location, find_tags): + repo = self.get_url(location) + if not repo.lower().startswith('bzr:'): + repo = 'bzr+' + repo + egg_project_name = dist.egg_name().split('-', 1)[0] + if not repo: + return None + current_rev = self.get_revision(location) + tag_revs = self.get_tag_revs(location) + + if current_rev in tag_revs: + # It's a tag + full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev]) + else: + full_egg_name = '%s-dev_r%s' % (dist.egg_name(), current_rev) + return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name) + + +vcs.register(Bazaar) diff --git a/lib/python3.4/site-packages/pip/vcs/git.py b/lib/python3.4/site-packages/pip/vcs/git.py new file mode 100644 index 0000000..16acebd --- /dev/null +++ b/lib/python3.4/site-packages/pip/vcs/git.py @@ -0,0 +1,194 @@ +import tempfile +import re 
+import os.path +from pip.util import call_subprocess +from pip.util import display_path, rmtree +from pip.vcs import vcs, VersionControl +from pip.log import logger +from pip.backwardcompat import url2pathname, urlparse +urlsplit = urlparse.urlsplit +urlunsplit = urlparse.urlunsplit + + +class Git(VersionControl): + name = 'git' + dirname = '.git' + repo_name = 'clone' + schemes = ('git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file') + bundle_file = 'git-clone.txt' + guide = ('# This was a Git repo; to make it a repo again run:\n' + 'git init\ngit remote add origin %(url)s -f\ngit checkout %(rev)s\n') + + def __init__(self, url=None, *args, **kwargs): + + # Works around an apparent Git bug + # (see http://article.gmane.org/gmane.comp.version-control.git/146500) + if url: + scheme, netloc, path, query, fragment = urlsplit(url) + if scheme.endswith('file'): + initial_slashes = path[:-len(path.lstrip('/'))] + newpath = initial_slashes + url2pathname(path).replace('\\', '/').lstrip('/') + url = urlunsplit((scheme, netloc, newpath, query, fragment)) + after_plus = scheme.find('+') + 1 + url = scheme[:after_plus] + urlunsplit((scheme[after_plus:], netloc, newpath, query, fragment)) + + super(Git, self).__init__(url, *args, **kwargs) + + def parse_vcs_bundle_file(self, content): + url = rev = None + for line in content.splitlines(): + if not line.strip() or line.strip().startswith('#'): + continue + url_match = re.search(r'git\s*remote\s*add\s*origin(.*)\s*-f', line) + if url_match: + url = url_match.group(1).strip() + rev_match = re.search(r'^git\s*checkout\s*-q\s*(.*)\s*', line) + if rev_match: + rev = rev_match.group(1).strip() + if url and rev: + return url, rev + return None, None + + def export(self, location): + """Export the Git repository at the url to the destination location""" + temp_dir = tempfile.mkdtemp('-export', 'pip-') + self.unpack(temp_dir) + try: + if not location.endswith('/'): + location = location + '/' + call_subprocess( + [self.cmd, 'checkout-index', '-a', '-f', '--prefix', location], + filter_stdout=self._filter, show_stdout=False, cwd=temp_dir) + finally: + rmtree(temp_dir) + + def check_rev_options(self, rev, dest, rev_options): + """Check the revision options before checkout to compensate that tags + and branches may need origin/ as a prefix. + Returns the SHA1 of the branch or tag if found. + """ + revisions = self.get_refs(dest) + + origin_rev = 'origin/%s' % rev + if origin_rev in revisions: + # remote branch + return [revisions[origin_rev]] + elif rev in revisions: + # a local tag or branch name + return [revisions[rev]] + else: + logger.warn("Could not find a tag or branch '%s', assuming commit." 
% rev)
+        return rev_options
+
+    def switch(self, dest, url, rev_options):
+        call_subprocess(
+            [self.cmd, 'config', 'remote.origin.url', url], cwd=dest)
+        call_subprocess(
+            [self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
+
+        self.update_submodules(dest)
+
+    def update(self, dest, rev_options):
+        # First fetch changes from the default remote
+        call_subprocess([self.cmd, 'fetch', '-q'], cwd=dest)
+        # Then reset to wanted revision (maybe even origin/master)
+        if rev_options:
+            rev_options = self.check_rev_options(rev_options[0], dest, rev_options)
+        call_subprocess([self.cmd, 'reset', '--hard', '-q'] + rev_options, cwd=dest)
+        #: update submodules
+        self.update_submodules(dest)
+
+    def obtain(self, dest):
+        url, rev = self.get_url_rev()
+        if rev:
+            rev_options = [rev]
+            rev_display = ' (to %s)' % rev
+        else:
+            rev_options = ['origin/master']
+            rev_display = ''
+        if self.check_destination(dest, url, rev_options, rev_display):
+            logger.notify('Cloning %s%s to %s' % (url, rev_display, display_path(dest)))
+            call_subprocess([self.cmd, 'clone', '-q', url, dest])
+            #: repo may contain submodules
+            self.update_submodules(dest)
+            if rev:
+                rev_options = self.check_rev_options(rev, dest, rev_options)
+                # Only do a checkout if rev_options differs from HEAD
+                if not self.get_revision(dest).startswith(rev_options[0]):
+                    call_subprocess([self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
+
+    def get_url(self, location):
+        url = call_subprocess(
+            [self.cmd, 'config', 'remote.origin.url'],
+            show_stdout=False, cwd=location)
+        return url.strip()
+
+    def get_revision(self, location):
+        current_rev = call_subprocess(
+            [self.cmd, 'rev-parse', 'HEAD'], show_stdout=False, cwd=location)
+        return current_rev.strip()
+
+    def get_refs(self, location):
+        """Return map of named refs (branches or tags) to commit hashes."""
+        output = call_subprocess([self.cmd, 'show-ref'],
+                                 show_stdout=False, cwd=location)
+        rv = {}
+        for line in output.strip().splitlines():
+            commit, ref = line.split(' ', 1)
+            ref = ref.strip()
+            ref_name = None
+            if ref.startswith('refs/remotes/'):
+                ref_name = ref[len('refs/remotes/'):]
+            elif ref.startswith('refs/heads/'):
+                ref_name = ref[len('refs/heads/'):]
+            elif ref.startswith('refs/tags/'):
+                ref_name = ref[len('refs/tags/'):]
+            if ref_name is not None:
+                rv[ref_name] = commit.strip()
+        return rv
+
+    def get_src_requirement(self, dist, location, find_tags):
+        repo = self.get_url(location)
+        if not repo.lower().startswith('git:'):
+            repo = 'git+' + repo
+        egg_project_name = dist.egg_name().split('-', 1)[0]
+        if not repo:
+            return None
+        current_rev = self.get_revision(location)
+        refs = self.get_refs(location)
+        # refs maps names to commit hashes; we need the inverse
+        # if multiple names map to a single commit, this arbitrarily picks one
+        names_by_commit = dict((commit, ref) for ref, commit in refs.items())
+
+        if current_rev in names_by_commit:
+            # It's a tag
+            full_egg_name = '%s-%s' % (egg_project_name, names_by_commit[current_rev])
+        else:
+            full_egg_name = '%s-dev' % egg_project_name
+
+        return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
+
+    def get_url_rev(self):
+        """
+        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
+        That's required because although they use SSH they sometimes don't
+        work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
+        parsing. Hence we remove it again afterwards and return it as a stub.
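+
+        Worked example (hypothetical repository):
+        'git+git@myhost.org:user/repo.git' is parsed internally as
+        'git+ssh://git@myhost.org:user/repo.git' and returned as
+        ('git@myhost.org:user/repo.git', None).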
+ """ + if not '://' in self.url: + assert not 'file:' in self.url + self.url = self.url.replace('git+', 'git+ssh://') + url, rev = super(Git, self).get_url_rev() + url = url.replace('ssh://', '') + else: + url, rev = super(Git, self).get_url_rev() + + return url, rev + + def update_submodules(self, location): + if not os.path.exists(os.path.join(location, '.gitmodules')): + return + call_subprocess([self.cmd, 'submodule', 'update', '--init', '--recursive', '-q'], + cwd=location) + +vcs.register(Git) diff --git a/lib/python3.4/site-packages/pip/vcs/mercurial.py b/lib/python3.4/site-packages/pip/vcs/mercurial.py new file mode 100644 index 0000000..2dbe3fc --- /dev/null +++ b/lib/python3.4/site-packages/pip/vcs/mercurial.py @@ -0,0 +1,151 @@ +import os +import tempfile +import re +import sys +from pip.util import call_subprocess +from pip.util import display_path, rmtree +from pip.log import logger +from pip.vcs import vcs, VersionControl +from pip.download import path_to_url +from pip.backwardcompat import ConfigParser + + +class Mercurial(VersionControl): + name = 'hg' + dirname = '.hg' + repo_name = 'clone' + schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http') + bundle_file = 'hg-clone.txt' + guide = ('# This was a Mercurial repo; to make it a repo again run:\n' + 'hg init\nhg pull %(url)s\nhg update -r %(rev)s\n') + + def parse_vcs_bundle_file(self, content): + url = rev = None + for line in content.splitlines(): + if not line.strip() or line.strip().startswith('#'): + continue + url_match = re.search(r'hg\s*pull\s*(.*)\s*', line) + if url_match: + url = url_match.group(1).strip() + rev_match = re.search(r'^hg\s*update\s*-r\s*(.*)\s*', line) + if rev_match: + rev = rev_match.group(1).strip() + if url and rev: + return url, rev + return None, None + + def export(self, location): + """Export the Hg repository at the url to the destination location""" + temp_dir = tempfile.mkdtemp('-export', 'pip-') + self.unpack(temp_dir) + try: + call_subprocess( + [self.cmd, 'archive', location], + filter_stdout=self._filter, show_stdout=False, cwd=temp_dir) + finally: + rmtree(temp_dir) + + def switch(self, dest, url, rev_options): + repo_config = os.path.join(dest, self.dirname, 'hgrc') + config = ConfigParser.SafeConfigParser() + try: + config.read(repo_config) + config.set('paths', 'default', url) + config_file = open(repo_config, 'w') + config.write(config_file) + config_file.close() + except (OSError, ConfigParser.NoSectionError): + e = sys.exc_info()[1] + logger.warn( + 'Could not switch Mercurial repository to %s: %s' + % (url, e)) + else: + call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest) + + def update(self, dest, rev_options): + call_subprocess([self.cmd, 'pull', '-q'], cwd=dest) + call_subprocess( + [self.cmd, 'update', '-q'] + rev_options, cwd=dest) + + def obtain(self, dest): + url, rev = self.get_url_rev() + if rev: + rev_options = [rev] + rev_display = ' (to revision %s)' % rev + else: + rev_options = [] + rev_display = '' + if self.check_destination(dest, url, rev_options, rev_display): + logger.notify('Cloning hg %s%s to %s' + % (url, rev_display, display_path(dest))) + call_subprocess([self.cmd, 'clone', '--noupdate', '-q', url, dest]) + call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest) + + def get_url(self, location): + url = call_subprocess( + [self.cmd, 'showconfig', 'paths.default'], + show_stdout=False, cwd=location).strip() + if self._is_local_repository(url): + url = path_to_url(url) + return url.strip() + + def 
get_tag_revs(self, location): + tags = call_subprocess( + [self.cmd, 'tags'], show_stdout=False, cwd=location) + tag_revs = [] + for line in tags.splitlines(): + tags_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line) + if tags_match: + tag = tags_match.group(1) + rev = tags_match.group(2) + if "tip" != tag: + tag_revs.append((rev.strip(), tag.strip())) + return dict(tag_revs) + + def get_branch_revs(self, location): + branches = call_subprocess( + [self.cmd, 'branches'], show_stdout=False, cwd=location) + branch_revs = [] + for line in branches.splitlines(): + branches_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line) + if branches_match: + branch = branches_match.group(1) + rev = branches_match.group(2) + if "default" != branch: + branch_revs.append((rev.strip(), branch.strip())) + return dict(branch_revs) + + def get_revision(self, location): + current_revision = call_subprocess( + [self.cmd, 'parents', '--template={rev}'], + show_stdout=False, cwd=location).strip() + return current_revision + + def get_revision_hash(self, location): + current_rev_hash = call_subprocess( + [self.cmd, 'parents', '--template={node}'], + show_stdout=False, cwd=location).strip() + return current_rev_hash + + def get_src_requirement(self, dist, location, find_tags): + repo = self.get_url(location) + if not repo.lower().startswith('hg:'): + repo = 'hg+' + repo + egg_project_name = dist.egg_name().split('-', 1)[0] + if not repo: + return None + current_rev = self.get_revision(location) + current_rev_hash = self.get_revision_hash(location) + tag_revs = self.get_tag_revs(location) + branch_revs = self.get_branch_revs(location) + if current_rev in tag_revs: + # It's a tag + full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev]) + elif current_rev in branch_revs: + # It's the tip of a branch + full_egg_name = '%s-%s' % (egg_project_name, branch_revs[current_rev]) + else: + full_egg_name = '%s-dev' % egg_project_name + return '%s@%s#egg=%s' % (repo, current_rev_hash, full_egg_name) + +vcs.register(Mercurial) diff --git a/lib/python3.4/site-packages/pip/vcs/subversion.py b/lib/python3.4/site-packages/pip/vcs/subversion.py new file mode 100644 index 0000000..88163ff --- /dev/null +++ b/lib/python3.4/site-packages/pip/vcs/subversion.py @@ -0,0 +1,273 @@ +import os +import re +from pip.backwardcompat import urlparse +from pip.index import Link +from pip.util import rmtree, display_path, call_subprocess +from pip.log import logger +from pip.vcs import vcs, VersionControl + +_svn_xml_url_re = re.compile('url="([^"]+)"') +_svn_rev_re = re.compile('committed-rev="(\d+)"') +_svn_url_re = re.compile(r'URL: (.+)') +_svn_revision_re = re.compile(r'Revision: (.+)') +_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"') +_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>') + + +class Subversion(VersionControl): + name = 'svn' + dirname = '.svn' + repo_name = 'checkout' + schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn') + bundle_file = 'svn-checkout.txt' + guide = ('# This was an svn checkout; to make it a checkout again run:\n' + 'svn checkout --force -r %(rev)s %(url)s .\n') + + def get_info(self, location): + """Returns (url, revision), where both are strings""" + assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location + output = call_subprocess( + [self.cmd, 'info', location], show_stdout=False, extra_environ={'LANG': 'C'}) + match = _svn_url_re.search(output) + if not match: + logger.warn('Cannot determine URL of svn checkout %s' % 
display_path(location)) + logger.info('Output that cannot be parsed: \n%s' % output) + return None, None + url = match.group(1).strip() + match = _svn_revision_re.search(output) + if not match: + logger.warn('Cannot determine revision of svn checkout %s' % display_path(location)) + logger.info('Output that cannot be parsed: \n%s' % output) + return url, None + return url, match.group(1) + + def parse_vcs_bundle_file(self, content): + for line in content.splitlines(): + if not line.strip() or line.strip().startswith('#'): + continue + match = re.search(r'^-r\s*([^ ])?', line) + if not match: + return None, None + rev = match.group(1) + rest = line[match.end():].strip().split(None, 1)[0] + return rest, rev + return None, None + + def export(self, location): + """Export the svn repository at the url to the destination location""" + url, rev = self.get_url_rev() + rev_options = get_rev_options(url, rev) + logger.notify('Exporting svn repository %s to %s' % (url, location)) + logger.indent += 2 + try: + if os.path.exists(location): + # Subversion doesn't like to check out over an existing directory + # --force fixes this, but was only added in svn 1.5 + rmtree(location) + call_subprocess( + [self.cmd, 'export'] + rev_options + [url, location], + filter_stdout=self._filter, show_stdout=False) + finally: + logger.indent -= 2 + + def switch(self, dest, url, rev_options): + call_subprocess( + [self.cmd, 'switch'] + rev_options + [url, dest]) + + def update(self, dest, rev_options): + call_subprocess( + [self.cmd, 'update'] + rev_options + [dest]) + + def obtain(self, dest): + url, rev = self.get_url_rev() + rev_options = get_rev_options(url, rev) + if rev: + rev_display = ' (to revision %s)' % rev + else: + rev_display = '' + if self.check_destination(dest, url, rev_options, rev_display): + logger.notify('Checking out %s%s to %s' + % (url, rev_display, display_path(dest))) + call_subprocess( + [self.cmd, 'checkout', '-q'] + rev_options + [url, dest]) + + def get_location(self, dist, dependency_links): + for url in dependency_links: + egg_fragment = Link(url).egg_fragment + if not egg_fragment: + continue + if '-' in egg_fragment: + ## FIXME: will this work when a package has - in the name? + key = '-'.join(egg_fragment.split('-')[:-1]).lower() + else: + key = egg_fragment + if key == dist.key: + return url.split('#', 1)[0] + return None + + def get_revision(self, location): + """ + Return the maximum revision for all files under a given location + """ + # Note: taken from setuptools.command.egg_info + revision = 0 + + for base, dirs, files in os.walk(location): + if self.dirname not in dirs: + dirs[:] = [] + continue # no sense walking uncontrolled subdirs + dirs.remove(self.dirname) + entries_fn = os.path.join(base, self.dirname, 'entries') + if not os.path.exists(entries_fn): + ## FIXME: should we warn? 
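+                # Context: each directory of a Subversion <= 1.6 working copy
+                # keeps its own .svn/entries metadata file; a directory without
+                # one is skipped rather than treated as an error.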
+ continue + + dirurl, localrev = self._get_svn_url_rev(base) + + if base == location: + base_url = dirurl + '/' # save the root url + elif not dirurl or not dirurl.startswith(base_url): + dirs[:] = [] + continue # not part of the same svn tree, skip it + revision = max(revision, localrev) + return revision + + def get_url_rev(self): + # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it + url, rev = super(Subversion, self).get_url_rev() + if url.startswith('ssh://'): + url = 'svn+' + url + return url, rev + + def get_url(self, location): + # In cases where the source is in a subdirectory, not alongside setup.py + # we have to look up in the location until we find a real setup.py + orig_location = location + while not os.path.exists(os.path.join(location, 'setup.py')): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without finding setup.py + logger.warn("Could not find setup.py for directory %s (tried all parent directories)" + % orig_location) + return None + + return self._get_svn_url_rev(location)[0] + + def _get_svn_url_rev(self, location): + from pip.exceptions import InstallationError + + f = open(os.path.join(location, self.dirname, 'entries')) + data = f.read() + f.close() + if data.startswith('8') or data.startswith('9') or data.startswith('10'): + data = list(map(str.splitlines, data.split('\n\x0c\n'))) + del data[0][0] # get rid of the '8' + url = data[0][3] + revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0] + elif data.startswith('<?xml'): + match = _svn_xml_url_re.search(data) + if not match: + raise ValueError('Badly formatted data: %r' % data) + url = match.group(1) # get repository URL + revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0] + else: + try: + # subversion >= 1.7 + xml = call_subprocess([self.cmd, 'info', '--xml', location], show_stdout=False) + url = _svn_info_xml_url_re.search(xml).group(1) + revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)] + except InstallationError: + url, revs = None, [] + + if revs: + rev = max(revs) + else: + rev = 0 + + return url, rev + + def get_tag_revs(self, svn_tag_url): + stdout = call_subprocess( + [self.cmd, 'ls', '-v', svn_tag_url], show_stdout=False) + results = [] + for line in stdout.splitlines(): + parts = line.split() + rev = int(parts[0]) + tag = parts[-1].strip('/') + results.append((tag, rev)) + return results + + def find_tag_match(self, rev, tag_revs): + best_match_rev = None + best_tag = None + for tag, tag_rev in tag_revs: + if (tag_rev > rev and + (best_match_rev is None or best_match_rev > tag_rev)): + # FIXME: Is best_match > tag_rev really possible? + # or is it a sign something is wacky? + best_match_rev = tag_rev + best_tag = tag + return best_tag + + def get_src_requirement(self, dist, location, find_tags=False): + repo = self.get_url(location) + if repo is None: + return None + parts = repo.split('/') + ## FIXME: why not project name? + egg_project_name = dist.egg_name().split('-', 1)[0] + rev = self.get_revision(location) + if parts[-2] in ('tags', 'tag'): + # It's a tag, perfect! 
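+            # e.g. (hypothetical URL) '.../svn/MyApp/tags/1.2' yields
+            # full_egg_name 'MyApp-1.2' below.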
+ full_egg_name = '%s-%s' % (egg_project_name, parts[-1]) + elif parts[-2] in ('branches', 'branch'): + # It's a branch :( + full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev) + elif parts[-1] == 'trunk': + # Trunk :-/ + full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev) + if find_tags: + tag_url = '/'.join(parts[:-1]) + '/tags' + tag_revs = self.get_tag_revs(tag_url) + match = self.find_tag_match(rev, tag_revs) + if match: + logger.notify('trunk checkout %s seems to be equivalent to tag %s' % match) + repo = '%s/%s' % (tag_url, match) + full_egg_name = '%s-%s' % (egg_project_name, match) + else: + # Don't know what it is + logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo) + full_egg_name = '%s-dev_r%s' % (egg_project_name, rev) + return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name) + + +def get_rev_options(url, rev): + if rev: + rev_options = ['-r', rev] + else: + rev_options = [] + + r = urlparse.urlsplit(url) + if hasattr(r, 'username'): + # >= Python-2.5 + username, password = r.username, r.password + else: + netloc = r[1] + if '@' in netloc: + auth = netloc.split('@')[0] + if ':' in auth: + username, password = auth.split(':', 1) + else: + username, password = auth, None + else: + username, password = None, None + + if username: + rev_options += ['--username', username] + if password: + rev_options += ['--password', password] + return rev_options + + +vcs.register(Subversion) diff --git a/lib/python3.4/site-packages/pip/wheel.py b/lib/python3.4/site-packages/pip/wheel.py new file mode 100644 index 0000000..1c937ff --- /dev/null +++ b/lib/python3.4/site-packages/pip/wheel.py @@ -0,0 +1,559 @@ +""" +Support for installing and building the "wheel" binary package format. +""" +from __future__ import with_statement + +import compileall +import csv +import functools +import hashlib +import os +import re +import shutil +import sys + +from base64 import urlsafe_b64encode +from email.parser import Parser + +from pip.backwardcompat import ConfigParser, StringIO +from pip.exceptions import InvalidWheelFilename, UnsupportedWheel +from pip.locations import distutils_scheme +from pip.log import logger +from pip import pep425tags +from pip.util import call_subprocess, normalize_path, make_path_relative +import pkg_resources +from distlib.scripts import ScriptMaker + + +wheel_ext = '.whl' + +VERSION_COMPATIBLE = (1, 0) + + +def rehash(path, algo='sha256', blocksize=1<<20): + """Return (hash, length) for path using hashlib.new(algo)""" + h = hashlib.new(algo) + length = 0 + with open(path, 'rb') as f: + block = f.read(blocksize) + while block: + length += len(block) + h.update(block) + block = f.read(blocksize) + digest = 'sha256='+urlsafe_b64encode(h.digest()).decode('latin1').rstrip('=') + return (digest, length) + +try: + unicode + def binary(s): + if isinstance(s, unicode): + return s.encode('ascii') + return s +except NameError: + def binary(s): + if isinstance(s, str): + return s.encode('ascii') + +def open_for_csv(name, mode): + if sys.version_info[0] < 3: + nl = {} + bin = 'b' + else: + nl = { 'newline': '' } + bin = '' + return open(name, mode + bin, **nl) + +def fix_script(path): + """Replace #!python with #!/path/to/python + Return True if file was changed.""" + # XXX RECORD hashes will need to be updated + if os.path.isfile(path): + script = open(path, 'rb') + try: + firstline = script.readline() + if not firstline.startswith(binary('#!python')): + return False + exename = sys.executable.encode(sys.getfilesystemencoding()) + firstline = 
binary('#!') + exename + binary(os.linesep) + rest = script.read() + finally: + script.close() + script = open(path, 'wb') + try: + script.write(firstline) + script.write(rest) + finally: + script.close() + return True + +dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?) + \.dist-info$""", re.VERBOSE) + +def root_is_purelib(name, wheeldir): + """ + Return True if the extracted wheel in wheeldir should go into purelib. + """ + name_folded = name.replace("-", "_") + for item in os.listdir(wheeldir): + match = dist_info_re.match(item) + if match and match.group('name') == name_folded: + with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: + for line in wheel: + line = line.lower().rstrip() + if line == "root-is-purelib: true": + return True + return False + + +def get_entrypoints(filename): + if not os.path.exists(filename): + return {}, {} + + # This is done because you can pass a string to entry_points wrappers which + # means that they may or may not be valid INI files. The attempt here is to + # strip leading and trailing whitespace in order to make them valid INI + # files. + with open(filename) as fp: + data = StringIO() + for line in fp: + data.write(line.strip()) + data.write("\n") + data.seek(0) + + cp = ConfigParser.RawConfigParser() + cp.readfp(data) + + console = {} + gui = {} + if cp.has_section('console_scripts'): + console = dict(cp.items('console_scripts')) + if cp.has_section('gui_scripts'): + gui = dict(cp.items('gui_scripts')) + return console, gui + + +def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None, + pycompile=True, scheme=None): + """Install a wheel""" + + if not scheme: + scheme = distutils_scheme(name, user=user, home=home, root=root) + + if root_is_purelib(name, wheeldir): + lib_dir = scheme['purelib'] + else: + lib_dir = scheme['platlib'] + + info_dir = [] + data_dirs = [] + source = wheeldir.rstrip(os.path.sep) + os.path.sep + + # Record details of the files moved + # installed = files copied from the wheel to the destination + # changed = files changed while installing (scripts #! line typically) + # generated = files newly generated during the install (script wrappers) + installed = {} + changed = set() + generated = [] + + # Compile all of the pyc files that we're going to be installing + if pycompile: + compileall.compile_dir(source, force=True, quiet=True) + + def normpath(src, p): + return make_path_relative(src, p).replace(os.path.sep, '/') + + def record_installed(srcfile, destfile, modified=False): + """Map archive RECORD paths to installation RECORD paths.""" + oldpath = normpath(srcfile, wheeldir) + newpath = normpath(destfile, lib_dir) + installed[oldpath] = newpath + if modified: + changed.add(destfile) + + def clobber(source, dest, is_base, fixer=None, filter=None): + if not os.path.exists(dest): # common for the 'include' path + os.makedirs(dest) + + for dir, subdirs, files in os.walk(source): + basedir = dir[len(source):].lstrip(os.path.sep) + destdir = os.path.join(dest, basedir) + if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): + continue + for s in subdirs: + destsubdir = os.path.join(dest, basedir, s) + if is_base and basedir == '' and destsubdir.endswith('.data'): + data_dirs.append(s) + continue + elif (is_base + and s.endswith('.dist-info') + # is self.req.project_name case preserving? 
+ and s.lower().startswith(req.project_name.replace('-', '_').lower())): + assert not info_dir, 'Multiple .dist-info directories' + info_dir.append(destsubdir) + for f in files: + # Skip unwanted files + if filter and filter(f): + continue + srcfile = os.path.join(dir, f) + destfile = os.path.join(dest, basedir, f) + # directory creation is lazy and after the file filtering above + # to ensure we don't install empty dirs; empty dirs can't be + # uninstalled. + if not os.path.exists(destdir): + os.makedirs(destdir) + # use copy2 (not move) to be extra sure we're not moving + # directories over; copy2 fails for directories. this would + # fail tests (not during released/user execution) + shutil.copy2(srcfile, destfile) + changed = False + if fixer: + changed = fixer(destfile) + record_installed(srcfile, destfile, changed) + + clobber(source, lib_dir, True) + + assert info_dir, "%s .dist-info directory not found" % req + + # Get the defined entry points + ep_file = os.path.join(info_dir[0], 'entry_points.txt') + console, gui = get_entrypoints(ep_file) + + def is_entrypoint_wrapper(name): + # EP, EP.exe and EP-script.py are scripts generated for + # entry point EP by setuptools + if name.lower().endswith('.exe'): + matchname = name[:-4] + elif name.lower().endswith('-script.py'): + matchname = name[:-10] + elif name.lower().endswith(".pya"): + matchname = name[:-4] + else: + matchname = name + # Ignore setuptools-generated scripts + return (matchname in console or matchname in gui) + + for datadir in data_dirs: + fixer = None + filter = None + for subdir in os.listdir(os.path.join(wheeldir, datadir)): + fixer = None + if subdir == 'scripts': + fixer = fix_script + filter = is_entrypoint_wrapper + source = os.path.join(wheeldir, datadir, subdir) + dest = scheme[subdir] + clobber(source, dest, False, fixer=fixer, filter=filter) + + maker = ScriptMaker(None, scheme['scripts']) + + # Ensure we don't generate any variants for scripts because this is almost + # never what somebody wants. + # See https://bitbucket.org/pypa/distlib/issue/35/ + maker.variants = set(('', )) + + # This is required because otherwise distlib creates scripts that are not + # executable. + # See https://bitbucket.org/pypa/distlib/issue/32/ + maker.set_mode = True + + # Simplify the script and fix the fact that the default script swallows + # every single stack trace. + # See https://bitbucket.org/pypa/distlib/issue/34/ + # See https://bitbucket.org/pypa/distlib/issue/33/ + def _get_script_text(entry): + return maker.script_template % { + "module": entry.prefix, + "import_name": entry.suffix.split(".")[0], + "func": entry.suffix, + } + + maker._get_script_text = _get_script_text + maker.script_template = """# -*- coding: utf-8 -*- +import re +import sys + +from %(module)s import %(import_name)s + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(%(func)s()) +""" + + # Special case pip and setuptools to generate versioned wrappers + # + # The issue is that some projects (specifically, pip and setuptools) use + # code in setup.py to create "versioned" entry points - pip2.7 on Python + # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into + # the wheel metadata at build time, and so if the wheel is installed with + # a *different* version of Python the entry points will be wrong. The + # correct fix for this is to enhance the metadata to be able to describe + # such versioned entry points, but that won't happen till Metadata 2.0 is + # available. 
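
To make the ScriptMaker configuration above concrete, a small standalone sketch (entry point and target directory are hypothetical; distlib's make() returns the list of files it wrote):

    import os
    from distlib.scripts import ScriptMaker

    os.makedirs('/tmp/bin', exist_ok=True)
    maker = ScriptMaker(None, '/tmp/bin')  # source dir is unused for entry-point specs
    maker.variants = set(('',))            # generate only 'demo', no 'demo-X.Y' variant
    maker.set_mode = True                  # mark the generated script executable
    print(maker.make('demo = demo.cli:main'))  # e.g. ['/tmp/bin/demo']
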
+ # In the meantime, projects using versioned entry points will either have
+ # incorrect versioned entry points, or they will not be able to distribute
+ # "universal" wheels (i.e., they will need a wheel per Python version).
+ #
+ # Because setuptools and pip are bundled with _ensurepip and virtualenv,
+ # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
+ # override the versioned entry points in the wheel and generate the
+ # correct ones. This code is purely a short-term measure until Metadata 2.0
+ # is available.
+ #
+ # To add to the level of hack in this section of code: in order to support
+ # ensurepip, this code will look for an ``ENSUREPIP_OPTIONS`` environment
+ # variable which will control which version scripts get installed.
+ #
+ # ENSUREPIP_OPTIONS=altinstall
+ # - Only pipX.Y and easy_install-X.Y will be generated and installed
+ # ENSUREPIP_OPTIONS=install
+ # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
+ # that this behavior applies whenever ENSUREPIP_OPTIONS is set to
+ # anything other than altinstall
+ # DEFAULT
+ # - The default behavior is to install pip, pipX, pipX.Y, easy_install
+ # and easy_install-X.Y.
+ pip_script = console.pop('pip', None)
+ if pip_script:
+ if "ENSUREPIP_OPTIONS" not in os.environ:
+ spec = 'pip = ' + pip_script
+ generated.extend(maker.make(spec))
+
+ if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
+ spec = 'pip%s = %s' % (sys.version[:1], pip_script)
+ generated.extend(maker.make(spec))
+
+ spec = 'pip%s = %s' % (sys.version[:3], pip_script)
+ generated.extend(maker.make(spec))
+ # Delete any other versioned pip entry points
+ pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
+ for k in pip_ep:
+ del console[k]
+ easy_install_script = console.pop('easy_install', None)
+ if easy_install_script:
+ if "ENSUREPIP_OPTIONS" not in os.environ:
+ spec = 'easy_install = ' + easy_install_script
+ generated.extend(maker.make(spec))
+
+ spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
+ generated.extend(maker.make(spec))
+ # Delete any other versioned easy_install entry points
+ easy_install_ep = [k for k in console
+ if re.match(r'easy_install(-\d\.\d)?$', k)]
+ for k in easy_install_ep:
+ del console[k]
+
+ # Generate the console and GUI entry points specified in the wheel
+ if len(console) > 0:
+ generated.extend(maker.make_multiple(['%s = %s' % kv for kv in console.items()]))
+ if len(gui) > 0:
+ generated.extend(maker.make_multiple(['%s = %s' % kv for kv in gui.items()], {'gui': True}))
+
+ record = os.path.join(info_dir[0], 'RECORD')
+ temp_record = os.path.join(info_dir[0], 'RECORD.pip')
+ with open_for_csv(record, 'r') as record_in:
+ with open_for_csv(temp_record, 'w+') as record_out:
+ reader = csv.reader(record_in)
+ writer = csv.writer(record_out)
+ for row in reader:
+ row[0] = installed.pop(row[0], row[0])
+ if row[0] in changed:
+ row[1], row[2] = rehash(row[0])
+ writer.writerow(row)
+ for f in generated:
+ h, l = rehash(f)
+ writer.writerow((f, h, l))
+ for f in installed:
+ writer.writerow((installed[f], '', ''))
+ shutil.move(temp_record, record)
+
+def _unique(fn):
+ @functools.wraps(fn)
+ def unique(*args, **kw):
+ seen = set()
+ for item in fn(*args, **kw):
+ if item not in seen:
+ seen.add(item)
+ yield item
+ return unique
+
+# TODO: this goes somewhere besides the wheel module
+@_unique
+def uninstallation_paths(dist):
+ """
+ Yield all the uninstallation paths for dist based on RECORD-without-.pyc
+
+ Yield paths to all the files in RECORD.
For each .py file in RECORD, add
+ the .pyc in the same directory.
+
+ UninstallPathSet.add() takes care of the __pycache__ .pyc.
+ """
+ from pip.req import FakeFile # circular import
+ r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
+ for row in r:
+ path = os.path.join(dist.location, row[0])
+ yield path
+ if path.endswith('.py'):
+ dn, fn = os.path.split(path)
+ base = fn[:-3]
+ path = os.path.join(dn, base+'.pyc')
+ yield path
+
+
+def wheel_version(source_dir):
+ """
+ Return the Wheel-Version of an extracted wheel, if possible.
+
+ Otherwise, return False if we couldn't parse / extract it.
+ """
+ try:
+ dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]
+
+ wheel_data = dist.get_metadata('WHEEL')
+ wheel_data = Parser().parsestr(wheel_data)
+
+ version = wheel_data['Wheel-Version'].strip()
+ version = tuple(map(int, version.split('.')))
+ return version
+ except:
+ return False
+
+
+def check_compatibility(version, name):
+ """
+ Raises errors or warns if called with an incompatible Wheel-Version.
+
+ Pip should refuse to install a Wheel-Version that's a major series
+ ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
+ installing a version only a minor version ahead (e.g. 1.2 > 1.1).
+
+ version: a 2-tuple representing a Wheel-Version (Major, Minor)
+ name: name of wheel or package to raise exception about
+
+ :raises UnsupportedWheel: when an incompatible Wheel-Version is given
+ """
+ if not version:
+ raise UnsupportedWheel(
+ "%s is in an unsupported or invalid wheel" % name
+ )
+ if version[0] > VERSION_COMPATIBLE[0]:
+ raise UnsupportedWheel(
+ "%s's Wheel-Version (%s) is not compatible with this version "
+ "of pip" % (name, '.'.join(map(str, version)))
+ )
+ elif version > VERSION_COMPATIBLE:
+ logger.warn('Installing from a newer Wheel-Version (%s)'
+ % '.'.join(map(str, version)))
+
+
+class Wheel(object):
+ """A wheel file"""
+
+ # TODO: maybe move the install code into this class
+
+ wheel_file_re = re.compile(
+ r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
+ ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
+ \.whl|\.dist-info)$""",
+ re.VERBOSE)
+
+ def __init__(self, filename):
+ """
+ :raises InvalidWheelFilename: when the filename is invalid for a wheel
+ """
+ wheel_info = self.wheel_file_re.match(filename)
+ if not wheel_info:
+ raise InvalidWheelFilename("%s is not a valid wheel filename." % filename)
+ self.filename = filename
+ self.name = wheel_info.group('name').replace('_', '-')
+ # we'll assume "_" means "-" due to wheel naming scheme
+ # (https://github.com/pypa/pip/issues/1150)
+ self.version = wheel_info.group('ver').replace('_', '-')
+ self.pyversions = wheel_info.group('pyver').split('.')
+ self.abis = wheel_info.group('abi').split('.')
+ self.plats = wheel_info.group('plat').split('.')
+
+ # All the tag combinations from this file
+ self.file_tags = set((x, y, z) for x in self.pyversions for y
+ in self.abis for z in self.plats)
+
+ def support_index_min(self, tags=None):
+ """
+ Return the lowest index that one of the wheel's file_tag combinations
+ achieves in the supported_tags list, e.g. if there are 8 supported tags,
+ and one of the file tags is first in the list, then return 0. Returns
+ None if the wheel is not supported.
+ """ + if tags is None: # for mock + tags = pep425tags.supported_tags + indexes = [tags.index(c) for c in self.file_tags if c in tags] + return min(indexes) if indexes else None + + def supported(self, tags=None): + """Is this wheel supported on this system?""" + if tags is None: # for mock + tags = pep425tags.supported_tags + return bool(set(tags).intersection(self.file_tags)) + + +class WheelBuilder(object): + """Build wheels from a RequirementSet.""" + + def __init__(self, requirement_set, finder, wheel_dir, build_options=[], global_options=[]): + self.requirement_set = requirement_set + self.finder = finder + self.wheel_dir = normalize_path(wheel_dir) + self.build_options = build_options + self.global_options = global_options + + def _build_one(self, req): + """Build one wheel.""" + + base_args = [ + sys.executable, '-c', + "import setuptools;__file__=%r;"\ + "exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % req.setup_py] + \ + list(self.global_options) + + logger.notify('Running setup.py bdist_wheel for %s' % req.name) + logger.notify('Destination directory: %s' % self.wheel_dir) + wheel_args = base_args + ['bdist_wheel', '-d', self.wheel_dir] + self.build_options + try: + call_subprocess(wheel_args, cwd=req.source_dir, show_stdout=False) + return True + except: + logger.error('Failed building wheel for %s' % req.name) + return False + + def build(self): + """Build wheels.""" + + #unpack and constructs req set + self.requirement_set.prepare_files(self.finder) + + reqset = self.requirement_set.requirements.values() + + buildset = [req for req in reqset if not req.is_wheel] + + if not buildset: + return + + #build the wheels + logger.notify( + 'Building wheels for collected packages: %s' % + ','.join([req.name for req in buildset]) + ) + logger.indent += 2 + build_success, build_failure = [], [] + for req in buildset: + if self._build_one(req): + build_success.append(req) + else: + build_failure.append(req) + logger.indent -= 2 + + #notify sucess/failure + if build_success: + logger.notify('Successfully built %s' % ' '.join([req.name for req in build_success])) + if build_failure: + logger.notify('Failed to build %s' % ' '.join([req.name for req in build_failure])) diff --git a/lib/python3.4/site-packages/pkg_resources.py b/lib/python3.4/site-packages/pkg_resources.py new file mode 100644 index 0000000..11debf6 --- /dev/null +++ b/lib/python3.4/site-packages/pkg_resources.py @@ -0,0 +1,2891 @@ +""" +Package resource API +-------------------- + +A resource is a logical file contained within a package, or a logical +subdirectory thereof. The package resource API expects resource names +to have their path parts separated with ``/``, *not* whatever the local +path separator is. Do not use os.path operations to manipulate resource +names being passed into the API. + +The package resource API is designed to work with normal filesystem packages, +.egg files, and unpacked .egg files. It can also work in a limited way with +.zip files and with custom PEP 302 loaders that support the ``get_data()`` +method. 
+""" + +import sys +import os +import time +import re +import imp +import zipfile +import zipimport +import warnings +import stat +import functools +import pkgutil +import token +import symbol +import operator +import platform +import collections +import plistlib +import email.parser +import tempfile +from pkgutil import get_importer + +try: + from urlparse import urlparse, urlunparse +except ImportError: + from urllib.parse import urlparse, urlunparse + +try: + frozenset +except NameError: + from sets import ImmutableSet as frozenset +try: + basestring + next = lambda o: o.next() + from cStringIO import StringIO as BytesIO +except NameError: + basestring = str + from io import BytesIO + def execfile(fn, globs=None, locs=None): + if globs is None: + globs = globals() + if locs is None: + locs = globs + exec(compile(open(fn).read(), fn, 'exec'), globs, locs) + +# capture these to bypass sandboxing +from os import utime +try: + from os import mkdir, rename, unlink + WRITE_SUPPORT = True +except ImportError: + # no write support, probably under GAE + WRITE_SUPPORT = False + +from os import open as os_open +from os.path import isdir, split + +# Avoid try/except due to potential problems with delayed import mechanisms. +if sys.version_info >= (3, 3) and sys.implementation.name == "cpython": + import importlib._bootstrap as importlib_bootstrap +else: + importlib_bootstrap = None + +try: + import parser +except ImportError: + pass + +def _bypass_ensure_directory(name, mode=0o777): + # Sandbox-bypassing version of ensure_directory() + if not WRITE_SUPPORT: + raise IOError('"os.mkdir" not supported on this platform.') + dirname, filename = split(name) + if dirname and filename and not isdir(dirname): + _bypass_ensure_directory(dirname) + mkdir(dirname, mode) + + +_state_vars = {} + +def _declare_state(vartype, **kw): + globals().update(kw) + _state_vars.update(dict.fromkeys(kw, vartype)) + +def __getstate__(): + state = {} + g = globals() + for k, v in _state_vars.items(): + state[k] = g['_sget_'+v](g[k]) + return state + +def __setstate__(state): + g = globals() + for k, v in state.items(): + g['_sset_'+_state_vars[k]](k, g[k], v) + return state + +def _sget_dict(val): + return val.copy() + +def _sset_dict(key, ob, state): + ob.clear() + ob.update(state) + +def _sget_object(val): + return val.__getstate__() + +def _sset_object(key, ob, state): + ob.__setstate__(state) + +_sget_none = _sset_none = lambda *args: None + + +def get_supported_platform(): + """Return this platform's maximum compatible version. + + distutils.util.get_platform() normally reports the minimum version + of Mac OS X that would be required to *use* extensions produced by + distutils. But what we want when checking compatibility is to know the + version of Mac OS X that we are *running*. To allow usage of packages that + explicitly require a newer version of Mac OS X, we must also know the + current version of the OS. + + If this condition occurs for any other platform with a version in its + platform strings, this function should be extended accordingly. 
+ """ + plat = get_build_platform() + m = macosVersionString.match(plat) + if m is not None and sys.platform == "darwin": + try: + plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) + except ValueError: + # not Mac OS X + pass + return plat + +__all__ = [ + # Basic resource access and distribution/entry point discovery + 'require', 'run_script', 'get_provider', 'get_distribution', + 'load_entry_point', 'get_entry_map', 'get_entry_info', + 'iter_entry_points', + 'resource_string', 'resource_stream', 'resource_filename', + 'resource_listdir', 'resource_exists', 'resource_isdir', + + # Environmental control + 'declare_namespace', 'working_set', 'add_activation_listener', + 'find_distributions', 'set_extraction_path', 'cleanup_resources', + 'get_default_cache', + + # Primary implementation classes + 'Environment', 'WorkingSet', 'ResourceManager', + 'Distribution', 'Requirement', 'EntryPoint', + + # Exceptions + 'ResolutionError', 'VersionConflict', 'DistributionNotFound', + 'UnknownExtra', 'ExtractionError', + + # Parsing functions and string utilities + 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', + 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', + 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', + + # filesystem utilities + 'ensure_directory', 'normalize_path', + + # Distribution "precedence" constants + 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', + + # "Provider" interfaces, implementations, and registration/lookup APIs + 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', + 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', + 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', + 'register_finder', 'register_namespace_handler', 'register_loader_type', + 'fixup_namespace_packages', 'get_importer', + + # Deprecated/backward compatibility only + 'run_main', 'AvailableDistributions', +] + +class ResolutionError(Exception): + """Abstract base for dependency resolution errors""" + def __repr__(self): + return self.__class__.__name__+repr(self.args) + +class VersionConflict(ResolutionError): + """An already-installed version conflicts with the requested version""" + +class DistributionNotFound(ResolutionError): + """A requested distribution was not found""" + +class UnknownExtra(ResolutionError): + """Distribution doesn't have an "extra feature" of the given name""" +_provider_factories = {} + +PY_MAJOR = sys.version[:3] +EGG_DIST = 3 +BINARY_DIST = 2 +SOURCE_DIST = 1 +CHECKOUT_DIST = 0 +DEVELOP_DIST = -1 + +def register_loader_type(loader_type, provider_factory): + """Register `provider_factory` to make providers for `loader_type` + + `loader_type` is the type or class of a PEP 302 ``module.__loader__``, + and `provider_factory` is a function that, passed a *module* object, + returns an ``IResourceProvider`` for that module. 
+ """ + _provider_factories[loader_type] = provider_factory + +def get_provider(moduleOrReq): + """Return an IResourceProvider for the named module or requirement""" + if isinstance(moduleOrReq, Requirement): + return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] + try: + module = sys.modules[moduleOrReq] + except KeyError: + __import__(moduleOrReq) + module = sys.modules[moduleOrReq] + loader = getattr(module, '__loader__', None) + return _find_adapter(_provider_factories, loader)(module) + +def _macosx_vers(_cache=[]): + if not _cache: + version = platform.mac_ver()[0] + # fallback for MacPorts + if version == '': + plist = '/System/Library/CoreServices/SystemVersion.plist' + if os.path.exists(plist): + if hasattr(plistlib, 'readPlist'): + plist_content = plistlib.readPlist(plist) + if 'ProductVersion' in plist_content: + version = plist_content['ProductVersion'] + + _cache.append(version.split('.')) + return _cache[0] + +def _macosx_arch(machine): + return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) + +def get_build_platform(): + """Return this platform's string for platform-specific distributions + + XXX Currently this is the same as ``distutils.util.get_platform()``, but it + needs some hacks for Linux and Mac OS X. + """ + try: + # Python 2.7 or >=3.2 + from sysconfig import get_platform + except ImportError: + from distutils.util import get_platform + + plat = get_platform() + if sys.platform == "darwin" and not plat.startswith('macosx-'): + try: + version = _macosx_vers() + machine = os.uname()[4].replace(" ", "_") + return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), + _macosx_arch(machine)) + except ValueError: + # if someone is running a non-Mac darwin system, this will fall + # through to the default implementation + pass + return plat + +macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") +darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") +# XXX backward compat +get_platform = get_build_platform + + +def compatible_platforms(provided, required): + """Can code for the `provided` platform run on the `required` platform? + + Returns true if either platform is ``None``, or the platforms are equal. + + XXX Needs compatibility checks for Linux and other unixy OSes. + """ + if provided is None or required is None or provided==required: + # easy case + return True + + # Mac OS X special cases + reqMac = macosVersionString.match(required) + if reqMac: + provMac = macosVersionString.match(provided) + + # is this a Mac package? + if not provMac: + # this is backwards compatibility for packages built before + # setuptools 0.6. All packages built after this point will + # use the new macosx designation. + provDarwin = darwinVersionString.match(provided) + if provDarwin: + dversion = int(provDarwin.group(1)) + macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) + if dversion == 7 and macosversion >= "10.3" or \ + dversion == 8 and macosversion >= "10.4": + return True + # egg isn't macosx or legacy darwin + return False + + # are they the same major version and machine type? + if provMac.group(1) != reqMac.group(1) or \ + provMac.group(3) != reqMac.group(3): + return False + + # is the required OS major update >= the provided one? 
+ if int(provMac.group(2)) > int(reqMac.group(2)): + return False + + return True + + # XXX Linux and other platforms' special cases should go here + return False + + +def run_script(dist_spec, script_name): + """Locate distribution `dist_spec` and run its `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + require(dist_spec)[0].run_script(script_name, ns) + +# backward compatibility +run_main = run_script + +def get_distribution(dist): + """Return a current distribution object for a Requirement or string""" + if isinstance(dist, basestring): + dist = Requirement.parse(dist) + if isinstance(dist, Requirement): + dist = get_provider(dist) + if not isinstance(dist, Distribution): + raise TypeError("Expected string, Requirement, or Distribution", dist) + return dist + +def load_entry_point(dist, group, name): + """Return `name` entry point of `group` for `dist` or raise ImportError""" + return get_distribution(dist).load_entry_point(group, name) + +def get_entry_map(dist, group=None): + """Return the entry point map for `group`, or the full entry map""" + return get_distribution(dist).get_entry_map(group) + +def get_entry_info(dist, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return get_distribution(dist).get_entry_info(group, name) + + +class IMetadataProvider: + + def has_metadata(name): + """Does the package's distribution contain the named metadata?""" + + def get_metadata(name): + """The named metadata resource as a string""" + + def get_metadata_lines(name): + """Yield named metadata resource as list of non-blank non-comment lines + + Leading and trailing whitespace is stripped from each line, and lines + with ``#`` as the first non-blank character are omitted.""" + + def metadata_isdir(name): + """Is the named metadata a directory? (like ``os.path.isdir()``)""" + + def metadata_listdir(name): + """List of metadata names in the directory (like ``os.listdir()``)""" + + def run_script(script_name, namespace): + """Execute the named script in the supplied namespace dictionary""" + + +class IResourceProvider(IMetadataProvider): + """An object that provides access to package resources""" + + def get_resource_filename(manager, resource_name): + """Return a true filesystem path for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_stream(manager, resource_name): + """Return a readable file-like object for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_string(manager, resource_name): + """Return a string containing the contents of `resource_name` + + `manager` must be an ``IResourceManager``""" + + def has_resource(resource_name): + """Does the package contain the named resource?""" + + def resource_isdir(resource_name): + """Is the named resource a directory? (like ``os.path.isdir()``)""" + + def resource_listdir(resource_name): + """List of resource names in the directory (like ``os.listdir()``)""" + + +class WorkingSet(object): + """A collection of active distributions on sys.path (or a similar list)""" + + def __init__(self, entries=None): + """Create working set from list of path entries (default=sys.path)""" + self.entries = [] + self.entry_keys = {} + self.by_key = {} + self.callbacks = [] + + if entries is None: + entries = sys.path + + for entry in entries: + self.add_entry(entry) + + @classmethod + def _build_master(cls): + """ + Prepare the master working set. 
+ """ + ws = cls() + try: + from __main__ import __requires__ + except ImportError: + # The main program does not list any requirements + return ws + + # ensure the requirements are met + try: + ws.require(__requires__) + except VersionConflict: + return cls._build_from_requirements(__requires__) + + return ws + + @classmethod + def _build_from_requirements(cls, req_spec): + """ + Build a working set from a requirement spec. Rewrites sys.path. + """ + # try it without defaults already on sys.path + # by starting with an empty path + ws = cls([]) + reqs = parse_requirements(req_spec) + dists = ws.resolve(reqs, Environment()) + for dist in dists: + ws.add(dist) + + # add any missing entries from sys.path + for entry in sys.path: + if entry not in ws.entries: + ws.add_entry(entry) + + # then copy back to sys.path + sys.path[:] = ws.entries + return ws + + def add_entry(self, entry): + """Add a path item to ``.entries``, finding any distributions on it + + ``find_distributions(entry, True)`` is used to find distributions + corresponding to the path entry, and they are added. `entry` is + always appended to ``.entries``, even if it is already present. + (This is because ``sys.path`` can contain the same value more than + once, and the ``.entries`` of the ``sys.path`` WorkingSet should always + equal ``sys.path``.) + """ + self.entry_keys.setdefault(entry, []) + self.entries.append(entry) + for dist in find_distributions(entry, True): + self.add(dist, entry, False) + + def __contains__(self, dist): + """True if `dist` is the active distribution for its project""" + return self.by_key.get(dist.key) == dist + + def find(self, req): + """Find a distribution matching requirement `req` + + If there is an active distribution for the requested project, this + returns it as long as it meets the version requirement specified by + `req`. But, if there is an active distribution for the project and it + does *not* meet the `req` requirement, ``VersionConflict`` is raised. + If there is no active distribution for the requested project, ``None`` + is returned. + """ + dist = self.by_key.get(req.key) + if dist is not None and dist not in req: + # XXX add more info + raise VersionConflict(dist, req) + else: + return dist + + def iter_entry_points(self, group, name=None): + """Yield entry point objects from `group` matching `name` + + If `name` is None, yields all entry points in `group` from all + distributions in the working set, otherwise only ones matching + both `group` and `name` are yielded (in distribution order). + """ + for dist in self: + entries = dist.get_entry_map(group) + if name is None: + for ep in entries.values(): + yield ep + elif name in entries: + yield entries[name] + + def run_script(self, requires, script_name): + """Locate distribution for `requires` and run `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + self.require(requires)[0].run_script(script_name, ns) + + def __iter__(self): + """Yield distributions for non-duplicate projects in the working set + + The yield order is the order in which the items' path entries were + added to the working set. 
+ """ + seen = {} + for item in self.entries: + if item not in self.entry_keys: + # workaround a cache issue + continue + + for key in self.entry_keys[item]: + if key not in seen: + seen[key]=1 + yield self.by_key[key] + + def add(self, dist, entry=None, insert=True, replace=False): + """Add `dist` to working set, associated with `entry` + + If `entry` is unspecified, it defaults to the ``.location`` of `dist`. + On exit from this routine, `entry` is added to the end of the working + set's ``.entries`` (if it wasn't already present). + + `dist` is only added to the working set if it's for a project that + doesn't already have a distribution in the set, unless `replace=True`. + If it's added, any callbacks registered with the ``subscribe()`` method + will be called. + """ + if insert: + dist.insert_on(self.entries, entry) + + if entry is None: + entry = dist.location + keys = self.entry_keys.setdefault(entry,[]) + keys2 = self.entry_keys.setdefault(dist.location,[]) + if not replace and dist.key in self.by_key: + # ignore hidden distros + return + + self.by_key[dist.key] = dist + if dist.key not in keys: + keys.append(dist.key) + if dist.key not in keys2: + keys2.append(dist.key) + self._added_new(dist) + + def resolve(self, requirements, env=None, installer=None, + replace_conflicting=False): + """List all distributions needed to (recursively) meet `requirements` + + `requirements` must be a sequence of ``Requirement`` objects. `env`, + if supplied, should be an ``Environment`` instance. If + not supplied, it defaults to all distributions available within any + entry or distribution in the working set. `installer`, if supplied, + will be invoked with each requirement that cannot be met by an + already-installed distribution; it should return a ``Distribution`` or + ``None``. + + Unless `replace_conflicting=True`, raises a VersionConflict exception if + any requirements are found on the path that have the correct name but + the wrong version. Otherwise, if an `installer` is supplied it will be + invoked to obtain the correct version of the requirement and activate + it. + """ + + # set up the stack + requirements = list(requirements)[::-1] + # set of processed requirements + processed = {} + # key -> dist + best = {} + to_activate = [] + + while requirements: + # process dependencies breadth-first + req = requirements.pop(0) + if req in processed: + # Ignore cyclic or redundant dependencies + continue + dist = best.get(req.key) + if dist is None: + # Find the best distribution and add it to the map + dist = self.by_key.get(req.key) + if dist is None or (dist not in req and replace_conflicting): + ws = self + if env is None: + if dist is None: + env = Environment(self.entries) + else: + # Use an empty environment and workingset to avoid + # any further conflicts with the conflicting + # distribution + env = Environment([]) + ws = WorkingSet([]) + dist = best[req.key] = env.best_match(req, ws, installer) + if dist is None: + #msg = ("The '%s' distribution was not found on this " + # "system, and is required by this application.") + #raise DistributionNotFound(msg % req) + + # unfortunately, zc.buildout uses a str(err) + # to get the name of the distribution here.. 
+ raise DistributionNotFound(req) + to_activate.append(dist) + if dist not in req: + # Oops, the "best" so far conflicts with a dependency + # XXX put more info here + raise VersionConflict(dist, req) + requirements.extend(dist.requires(req.extras)[::-1]) + processed[req] = True + + # return list of distros to activate + return to_activate + + def find_plugins(self, plugin_env, full_env=None, installer=None, + fallback=True): + """Find all activatable distributions in `plugin_env` + + Example usage:: + + distributions, errors = working_set.find_plugins( + Environment(plugin_dirlist) + ) + # add plugins+libs to sys.path + map(working_set.add, distributions) + # display errors + print('Could not load', errors) + + The `plugin_env` should be an ``Environment`` instance that contains + only distributions that are in the project's "plugin directory" or + directories. The `full_env`, if supplied, should be an ``Environment`` + contains all currently-available distributions. If `full_env` is not + supplied, one is created automatically from the ``WorkingSet`` this + method is called on, which will typically mean that every directory on + ``sys.path`` will be scanned for distributions. + + `installer` is a standard installer callback as used by the + ``resolve()`` method. The `fallback` flag indicates whether we should + attempt to resolve older versions of a plugin if the newest version + cannot be resolved. + + This method returns a 2-tuple: (`distributions`, `error_info`), where + `distributions` is a list of the distributions found in `plugin_env` + that were loadable, along with any other distributions that are needed + to resolve their dependencies. `error_info` is a dictionary mapping + unloadable plugin distributions to an exception instance describing the + error that occurred. Usually this will be a ``DistributionNotFound`` or + ``VersionConflict`` instance. + """ + + plugin_projects = list(plugin_env) + # scan project names in alphabetic order + plugin_projects.sort() + + error_info = {} + distributions = {} + + if full_env is None: + env = Environment(self.entries) + env += plugin_env + else: + env = full_env + plugin_env + + shadow_set = self.__class__([]) + # put all our entries in shadow_set + list(map(shadow_set.add, self)) + + for project_name in plugin_projects: + + for dist in plugin_env[project_name]: + + req = [dist.as_requirement()] + + try: + resolvees = shadow_set.resolve(req, env, installer) + + except ResolutionError: + v = sys.exc_info()[1] + # save error info + error_info[dist] = v + if fallback: + # try the next older version of project + continue + else: + # give up on this project, keep going + break + + else: + list(map(shadow_set.add, resolvees)) + distributions.update(dict.fromkeys(resolvees)) + + # success, no need to try any more versions of this project + break + + distributions = list(distributions) + distributions.sort() + + return distributions, error_info + + def require(self, *requirements): + """Ensure that distributions matching `requirements` are activated + + `requirements` must be a string or a (possibly-nested) sequence + thereof, specifying the distributions and versions required. The + return value is a sequence of the distributions that needed to be + activated to fulfill the requirements; all relevant distributions are + included, even if they were already activated in this working set. 
+ """ + needed = self.resolve(parse_requirements(requirements)) + + for dist in needed: + self.add(dist) + + return needed + + def subscribe(self, callback): + """Invoke `callback` for all distributions (including existing ones)""" + if callback in self.callbacks: + return + self.callbacks.append(callback) + for dist in self: + callback(dist) + + def _added_new(self, dist): + for callback in self.callbacks: + callback(dist) + + def __getstate__(self): + return ( + self.entries[:], self.entry_keys.copy(), self.by_key.copy(), + self.callbacks[:] + ) + + def __setstate__(self, e_k_b_c): + entries, keys, by_key, callbacks = e_k_b_c + self.entries = entries[:] + self.entry_keys = keys.copy() + self.by_key = by_key.copy() + self.callbacks = callbacks[:] + + +class Environment(object): + """Searchable snapshot of distributions on a search path""" + + def __init__(self, search_path=None, platform=get_supported_platform(), + python=PY_MAJOR): + """Snapshot distributions available on a search path + + Any distributions found on `search_path` are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. + + `platform` is an optional string specifying the name of the platform + that platform-specific distributions must be compatible with. If + unspecified, it defaults to the current platform. `python` is an + optional string naming the desired version of Python (e.g. ``'3.3'``); + it defaults to the current version. + + You may explicitly set `platform` (and/or `python`) to ``None`` if you + wish to map *all* distributions, not just those compatible with the + running platform or Python version. + """ + self._distmap = {} + self.platform = platform + self.python = python + self.scan(search_path) + + def can_add(self, dist): + """Is distribution `dist` acceptable for this environment? + + The distribution must match the platform and python version + requirements specified when this environment was created, or False + is returned. + """ + return (self.python is None or dist.py_version is None + or dist.py_version==self.python) \ + and compatible_platforms(dist.platform, self.platform) + + def remove(self, dist): + """Remove `dist` from the environment""" + self._distmap[dist.key].remove(dist) + + def scan(self, search_path=None): + """Scan `search_path` for distributions usable in this environment + + Any distributions found are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. Only distributions conforming to + the platform/python version defined at initialization are added. + """ + if search_path is None: + search_path = sys.path + + for item in search_path: + for dist in find_distributions(item): + self.add(dist) + + def __getitem__(self, project_name): + """Return a newest-to-oldest list of distributions for `project_name` + + Uses case-insensitive `project_name` comparison, assuming all the + project's distributions use their project's name converted to all + lowercase as their key. 
+ + """ + distribution_key = project_name.lower() + return self._distmap.get(distribution_key, []) + + def add(self, dist): + """Add `dist` if we ``can_add()`` it and it has not already been added + """ + if self.can_add(dist) and dist.has_version(): + dists = self._distmap.setdefault(dist.key, []) + if dist not in dists: + dists.append(dist) + dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) + + def best_match(self, req, working_set, installer=None): + """Find distribution best matching `req` and usable on `working_set` + + This calls the ``find(req)`` method of the `working_set` to see if a + suitable distribution is already active. (This may raise + ``VersionConflict`` if an unsuitable version of the project is already + active in the specified `working_set`.) If a suitable distribution + isn't active, this method returns the newest distribution in the + environment that meets the ``Requirement`` in `req`. If no suitable + distribution is found, and `installer` is supplied, then the result of + calling the environment's ``obtain(req, installer)`` method will be + returned. + """ + dist = working_set.find(req) + if dist is not None: + return dist + for dist in self[req.key]: + if dist in req: + return dist + # try to download/install + return self.obtain(req, installer) + + def obtain(self, requirement, installer=None): + """Obtain a distribution matching `requirement` (e.g. via download) + + Obtain a distro that matches requirement (e.g. via download). In the + base ``Environment`` class, this routine just returns + ``installer(requirement)``, unless `installer` is None, in which case + None is returned instead. This method is a hook that allows subclasses + to attempt other ways of obtaining a distribution before falling back + to the `installer` argument.""" + if installer is not None: + return installer(requirement) + + def __iter__(self): + """Yield the unique project names of the available distributions""" + for key in self._distmap.keys(): + if self[key]: + yield key + + def __iadd__(self, other): + """In-place addition of a distribution or environment""" + if isinstance(other, Distribution): + self.add(other) + elif isinstance(other, Environment): + for project in other: + for dist in other[project]: + self.add(dist) + else: + raise TypeError("Can't add %r to environment" % (other,)) + return self + + def __add__(self, other): + """Add an environment or distribution to an environment""" + new = self.__class__([], platform=None, python=None) + for env in self, other: + new += env + return new + + +# XXX backward compatibility +AvailableDistributions = Environment + + +class ExtractionError(RuntimeError): + """An error occurred extracting a resource + + The following attributes are available from instances of this exception: + + manager + The resource manager that raised this exception + + cache_path + The base directory for resource extraction + + original_error + The exception instance that caused extraction to fail + """ + + +class ResourceManager: + """Manage resource extraction and packages""" + extraction_path = None + + def __init__(self): + self.cached_files = {} + + def resource_exists(self, package_or_requirement, resource_name): + """Does the named resource exist?""" + return get_provider(package_or_requirement).has_resource(resource_name) + + def resource_isdir(self, package_or_requirement, resource_name): + """Is the named resource an existing directory?""" + return get_provider(package_or_requirement).resource_isdir( + resource_name + ) + + def 
resource_filename(self, package_or_requirement, resource_name): + """Return a true filesystem path for specified resource""" + return get_provider(package_or_requirement).get_resource_filename( + self, resource_name + ) + + def resource_stream(self, package_or_requirement, resource_name): + """Return a readable file-like object for specified resource""" + return get_provider(package_or_requirement).get_resource_stream( + self, resource_name + ) + + def resource_string(self, package_or_requirement, resource_name): + """Return specified resource as a string""" + return get_provider(package_or_requirement).get_resource_string( + self, resource_name + ) + + def resource_listdir(self, package_or_requirement, resource_name): + """List the contents of the named resource directory""" + return get_provider(package_or_requirement).resource_listdir( + resource_name + ) + + def extraction_error(self): + """Give an error message for problems extracting file(s)""" + + old_exc = sys.exc_info()[1] + cache_path = self.extraction_path or get_default_cache() + + err = ExtractionError("""Can't extract file(s) to egg cache + +The following error occurred while trying to extract file(s) to the Python egg +cache: + + %s + +The Python egg cache directory is currently set to: + + %s + +Perhaps your account does not have write access to this directory? You can +change the cache directory by setting the PYTHON_EGG_CACHE environment +variable to point to an accessible directory. +""" % (old_exc, cache_path) + ) + err.manager = self + err.cache_path = cache_path + err.original_error = old_exc + raise err + + def get_cache_path(self, archive_name, names=()): + """Return absolute location in cache for `archive_name` and `names` + + The parent directory of the resulting path will be created if it does + not already exist. `archive_name` should be the base filename of the + enclosing egg (which may not be the name of the enclosing zipfile!), + including its ".egg" extension. `names`, if provided, should be a + sequence of path name parts "under" the egg's extraction location. + + This method should only be called by resource providers that need to + obtain an extraction location, and only for names they intend to + extract, as it tracks the generated names for possible cleanup later. + """ + extract_path = self.extraction_path or get_default_cache() + target_path = os.path.join(extract_path, archive_name+'-tmp', *names) + try: + _bypass_ensure_directory(target_path) + except: + self.extraction_error() + + self._warn_unsafe_extraction_path(extract_path) + + self.cached_files[target_path] = 1 + return target_path + + @staticmethod + def _warn_unsafe_extraction_path(path): + """ + If the default extraction path is overridden and set to an insecure + location, such as /tmp, it opens up an opportunity for an attacker to + replace an extracted file with an unauthorized payload. Warn the user + if a known insecure location is used. + + See Distribute #375 for more details. + """ + if os.name == 'nt' and not path.startswith(os.environ['windir']): + # On Windows, permissions are generally restrictive by default + # and temp directories are not writable by other users, so + # bypass the warning. + return + mode = os.stat(path).st_mode + if mode & stat.S_IWOTH or mode & stat.S_IWGRP: + msg = ("%s is writable by group/others and vulnerable to attack " + "when " + "used with get_resource_filename. Consider a more secure " + "location (set with .set_extraction_path or the " + "PYTHON_EGG_CACHE environment variable)." 
% path) + warnings.warn(msg, UserWarning) + + def postprocess(self, tempname, filename): + """Perform any platform-specific postprocessing of `tempname` + + This is where Mac header rewrites should be done; other platforms don't + have anything special they should do. + + Resource providers should call this method ONLY after successfully + extracting a compressed resource. They must NOT call it on resources + that are already in the filesystem. + + `tempname` is the current (temporary) name of the file, and `filename` + is the name it will be renamed to by the caller after this routine + returns. + """ + + if os.name == 'posix': + # Make the resource executable + mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 + os.chmod(tempname, mode) + + def set_extraction_path(self, path): + """Set the base path where resources will be extracted to, if needed. + + If you do not call this routine before any extractions take place, the + path defaults to the return value of ``get_default_cache()``. (Which + is based on the ``PYTHON_EGG_CACHE`` environment variable, with various + platform-specific fallbacks. See that routine's documentation for more + details.) + + Resources are extracted to subdirectories of this path based upon + information given by the ``IResourceProvider``. You may set this to a + temporary directory, but then you must call ``cleanup_resources()`` to + delete the extracted files when done. There is no guarantee that + ``cleanup_resources()`` will be able to remove all extracted files. + + (Note: you may not change the extraction path for a given resource + manager once resources have been extracted, unless you first call + ``cleanup_resources()``.) + """ + if self.cached_files: + raise ValueError( + "Can't change extraction path, files already extracted" + ) + + self.extraction_path = path + + def cleanup_resources(self, force=False): + """ + Delete all extracted resource files and directories, returning a list + of the file and directory names that could not be successfully removed. + This function does not have any concurrency protection, so it should + generally only be called when the extraction path is a temporary + directory exclusive to a single process. This method is not + automatically called; you must call it explicitly or register it as an + ``atexit`` function if you wish to ensure cleanup of a temporary + directory used for extractions. + """ + # XXX + +def get_default_cache(): + """Determine the default cache location + + This returns the ``PYTHON_EGG_CACHE`` environment variable, if set. + Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the + "Application Data" directory. On all other systems, it's "~/.python-eggs". + """ + try: + return os.environ['PYTHON_EGG_CACHE'] + except KeyError: + pass + + if os.name!='nt': + return os.path.expanduser('~/.python-eggs') + + # XXX this may be locale-specific! 
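
The ResourceManager methods above back module-level convenience functions of the same names; typical usage looks like this (package and resource names hypothetical):

    import pkg_resources

    # Extracted to the egg cache first if mypkg ships as a zipped egg.
    path = pkg_resources.resource_filename('mypkg', 'data/defaults.cfg')
    raw = pkg_resources.resource_string('mypkg', 'data/defaults.cfg')
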
+ app_data = 'Application Data'
+ app_homes = [
+ # best option, should be locale-safe
+ (('APPDATA',), None),
+ (('USERPROFILE',), app_data),
+ (('HOMEDRIVE','HOMEPATH'), app_data),
+ (('HOMEPATH',), app_data),
+ (('HOME',), None),
+ # 95/98/ME
+ (('WINDIR',), app_data),
+ ]
+
+ for keys, subdir in app_homes:
+ dirname = ''
+ for key in keys:
+ if key in os.environ:
+ dirname = os.path.join(dirname, os.environ[key])
+ else:
+ break
+ else:
+ if subdir:
+ dirname = os.path.join(dirname, subdir)
+ return os.path.join(dirname, 'Python-Eggs')
+ else:
+ raise RuntimeError(
+ "Please set the PYTHON_EGG_CACHE environment variable"
+ )
+
+def safe_name(name):
+ """Convert an arbitrary string to a standard distribution name
+
+ Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+ """
+ return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+ """Convert an arbitrary string to a standard version string
+
+ Spaces become dots, and all other non-alphanumeric characters become
+ dashes, with runs of multiple dashes condensed to a single dash.
+ """
+ version = version.replace(' ','.')
+ return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def safe_extra(extra):
+ """Convert an arbitrary string to a standard 'extra' name
+
+ Any runs of non-alphanumeric characters are replaced with a single '_',
+ and the result is always lowercased.
+ """
+ return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
+
+
+def to_filename(name):
+ """Convert a project or version name to its filename-escaped form
+
+ Any '-' characters are currently replaced with '_'.
+ """
+ return name.replace('-','_')
+
+
+class MarkerEvaluation(object):
+ values = {
+ 'os_name': lambda: os.name,
+ 'sys_platform': lambda: sys.platform,
+ 'python_full_version': platform.python_version,
+ 'python_version': lambda: platform.python_version()[:3],
+ 'platform_version': platform.version,
+ 'platform_machine': platform.machine,
+ 'python_implementation': platform.python_implementation,
+ }
+
+ @classmethod
+ def is_invalid_marker(cls, text):
+ """
+ Validate text as a PEP 426 environment marker; return an exception
+ if invalid, or False otherwise.
+ """
+ try:
+ cls.evaluate_marker(text)
+ except SyntaxError:
+ return cls.normalize_exception(sys.exc_info()[1])
+ return False
+
+ @staticmethod
+ def normalize_exception(exc):
+ """
+ Given a SyntaxError from a marker evaluation, normalize the error
+ message:
+ - Remove indications of filename and line number.
+ - Replace platform-specific error messages with standard error
+ messages.
+ """
+ subs = {
+ 'unexpected EOF while parsing': 'invalid syntax',
+ 'parenthesis is never closed': 'invalid syntax',
+ }
+ exc.filename = None
+ exc.lineno = None
+ exc.msg = subs.get(exc.msg, exc.msg)
+ return exc
+
+ @classmethod
+ def and_test(cls, nodelist):
+ # MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
+ items = [
+ cls.interpret(nodelist[i])
+ for i in range(1, len(nodelist), 2)
+ ]
+ return functools.reduce(operator.and_, items)
+
+ @classmethod
+ def test(cls, nodelist):
+ # MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
+ items = [ + cls.interpret(nodelist[i]) + for i in range(1, len(nodelist), 2) + ] + return functools.reduce(operator.or_, items) + + @classmethod + def atom(cls, nodelist): + t = nodelist[1][0] + if t == token.LPAR: + if nodelist[2][0] == token.RPAR: + raise SyntaxError("Empty parentheses") + return cls.interpret(nodelist[2]) + msg = "Language feature not supported in environment markers" + raise SyntaxError(msg) + + @classmethod + def comparison(cls, nodelist): + if len(nodelist) > 4: + msg = "Chained comparison not allowed in environment markers" + raise SyntaxError(msg) + comp = nodelist[2][1] + cop = comp[1] + if comp[0] == token.NAME: + if len(nodelist[2]) == 3: + if cop == 'not': + cop = 'not in' + else: + cop = 'is not' + try: + cop = cls.get_op(cop) + except KeyError: + msg = repr(cop) + " operator not allowed in environment markers" + raise SyntaxError(msg) + return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3])) + + @classmethod + def get_op(cls, op): + ops = { + symbol.test: cls.test, + symbol.and_test: cls.and_test, + symbol.atom: cls.atom, + symbol.comparison: cls.comparison, + 'not in': lambda x, y: x not in y, + 'in': lambda x, y: x in y, + '==': operator.eq, + '!=': operator.ne, + } + if hasattr(symbol, 'or_test'): + ops[symbol.or_test] = cls.test + return ops[op] + + @classmethod + def evaluate_marker(cls, text, extra=None): + """ + Evaluate a PEP 426 environment marker on CPython 2.4+. + Return a boolean indicating the marker result in this environment. + Raise SyntaxError if marker is invalid. + + This implementation uses the 'parser' module, which is not implemented + on + Jython and has been superseded by the 'ast' module in Python 2.6 and + later. + """ + return cls.interpret(parser.expr(text).totuple(1)[1]) + + @classmethod + def _markerlib_evaluate(cls, text): + """ + Evaluate a PEP 426 environment marker using markerlib. + Return a boolean indicating the marker result in this environment. + Raise SyntaxError if marker is invalid. + """ + import _markerlib + # markerlib implements Metadata 1.2 (PEP 345) environment markers. + # Translate the variables to Metadata 2.0 (PEP 426). + env = _markerlib.default_environment() + for key in env.keys(): + new_key = key.replace('.', '_') + env[new_key] = env.pop(key) + try: + result = _markerlib.interpret(text, env) + except NameError: + e = sys.exc_info()[1] + raise SyntaxError(e.args[0]) + return result + + if 'parser' not in globals(): + # Fall back to less-complete _markerlib implementation if 'parser' module + # is not available. 
+ evaluate_marker = _markerlib_evaluate + + @classmethod + def interpret(cls, nodelist): + while len(nodelist)==2: nodelist = nodelist[1] + try: + op = cls.get_op(nodelist[0]) + except KeyError: + raise SyntaxError("Comparison or logical expression expected") + return op(nodelist) + + @classmethod + def evaluate(cls, nodelist): + while len(nodelist)==2: nodelist = nodelist[1] + kind = nodelist[0] + name = nodelist[1] + if kind==token.NAME: + try: + op = cls.values[name] + except KeyError: + raise SyntaxError("Unknown name %r" % name) + return op() + if kind==token.STRING: + s = nodelist[1] + if not cls._safe_string(s): + raise SyntaxError( + "Only plain strings allowed in environment markers") + return s[1:-1] + msg = "Language feature not supported in environment markers" + raise SyntaxError(msg) + + @staticmethod + def _safe_string(cand): + return ( + cand[:1] in "'\"" and + not cand.startswith('"""') and + not cand.startswith("'''") and + '\\' not in cand + ) + +invalid_marker = MarkerEvaluation.is_invalid_marker +evaluate_marker = MarkerEvaluation.evaluate_marker + +class NullProvider: + """Try to implement resources and metadata for arbitrary PEP 302 loaders""" + + egg_name = None + egg_info = None + loader = None + + def __init__(self, module): + self.loader = getattr(module, '__loader__', None) + self.module_path = os.path.dirname(getattr(module, '__file__', '')) + + def get_resource_filename(self, manager, resource_name): + return self._fn(self.module_path, resource_name) + + def get_resource_stream(self, manager, resource_name): + return BytesIO(self.get_resource_string(manager, resource_name)) + + def get_resource_string(self, manager, resource_name): + return self._get(self._fn(self.module_path, resource_name)) + + def has_resource(self, resource_name): + return self._has(self._fn(self.module_path, resource_name)) + + def has_metadata(self, name): + return self.egg_info and self._has(self._fn(self.egg_info, name)) + + if sys.version_info <= (3,): + def get_metadata(self, name): + if not self.egg_info: + return "" + return self._get(self._fn(self.egg_info, name)) + else: + def get_metadata(self, name): + if not self.egg_info: + return "" + return self._get(self._fn(self.egg_info, name)).decode("utf-8") + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + def resource_isdir(self, resource_name): + return self._isdir(self._fn(self.module_path, resource_name)) + + def metadata_isdir(self, name): + return self.egg_info and self._isdir(self._fn(self.egg_info, name)) + + def resource_listdir(self, resource_name): + return self._listdir(self._fn(self.module_path, resource_name)) + + def metadata_listdir(self, name): + if self.egg_info: + return self._listdir(self._fn(self.egg_info, name)) + return [] + + def run_script(self, script_name, namespace): + script = 'scripts/'+script_name + if not self.has_metadata(script): + raise ResolutionError("No script named %r" % script_name) + script_text = self.get_metadata(script).replace('\r\n', '\n') + script_text = script_text.replace('\r', '\n') + script_filename = self._fn(self.egg_info, script) + namespace['__file__'] = script_filename + if os.path.exists(script_filename): + execfile(script_filename, namespace, namespace) + else: + from linecache import cache + cache[script_filename] = ( + len(script_text), 0, script_text.split('\n'), script_filename + ) + script_code = compile(script_text, script_filename,'exec') + exec(script_code, namespace, namespace) + + def _has(self, path): + raise 
NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _isdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _listdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _fn(self, base, resource_name): + if resource_name: + return os.path.join(base, *resource_name.split('/')) + return base + + def _get(self, path): + if hasattr(self.loader, 'get_data'): + return self.loader.get_data(path) + raise NotImplementedError( + "Can't perform this operation for loaders without 'get_data()'" + ) + +register_loader_type(object, NullProvider) + + +class EggProvider(NullProvider): + """Provider based on a virtual filesystem""" + + def __init__(self, module): + NullProvider.__init__(self, module) + self._setup_prefix() + + def _setup_prefix(self): + # we assume here that our metadata may be nested inside a "basket" + # of multiple eggs; that's why we use module_path instead of .archive + path = self.module_path + old = None + while path!=old: + if path.lower().endswith('.egg'): + self.egg_name = os.path.basename(path) + self.egg_info = os.path.join(path, 'EGG-INFO') + self.egg_root = path + break + old = path + path, base = os.path.split(path) + +class DefaultProvider(EggProvider): + """Provides access to package resources in the filesystem""" + + def _has(self, path): + return os.path.exists(path) + + def _isdir(self, path): + return os.path.isdir(path) + + def _listdir(self, path): + return os.listdir(path) + + def get_resource_stream(self, manager, resource_name): + return open(self._fn(self.module_path, resource_name), 'rb') + + def _get(self, path): + with open(path, 'rb') as stream: + return stream.read() + +register_loader_type(type(None), DefaultProvider) + +if importlib_bootstrap is not None: + register_loader_type(importlib_bootstrap.SourceFileLoader, DefaultProvider) + + +class EmptyProvider(NullProvider): + """Provider that returns nothing for all requests""" + + _isdir = _has = lambda self, path: False + _get = lambda self, path: '' + _listdir = lambda self, path: [] + module_path = None + + def __init__(self): + pass + +empty_provider = EmptyProvider() + + +class ZipManifests(dict): + """ + zip manifest builder + """ + + @classmethod + def build(cls, path): + """ + Build a dictionary similar to the zipimport directory + caches, except instead of tuples, store ZipInfo objects. + + Use a platform-specific path separator (os.sep) for the path keys + for compatibility with pypy on Windows. + """ + with ContextualZipFile(path) as zfile: + items = ( + ( + name.replace('/', os.sep), + zfile.getinfo(name), + ) + for name in zfile.namelist() + ) + return dict(items) + + load = build + + +class MemoizedZipManifests(ZipManifests): + """ + Memoized zipfile manifests. + """ + manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') + + def load(self, path): + """ + Load a manifest at path or return a suitable manifest already loaded. 
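+
+        An illustrative sketch (the archive path is hypothetical)::
+
+            manifests = MemoizedZipManifests()
+            manifest = manifests.load('/path/to/some.egg')
+            # repeated loads return the cached manifest until the
+            # file's mtime changes
+            assert manifests.load('/path/to/some.egg') is manifest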
+ """ + path = os.path.normpath(path) + mtime = os.stat(path).st_mtime + + if path not in self or self[path].mtime != mtime: + manifest = self.build(path) + self[path] = self.manifest_mod(manifest, mtime) + + return self[path].manifest + + +class ContextualZipFile(zipfile.ZipFile): + """ + Supplement ZipFile class to support context manager for Python 2.6 + """ + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + def __new__(cls, *args, **kwargs): + """ + Construct a ZipFile or ContextualZipFile as appropriate + """ + if hasattr(zipfile.ZipFile, '__exit__'): + return zipfile.ZipFile(*args, **kwargs) + return super(ContextualZipFile, cls).__new__(cls) + + +class ZipProvider(EggProvider): + """Resource support for zips and eggs""" + + eagers = None + _zip_manifests = ( + MemoizedZipManifests() + if os.environ.get('PKG_RESOURCES_CACHE_ZIP_MANIFESTS') else + ZipManifests() + ) + + def __init__(self, module): + EggProvider.__init__(self, module) + self.zip_pre = self.loader.archive+os.sep + + def _zipinfo_name(self, fspath): + # Convert a virtual filename (full path to file) into a zipfile subpath + # usable with the zipimport directory cache for our target archive + if fspath.startswith(self.zip_pre): + return fspath[len(self.zip_pre):] + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.zip_pre) + ) + + def _parts(self, zip_path): + # Convert a zipfile subpath into an egg-relative path part list. + # pseudo-fs path + fspath = self.zip_pre+zip_path + if fspath.startswith(self.egg_root+os.sep): + return fspath[len(self.egg_root)+1:].split(os.sep) + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.egg_root) + ) + + @property + def zipinfo(self): + return self._zip_manifests.load(self.loader.archive) + + def get_resource_filename(self, manager, resource_name): + if not self.egg_name: + raise NotImplementedError( + "resource_filename() only supported for .egg, not .zip" + ) + # no need to lock for extraction, since we use temp names + zip_path = self._resource_to_zip(resource_name) + eagers = self._get_eager_resources() + if '/'.join(self._parts(zip_path)) in eagers: + for name in eagers: + self._extract_resource(manager, self._eager_to_zip(name)) + return self._extract_resource(manager, zip_path) + + @staticmethod + def _get_date_and_size(zip_stat): + size = zip_stat.file_size + # ymdhms+wday, yday, dst + date_time = zip_stat.date_time + (0, 0, -1) + # 1980 offset already done + timestamp = time.mktime(date_time) + return timestamp, size + + def _extract_resource(self, manager, zip_path): + + if zip_path in self._index(): + for name in self._index()[zip_path]: + last = self._extract_resource( + manager, os.path.join(zip_path, name) + ) + # return the extracted directory name + return os.path.dirname(last) + + timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) + + if not WRITE_SUPPORT: + raise IOError('"os.rename" and "os.unlink" are not supported ' + 'on this platform') + try: + + real_path = manager.get_cache_path( + self.egg_name, self._parts(zip_path) + ) + + if self._is_current(real_path, zip_path): + return real_path + + outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) + os.write(outf, self.loader.get_data(zip_path)) + os.close(outf) + utime(tmpnam, (timestamp, timestamp)) + manager.postprocess(tmpnam, real_path) + + try: + rename(tmpnam, real_path) + + except os.error: + if os.path.isfile(real_path): + if self._is_current(real_path, zip_path): + # the file became 
current since it was checked above, + # so proceed. + return real_path + # Windows, del old file and retry + elif os.name=='nt': + unlink(real_path) + rename(tmpnam, real_path) + return real_path + raise + + except os.error: + # report a user-friendly error + manager.extraction_error() + + return real_path + + def _is_current(self, file_path, zip_path): + """ + Return True if the file_path is current for this zip_path + """ + timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) + if not os.path.isfile(file_path): + return False + stat = os.stat(file_path) + if stat.st_size!=size or stat.st_mtime!=timestamp: + return False + # check that the contents match + zip_contents = self.loader.get_data(zip_path) + with open(file_path, 'rb') as f: + file_contents = f.read() + return zip_contents == file_contents + + def _get_eager_resources(self): + if self.eagers is None: + eagers = [] + for name in ('native_libs.txt', 'eager_resources.txt'): + if self.has_metadata(name): + eagers.extend(self.get_metadata_lines(name)) + self.eagers = eagers + return self.eagers + + def _index(self): + try: + return self._dirindex + except AttributeError: + ind = {} + for path in self.zipinfo: + parts = path.split(os.sep) + while parts: + parent = os.sep.join(parts[:-1]) + if parent in ind: + ind[parent].append(parts[-1]) + break + else: + ind[parent] = [parts.pop()] + self._dirindex = ind + return ind + + def _has(self, fspath): + zip_path = self._zipinfo_name(fspath) + return zip_path in self.zipinfo or zip_path in self._index() + + def _isdir(self, fspath): + return self._zipinfo_name(fspath) in self._index() + + def _listdir(self, fspath): + return list(self._index().get(self._zipinfo_name(fspath), ())) + + def _eager_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.egg_root, resource_name)) + + def _resource_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.module_path, resource_name)) + +register_loader_type(zipimport.zipimporter, ZipProvider) + + +class FileMetadata(EmptyProvider): + """Metadata handler for standalone PKG-INFO files + + Usage:: + + metadata = FileMetadata("/path/to/PKG-INFO") + + This provider rejects all data and metadata requests except for PKG-INFO, + which is treated as existing, and will be the contents of the file at + the provided location. 
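+
+    A sketch of pairing it with a ``Distribution`` (the paths are
+    hypothetical; this mirrors what ``find_on_path`` does below)::
+
+        metadata = FileMetadata("/path/to/PackageName.egg-info")
+        dist = Distribution.from_location(
+            "/path/to", "PackageName.egg-info", metadata,
+            precedence=DEVELOP_DIST
+        )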
+ """ + + def __init__(self, path): + self.path = path + + def has_metadata(self, name): + return name=='PKG-INFO' + + def get_metadata(self, name): + if name=='PKG-INFO': + with open(self.path,'rU') as f: + metadata = f.read() + return metadata + raise KeyError("No metadata except PKG-INFO is available") + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + +class PathMetadata(DefaultProvider): + """Metadata provider for egg directories + + Usage:: + + # Development eggs: + + egg_info = "/path/to/PackageName.egg-info" + base_dir = os.path.dirname(egg_info) + metadata = PathMetadata(base_dir, egg_info) + dist_name = os.path.splitext(os.path.basename(egg_info))[0] + dist = Distribution(basedir, project_name=dist_name, metadata=metadata) + + # Unpacked egg directories: + + egg_path = "/path/to/PackageName-ver-pyver-etc.egg" + metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) + dist = Distribution.from_filename(egg_path, metadata=metadata) + """ + + def __init__(self, path, egg_info): + self.module_path = path + self.egg_info = egg_info + + +class EggMetadata(ZipProvider): + """Metadata provider for .egg files""" + + def __init__(self, importer): + """Create a metadata provider from a zipimporter""" + + self.zip_pre = importer.archive+os.sep + self.loader = importer + if importer.prefix: + self.module_path = os.path.join(importer.archive, importer.prefix) + else: + self.module_path = importer.archive + self._setup_prefix() + +_declare_state('dict', _distribution_finders = {}) + +def register_finder(importer_type, distribution_finder): + """Register `distribution_finder` to find distributions in sys.path items + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `distribution_finder` is a callable that, passed a path + item and the importer instance, yields ``Distribution`` instances found on + that path item. See ``pkg_resources.find_on_path`` for an example.""" + _distribution_finders[importer_type] = distribution_finder + + +def find_distributions(path_item, only=False): + """Yield distributions accessible via `path_item`""" + importer = get_importer(path_item) + finder = _find_adapter(_distribution_finders, importer) + return finder(importer, path_item, only) + +def find_eggs_in_zip(importer, path_item, only=False): + """ + Find eggs in zip files; possibly multiple nested eggs. 
+ """ + if importer.archive.endswith('.whl'): + # wheels are not supported with this finder + # they don't have PKG-INFO metadata, and won't ever contain eggs + return + metadata = EggMetadata(importer) + if metadata.has_metadata('PKG-INFO'): + yield Distribution.from_filename(path_item, metadata=metadata) + if only: + # don't yield nested distros + return + for subitem in metadata.resource_listdir('/'): + if subitem.endswith('.egg'): + subpath = os.path.join(path_item, subitem) + for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath): + yield dist + +register_finder(zipimport.zipimporter, find_eggs_in_zip) + +def find_nothing(importer, path_item, only=False): + return () +register_finder(object, find_nothing) + +def find_on_path(importer, path_item, only=False): + """Yield distributions accessible on a sys.path directory""" + path_item = _normalize_cached(path_item) + + if os.path.isdir(path_item) and os.access(path_item, os.R_OK): + if path_item.lower().endswith('.egg'): + # unpacked egg + yield Distribution.from_filename( + path_item, metadata=PathMetadata( + path_item, os.path.join(path_item,'EGG-INFO') + ) + ) + else: + # scan for .egg and .egg-info in directory + for entry in os.listdir(path_item): + lower = entry.lower() + if lower.endswith('.egg-info') or lower.endswith('.dist-info'): + fullpath = os.path.join(path_item, entry) + if os.path.isdir(fullpath): + # egg-info directory, allow getting metadata + metadata = PathMetadata(path_item, fullpath) + else: + metadata = FileMetadata(fullpath) + yield Distribution.from_location( + path_item, entry, metadata, precedence=DEVELOP_DIST + ) + elif not only and lower.endswith('.egg'): + dists = find_distributions(os.path.join(path_item, entry)) + for dist in dists: + yield dist + elif not only and lower.endswith('.egg-link'): + with open(os.path.join(path_item, entry)) as entry_file: + entry_lines = entry_file.readlines() + for line in entry_lines: + if not line.strip(): + continue + path = os.path.join(path_item, line.rstrip()) + dists = find_distributions(path) + for item in dists: + yield item + break +register_finder(pkgutil.ImpImporter, find_on_path) + +if importlib_bootstrap is not None: + register_finder(importlib_bootstrap.FileFinder, find_on_path) + +_declare_state('dict', _namespace_handlers={}) +_declare_state('dict', _namespace_packages={}) + + +def register_namespace_handler(importer_type, namespace_handler): + """Register `namespace_handler` to declare namespace packages + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `namespace_handler` is a callable like this:: + + def namespace_handler(importer, path_entry, moduleName, module): + # return a path_entry to use for child packages + + Namespace handlers are only called if the importer object has already + agreed that it can handle the relevant path item, and they should only + return a subpath if the module __path__ does not already contain an + equivalent subpath. For an example namespace handler, see + ``pkg_resources.file_ns_handler``. 
+ """ + _namespace_handlers[importer_type] = namespace_handler + +def _handle_ns(packageName, path_item): + """Ensure that named package includes a subpath of path_item (if needed)""" + + importer = get_importer(path_item) + if importer is None: + return None + loader = importer.find_module(packageName) + if loader is None: + return None + module = sys.modules.get(packageName) + if module is None: + module = sys.modules[packageName] = imp.new_module(packageName) + module.__path__ = [] + _set_parent_ns(packageName) + elif not hasattr(module,'__path__'): + raise TypeError("Not a package:", packageName) + handler = _find_adapter(_namespace_handlers, importer) + subpath = handler(importer, path_item, packageName, module) + if subpath is not None: + path = module.__path__ + path.append(subpath) + loader.load_module(packageName) + for path_item in path: + if path_item not in module.__path__: + module.__path__.append(path_item) + return subpath + +def declare_namespace(packageName): + """Declare that package 'packageName' is a namespace package""" + + imp.acquire_lock() + try: + if packageName in _namespace_packages: + return + + path, parent = sys.path, None + if '.' in packageName: + parent = '.'.join(packageName.split('.')[:-1]) + declare_namespace(parent) + if parent not in _namespace_packages: + __import__(parent) + try: + path = sys.modules[parent].__path__ + except AttributeError: + raise TypeError("Not a package:", parent) + + # Track what packages are namespaces, so when new path items are added, + # they can be updated + _namespace_packages.setdefault(parent,[]).append(packageName) + _namespace_packages.setdefault(packageName,[]) + + for path_item in path: + # Ensure all the parent's path items are reflected in the child, + # if they apply + _handle_ns(packageName, path_item) + + finally: + imp.release_lock() + +def fixup_namespace_packages(path_item, parent=None): + """Ensure that previously-declared namespace packages include path_item""" + imp.acquire_lock() + try: + for package in _namespace_packages.get(parent,()): + subpath = _handle_ns(package, path_item) + if subpath: + fixup_namespace_packages(subpath, package) + finally: + imp.release_lock() + +def file_ns_handler(importer, path_item, packageName, module): + """Compute an ns-package subpath for a filesystem or zipfile importer""" + + subpath = os.path.join(path_item, packageName.split('.')[-1]) + normalized = _normalize_cached(subpath) + for item in module.__path__: + if _normalize_cached(item)==normalized: + break + else: + # Only return the path if it's not already there + return subpath + +register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) +register_namespace_handler(zipimport.zipimporter, file_ns_handler) + +if importlib_bootstrap is not None: + register_namespace_handler(importlib_bootstrap.FileFinder, file_ns_handler) + + +def null_ns_handler(importer, path_item, packageName, module): + return None + +register_namespace_handler(object, null_ns_handler) + + +def normalize_path(filename): + """Normalize a file/dir name for comparison purposes""" + return os.path.normcase(os.path.realpath(filename)) + +def _normalize_cached(filename, _cache={}): + try: + return _cache[filename] + except KeyError: + _cache[filename] = result = normalize_path(filename) + return result + +def _set_parent_ns(packageName): + parts = packageName.split('.') + name = parts.pop() + if parts: + parent = '.'.join(parts) + setattr(sys.modules[parent], name, sys.modules[packageName]) + + +def yield_lines(strs): + """Yield 
non-empty/non-comment lines of a ``basestring`` or sequence"""
+    if isinstance(strs, basestring):
+        for s in strs.splitlines():
+            s = s.strip()
+            # skip blank lines/comments
+            if s and not s.startswith('#'):
+                yield s
+    else:
+        for ss in strs:
+            for s in yield_lines(ss):
+                yield s
+
+# whitespace and comment
+LINE_END = re.compile(r"\s*(#.*)?$").match
+# line continuation
+CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match
+# Distribution or extra
+DISTRO = re.compile(r"\s*((\w|[-.])+)").match
+# ver. info
+VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match
+# comma between items
+COMMA = re.compile(r"\s*,").match
+OBRACKET = re.compile(r"\s*\[").match
+CBRACKET = re.compile(r"\s*\]").match
+MODULE = re.compile(r"\w+(\.\w+)*$").match
+EGG_NAME = re.compile(
+    r"(?P<name>[^-]+)"
+    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
+    re.VERBOSE | re.IGNORECASE
+).match
+
+component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
+replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
+
+def _parse_version_parts(s):
+    for part in component_re.split(s):
+        part = replace(part, part)
+        if not part or part=='.':
+            continue
+        if part[:1] in '0123456789':
+            # pad for numeric comparison
+            yield part.zfill(8)
+        else:
+            yield '*'+part
+
+    # ensure that alpha/beta/candidate are before final
+    yield '*final'
+
+def parse_version(s):
+    """Convert a version string to a chronologically-sortable key
+
+    This is a rough cross between distutils' StrictVersion and LooseVersion;
+    if you give it versions that would work with StrictVersion, then it behaves
+    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
+    *possible* to create pathological version coding schemes that will fool
+    this parser, but they should be very rare in practice.
+
+    The returned value will be a tuple of strings. Numeric portions of the
+    version are padded to 8 digits so they will compare numerically, but
+    without relying on how numbers compare relative to strings. Dots are
+    dropped, but dashes are retained. Trailing zeros between alpha segments
+    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
+    "2.4". Alphanumeric parts are lower-cased.
+
+    The algorithm assumes that strings like "-" and any alpha string that
+    alphabetically follows "final" represent a "patch level". So, "2.4-1"
+    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
+    considered newer than "2.4-1", which in turn is newer than "2.4".
+
+    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
+    come before "final" alphabetically) are assumed to be pre-release versions,
+    so that the version "2.4" is considered newer than "2.4a1".
+
+    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
+    "rc" are treated as if they were "c", i.e. as though they were release
+    candidates, and therefore are not as new as a version string that does not
+    contain them, and "dev" is replaced with an '@' so that it sorts lower
+    than any other pre-release tag.
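+
+    An illustrative sketch of the resulting ordering, restating the rules
+    above (comparisons are between the returned keys)::
+
+        parse_version("2.4a1") < parse_version("2.4")
+        parse_version("2.4")   < parse_version("2.4-1")
+        parse_version("2.4-1") < parse_version("2.4.1")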
+ """ + parts = [] + for part in _parse_version_parts(s.lower()): + if part.startswith('*'): + # remove '-' before a prerelease tag + if part < '*final': + while parts and parts[-1] == '*final-': + parts.pop() + # remove trailing zeros from each series of numeric parts + while parts and parts[-1]=='00000000': + parts.pop() + parts.append(part) + return tuple(parts) + + +class EntryPoint(object): + """Object representing an advertised importable object""" + + def __init__(self, name, module_name, attrs=(), extras=(), dist=None): + if not MODULE(module_name): + raise ValueError("Invalid module name", module_name) + self.name = name + self.module_name = module_name + self.attrs = tuple(attrs) + self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras + self.dist = dist + + def __str__(self): + s = "%s = %s" % (self.name, self.module_name) + if self.attrs: + s += ':' + '.'.join(self.attrs) + if self.extras: + s += ' [%s]' % ','.join(self.extras) + return s + + def __repr__(self): + return "EntryPoint.parse(%r)" % str(self) + + def load(self, require=True, env=None, installer=None): + if require: + self.require(env, installer) + entry = __import__(self.module_name, globals(), globals(), + ['__name__']) + for attr in self.attrs: + try: + entry = getattr(entry, attr) + except AttributeError: + raise ImportError("%r has no %r attribute" % (entry, attr)) + return entry + + def require(self, env=None, installer=None): + if self.extras and not self.dist: + raise UnknownExtra("Can't require() without a distribution", self) + reqs = self.dist.requires(self.extras) + items = working_set.resolve(reqs, env, installer) + list(map(working_set.add, items)) + + @classmethod + def parse(cls, src, dist=None): + """Parse a single entry point from string `src` + + Entry point syntax follows the form:: + + name = some.module:some.attr [extra1, extra2] + + The entry name and module name are required, but the ``:attrs`` and + ``[extras]`` parts are optional + """ + try: + attrs = extras = () + name, value = src.split('=', 1) + if '[' in value: + value, extras = value.split('[', 1) + req = Requirement.parse("x[" + extras) + if req.specs: + raise ValueError + extras = req.extras + if ':' in value: + value, attrs = value.split(':', 1) + if not MODULE(attrs.rstrip()): + raise ValueError + attrs = attrs.rstrip().split('.') + except ValueError: + msg = "EntryPoint must be in 'name=module:attrs [extras]' format" + raise ValueError(msg, src) + else: + return cls(name.strip(), value.strip(), attrs, extras, dist) + + @classmethod + def parse_group(cls, group, lines, dist=None): + """Parse an entry point group""" + if not MODULE(group): + raise ValueError("Invalid group name", group) + this = {} + for line in yield_lines(lines): + ep = cls.parse(line, dist) + if ep.name in this: + raise ValueError("Duplicate entry point", group, ep.name) + this[ep.name]=ep + return this + + @classmethod + def parse_map(cls, data, dist=None): + """Parse a map of entry point groups""" + if isinstance(data, dict): + data = data.items() + else: + data = split_sections(data) + maps = {} + for group, lines in data: + if group is None: + if not lines: + continue + raise ValueError("Entry points must be listed in groups") + group = group.strip() + if group in maps: + raise ValueError("Duplicate group name", group) + maps[group] = cls.parse_group(group, lines, dist) + return maps + + +def _remove_md5_fragment(location): + if not location: + return '' + parsed = urlparse(location) + if parsed[-1].startswith('md5='): + return 
urlunparse(parsed[:-1] + ('',)) + return location + + +class Distribution(object): + """Wrap an actual or potential sys.path entry w/metadata""" + PKG_INFO = 'PKG-INFO' + + def __init__(self, location=None, metadata=None, project_name=None, + version=None, py_version=PY_MAJOR, platform=None, + precedence=EGG_DIST): + self.project_name = safe_name(project_name or 'Unknown') + if version is not None: + self._version = safe_version(version) + self.py_version = py_version + self.platform = platform + self.location = location + self.precedence = precedence + self._provider = metadata or empty_provider + + @classmethod + def from_location(cls, location, basename, metadata=None,**kw): + project_name, version, py_version, platform = [None]*4 + basename, ext = os.path.splitext(basename) + if ext.lower() in _distributionImpl: + # .dist-info gets much metadata differently + match = EGG_NAME(basename) + if match: + project_name, version, py_version, platform = match.group( + 'name','ver','pyver','plat' + ) + cls = _distributionImpl[ext.lower()] + return cls( + location, metadata, project_name=project_name, version=version, + py_version=py_version, platform=platform, **kw + ) + + @property + def hashcmp(self): + return ( + getattr(self, 'parsed_version', ()), + self.precedence, + self.key, + _remove_md5_fragment(self.location), + self.py_version, + self.platform, + ) + + def __hash__(self): + return hash(self.hashcmp) + + def __lt__(self, other): + return self.hashcmp < other.hashcmp + + def __le__(self, other): + return self.hashcmp <= other.hashcmp + + def __gt__(self, other): + return self.hashcmp > other.hashcmp + + def __ge__(self, other): + return self.hashcmp >= other.hashcmp + + def __eq__(self, other): + if not isinstance(other, self.__class__): + # It's not a Distribution, so they are not equal + return False + return self.hashcmp == other.hashcmp + + def __ne__(self, other): + return not self == other + + # These properties have to be lazy so that we don't have to load any + # metadata until/unless it's actually needed. 
(i.e., some distributions + # may not know their name or version without loading PKG-INFO) + + @property + def key(self): + try: + return self._key + except AttributeError: + self._key = key = self.project_name.lower() + return key + + @property + def parsed_version(self): + try: + return self._parsed_version + except AttributeError: + self._parsed_version = pv = parse_version(self.version) + return pv + + @property + def version(self): + try: + return self._version + except AttributeError: + for line in self._get_metadata(self.PKG_INFO): + if line.lower().startswith('version:'): + self._version = safe_version(line.split(':',1)[1].strip()) + return self._version + else: + tmpl = "Missing 'Version:' header and/or %s file" + raise ValueError(tmpl % self.PKG_INFO, self) + + @property + def _dep_map(self): + try: + return self.__dep_map + except AttributeError: + dm = self.__dep_map = {None: []} + for name in 'requires.txt', 'depends.txt': + for extra, reqs in split_sections(self._get_metadata(name)): + if extra: + if ':' in extra: + extra, marker = extra.split(':', 1) + if invalid_marker(marker): + # XXX warn + reqs=[] + elif not evaluate_marker(marker): + reqs=[] + extra = safe_extra(extra) or None + dm.setdefault(extra,[]).extend(parse_requirements(reqs)) + return dm + + def requires(self, extras=()): + """List of Requirements needed for this distro if `extras` are used""" + dm = self._dep_map + deps = [] + deps.extend(dm.get(None, ())) + for ext in extras: + try: + deps.extend(dm[safe_extra(ext)]) + except KeyError: + raise UnknownExtra( + "%s has no such extra feature %r" % (self, ext) + ) + return deps + + def _get_metadata(self, name): + if self.has_metadata(name): + for line in self.get_metadata_lines(name): + yield line + + def activate(self, path=None): + """Ensure distribution is importable on `path` (default=sys.path)""" + if path is None: + path = sys.path + self.insert_on(path) + if path is sys.path: + fixup_namespace_packages(self.location) + for pkg in self._get_metadata('namespace_packages.txt'): + if pkg in sys.modules: + declare_namespace(pkg) + + def egg_name(self): + """Return what this distribution's standard .egg filename should be""" + filename = "%s-%s-py%s" % ( + to_filename(self.project_name), to_filename(self.version), + self.py_version or PY_MAJOR + ) + + if self.platform: + filename += '-' + self.platform + return filename + + def __repr__(self): + if self.location: + return "%s (%s)" % (self, self.location) + else: + return str(self) + + def __str__(self): + try: + version = getattr(self, 'version', None) + except ValueError: + version = None + version = version or "[unknown version]" + return "%s %s" % (self.project_name, version) + + def __getattr__(self, attr): + """Delegate all unrecognized public attributes to .metadata provider""" + if attr.startswith('_'): + raise AttributeError(attr) + return getattr(self._provider, attr) + + @classmethod + def from_filename(cls, filename, metadata=None, **kw): + return cls.from_location( + _normalize_cached(filename), os.path.basename(filename), metadata, + **kw + ) + + def as_requirement(self): + """Return a ``Requirement`` that matches this distribution exactly""" + return Requirement.parse('%s==%s' % (self.project_name, self.version)) + + def load_entry_point(self, group, name): + """Return the `name` entry point of `group` or raise ImportError""" + ep = self.get_entry_info(group, name) + if ep is None: + raise ImportError("Entry point %r not found" % ((group, name),)) + return ep.load() + + def get_entry_map(self, 
group=None):
+        """Return the entry point map for `group`, or the full entry map"""
+        try:
+            ep_map = self._ep_map
+        except AttributeError:
+            ep_map = self._ep_map = EntryPoint.parse_map(
+                self._get_metadata('entry_points.txt'), self
+            )
+        if group is not None:
+            return ep_map.get(group,{})
+        return ep_map
+
+    def get_entry_info(self, group, name):
+        """Return the EntryPoint object for `group`+`name`, or ``None``"""
+        return self.get_entry_map(group).get(name)
+
+    def insert_on(self, path, loc = None):
+        """Insert self.location in path before its nearest parent directory"""
+
+        loc = loc or self.location
+        if not loc:
+            return
+
+        nloc = _normalize_cached(loc)
+        bdir = os.path.dirname(nloc)
+        npath= [(p and _normalize_cached(p) or p) for p in path]
+
+        for p, item in enumerate(npath):
+            if item == nloc:
+                break
+            elif item == bdir and self.precedence == EGG_DIST:
+                # if it's an .egg, give it precedence over its directory
+                if path is sys.path:
+                    self.check_version_conflict()
+                path.insert(p, loc)
+                npath.insert(p, nloc)
+                break
+        else:
+            if path is sys.path:
+                self.check_version_conflict()
+            path.append(loc)
+            return
+
+        # p is the spot where we found or inserted loc; now remove duplicates
+        while True:
+            try:
+                np = npath.index(nloc, p+1)
+            except ValueError:
+                break
+            else:
+                del npath[np], path[np]
+                # ha!
+                p = np
+
+        return
+
+    def check_version_conflict(self):
+        if self.key == 'setuptools':
+            # ignore the inevitable setuptools self-conflicts :(
+            return
+
+        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
+        loc = normalize_path(self.location)
+        for modname in self._get_metadata('top_level.txt'):
+            if (modname not in sys.modules or modname in nsp
+                    or modname in _namespace_packages):
+                continue
+            if modname in ('pkg_resources', 'setuptools', 'site'):
+                continue
+            fn = getattr(sys.modules[modname], '__file__', None)
+            if fn and (normalize_path(fn).startswith(loc) or
+                       fn.startswith(self.location)):
+                continue
+            issue_warning(
+                "Module %s was already imported from %s, but %s is being added"
+                " to sys.path" % (modname, fn, self.location),
+            )
+
+    def has_version(self):
+        try:
+            self.version
+        except ValueError:
+            issue_warning("Unbuilt egg for " + repr(self))
+            return False
+        return True
+
+    def clone(self,**kw):
+        """Copy this distribution, substituting in any changed keyword args"""
+        names = 'project_name version py_version platform location precedence'
+        for attr in names.split():
+            kw.setdefault(attr, getattr(self, attr, None))
+        kw.setdefault('metadata', self._provider)
+        return self.__class__(**kw)
+
+    @property
+    def extras(self):
+        return [dep for dep in self._dep_map if dep]
+
+
+class DistInfoDistribution(Distribution):
+    """Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
+    PKG_INFO = 'METADATA'
+    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
+
+    @property
+    def _parsed_pkg_info(self):
+        """Parse and cache metadata"""
+        try:
+            return self._pkg_info
+        except AttributeError:
+            metadata = self.get_metadata(self.PKG_INFO)
+            self._pkg_info = email.parser.Parser().parsestr(metadata)
+            return self._pkg_info
+
+    @property
+    def _dep_map(self):
+        try:
+            return self.__dep_map
+        except AttributeError:
+            self.__dep_map = self._compute_dependencies()
+            return self.__dep_map
+
+    def _preparse_requirement(self, requires_dist):
+        """Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz')
+        Split environment marker, add == prefix to version specifiers as
+        necessary, and remove parentheses.
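+
+        A further illustrative case following the same rules (assumed, not
+        verified output): 'Foobar (>=1.2); extra == "bar"' becomes
+        ('Foobar >=1.2', 'extra == "bar"'); the ``==`` prefix is only
+        added to bare numeric versions.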
+ """ + parts = requires_dist.split(';', 1) + [''] + distvers = parts[0].strip() + mark = parts[1].strip() + distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers) + distvers = distvers.replace('(', '').replace(')', '') + return (distvers, mark) + + def _compute_dependencies(self): + """Recompute this distribution's dependencies.""" + from _markerlib import compile as compile_marker + dm = self.__dep_map = {None: []} + + reqs = [] + # Including any condition expressions + for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: + distvers, mark = self._preparse_requirement(req) + parsed = next(parse_requirements(distvers)) + parsed.marker_fn = compile_marker(mark) + reqs.append(parsed) + + def reqs_for_extra(extra): + for req in reqs: + if req.marker_fn(override={'extra':extra}): + yield req + + common = frozenset(reqs_for_extra(None)) + dm[None].extend(common) + + for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: + extra = safe_extra(extra.strip()) + dm[extra] = list(frozenset(reqs_for_extra(extra)) - common) + + return dm + + +_distributionImpl = { + '.egg': Distribution, + '.egg-info': Distribution, + '.dist-info': DistInfoDistribution, + } + + +def issue_warning(*args,**kw): + level = 1 + g = globals() + try: + # find the first stack frame that is *not* code in + # the pkg_resources module, to use for the warning + while sys._getframe(level).f_globals is g: + level += 1 + except ValueError: + pass + warnings.warn(stacklevel=level + 1, *args, **kw) + + +def parse_requirements(strs): + """Yield ``Requirement`` objects for each specification in `strs` + + `strs` must be an instance of ``basestring``, or a (possibly-nested) + iterable thereof. + """ + # create a steppable iterator, so we can handle \-continuations + lines = iter(yield_lines(strs)) + + def scan_list(ITEM, TERMINATOR, line, p, groups, item_name): + + items = [] + + while not TERMINATOR(line, p): + if CONTINUE(line, p): + try: + line = next(lines) + p = 0 + except StopIteration: + raise ValueError( + "\\ must not appear on the last nonblank line" + ) + + match = ITEM(line, p) + if not match: + msg = "Expected " + item_name + " in" + raise ValueError(msg, line, "at", line[p:]) + + items.append(match.group(*groups)) + p = match.end() + + match = COMMA(line, p) + if match: + # skip the comma + p = match.end() + elif not TERMINATOR(line, p): + msg = "Expected ',' or end-of-list in" + raise ValueError(msg, line, "at", line[p:]) + + match = TERMINATOR(line, p) + # skip the terminator, if any + if match: + p = match.end() + return line, p, items + + for line in lines: + match = DISTRO(line) + if not match: + raise ValueError("Missing distribution spec", line) + project_name = match.group(1) + p = match.end() + extras = [] + + match = OBRACKET(line, p) + if match: + p = match.end() + line, p, extras = scan_list( + DISTRO, CBRACKET, line, p, (1,), "'extra' name" + ) + + line, p, specs = scan_list(VERSION, LINE_END, line, p, (1, 2), + "version spec") + specs = [(op, safe_version(val)) for op, val in specs] + yield Requirement(project_name, specs, extras) + + +class Requirement: + def __init__(self, project_name, specs, extras): + """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" + self.unsafe_name, project_name = project_name, safe_name(project_name) + self.project_name, self.key = project_name, project_name.lower() + index = [ + (parse_version(v), state_machine[op], op, v) + for op, v in specs + ] + index.sort() + self.specs = [(op, ver) for parsed, trans, op, ver in index] + self.index, 
self.extras = index, tuple(map(safe_extra, extras)) + self.hashCmp = ( + self.key, + tuple((op, parsed) for parsed, trans, op, ver in index), + frozenset(self.extras), + ) + self.__hash = hash(self.hashCmp) + + def __str__(self): + specs = ','.join([''.join(s) for s in self.specs]) + extras = ','.join(self.extras) + if extras: + extras = '[%s]' % extras + return '%s%s%s' % (self.project_name, extras, specs) + + def __eq__(self, other): + return ( + isinstance(other, Requirement) and + self.hashCmp == other.hashCmp + ) + + def __contains__(self, item): + if isinstance(item, Distribution): + if item.key != self.key: + return False + # only get if we need it + if self.index: + item = item.parsed_version + elif isinstance(item, basestring): + item = parse_version(item) + last = None + # -1, 0, 1 + compare = lambda a, b: (a > b) - (a < b) + for parsed, trans, op, ver in self.index: + # Indexing: 0, 1, -1 + action = trans[compare(item, parsed)] + if action == 'F': + return False + elif action == 'T': + return True + elif action == '+': + last = True + elif action == '-' or last is None: + last = False + # no rules encountered + if last is None: + last = True + return last + + def __hash__(self): + return self.__hash + + def __repr__(self): return "Requirement.parse(%r)" % str(self) + + @staticmethod + def parse(s): + reqs = list(parse_requirements(s)) + if reqs: + if len(reqs) == 1: + return reqs[0] + raise ValueError("Expected only one requirement", s) + raise ValueError("No requirements found", s) + +state_machine = { + # =>< + '<': '--T', + '<=': 'T-T', + '>': 'F+F', + '>=': 'T+F', + '==': 'T..', + '!=': 'F++', +} + + +def _get_mro(cls): + """Get an mro for a type or classic class""" + if not isinstance(cls, type): + class cls(cls, object): pass + return cls.__mro__[1:] + return cls.__mro__ + +def _find_adapter(registry, ob): + """Return an adapter factory for `ob` from `registry`""" + for t in _get_mro(getattr(ob, '__class__', type(ob))): + if t in registry: + return registry[t] + + +def ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + if not os.path.isdir(dirname): + os.makedirs(dirname) + +def split_sections(s): + """Split a string or iterable thereof into (section, content) pairs + + Each ``section`` is a stripped version of the section header ("[section]") + and each ``content`` is a list of stripped lines excluding blank lines and + comment-only lines. If there are any such lines before the first section + header, they're returned in a first ``section`` of ``None``. 
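+
+    An illustrative sketch::
+
+        lines = ["x", "[a]", "y"]
+        list(split_sections(lines)) == [(None, ['x']), ('a', ['y'])]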
+ """ + section = None + content = [] + for line in yield_lines(s): + if line.startswith("["): + if line.endswith("]"): + if section or content: + yield section, content + section = line[1:-1].strip() + content = [] + else: + raise ValueError("Invalid section heading", line) + else: + content.append(line) + + # wrap up last segment + yield section, content + +def _mkstemp(*args,**kw): + old_open = os.open + try: + # temporarily bypass sandboxing + os.open = os_open + return tempfile.mkstemp(*args,**kw) + finally: + # and then put it back + os.open = old_open + + +# Set up global resource manager (deliberately not state-saved) +_manager = ResourceManager() +def _initialize(g): + for name in dir(_manager): + if not name.startswith('_'): + g[name] = getattr(_manager, name) +_initialize(globals()) + +# Prepare the master working set and make the ``require()`` API available +working_set = WorkingSet._build_master() +_declare_state('object', working_set=working_set) + +require = working_set.require +iter_entry_points = working_set.iter_entry_points +add_activation_listener = working_set.subscribe +run_script = working_set.run_script +# backward compatibility +run_main = run_script +# Activate all distributions already on sys.path, and ensure that +# all distributions added to the working set in the future (e.g. by +# calling ``require()``) will get activated as well. +add_activation_listener(lambda dist: dist.activate()) +working_set.entries=[] +# match order +list(map(working_set.add_entry, sys.path)) diff --git a/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..517b186 --- /dev/null +++ b/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,112 @@ +======================================================== +pyOpenSSL -- A Python wrapper around the OpenSSL library +======================================================== + +.. image:: https://readthedocs.org/projects/pyopenssl/badge/?version=stable + :target: https://pyopenssl.readthedocs.org/ + :alt: Stable Docs + +.. image:: https://travis-ci.org/pyca/pyopenssl.svg?branch=master + :target: https://travis-ci.org/pyca/pyopenssl + :alt: Build status + +.. image:: https://codecov.io/github/pyca/pyopenssl/coverage.svg?branch=master + :target: https://codecov.io/github/pyca/pyopenssl + :alt: Test coverage + + +High-level wrapper around a subset of the OpenSSL library. Includes + +* ``SSL.Connection`` objects, wrapping the methods of Python's portable sockets +* Callbacks written in Python +* Extensive error-handling mechanism, mirroring OpenSSL's error codes + +... and much more. + +You can find more information in the documentation_. +Development takes place on GitHub_. + + +Discussion +========== + +If you run into bugs, you can file them in our `issue tracker`_. + +We maintain a cryptography-dev_ mailing list for both user and development discussions. + +You can also join ``#cryptography-dev`` on Freenode to ask questions or get involved. + + +.. _documentation: https://pyopenssl.readthedocs.org/ +.. _`issue tracker`: https://github.com/pyca/pyopenssl/issues +.. _cryptography-dev: https://mail.python.org/mailman/listinfo/cryptography-dev +.. _GitHub: https://github.com/pyca/pyopenssl + + +Release Information +=================== + +16.0.0 (2016-03-19) +------------------- + +This is the first release under full stewardship of PyCA. 
+We have made *many* changes to make local development more pleasing.
+The test suite now passes both on Linux and OS X with OpenSSL 0.9.8, 1.0.1, and 1.0.2.
+It has been moved to `py.test <https://pytest.org/>`_, all CI test runs are part of `tox <https://testrun.org/tox/>`_ and the source code has been made fully `flake8 <https://flake8.readthedocs.org/>`_ compliant.
+
+We hope to have lowered the barrier for contributions significantly but are open to hearing about any remaining frustrations.
+
+
+Backward-incompatible changes:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Python 3.2 support has been dropped.
+  It never had significant real world usage and has been dropped by our main dependency ``cryptography``.
+  Affected users should upgrade to Python 3.3 or later.
+
+
+Deprecations:
+^^^^^^^^^^^^^
+
+- The support for EGD has been removed.
+  The only affected function ``OpenSSL.rand.egd()`` now uses ``os.urandom()`` to seed the internal PRNG instead.
+  Please see `pyca/cryptography#1636 <https://github.com/pyca/cryptography/pull/1636>`_ for more background information on this decision.
+  In accordance with our backward compatibility policy ``OpenSSL.rand.egd()`` will be *removed* no sooner than a year from the release of 16.0.0.
+
+  Please note that you should `use urandom <http://sockpuppet.org/blog/2014/02/25/safely-generate-random-numbers/>`_ for all your secure random number needs.
+- Python 2.6 support has been deprecated.
+  Our main dependency ``cryptography`` deprecated 2.6 in version 0.9 (2015-05-14) with no timetable for actually dropping it.
+  pyOpenSSL will drop Python 2.6 support once ``cryptography`` does.
+
+
+Changes:
+^^^^^^^^
+
+- Fixed ``OpenSSL.SSL.Context.set_session_id``, ``OpenSSL.SSL.Connection.renegotiate``, ``OpenSSL.SSL.Connection.renegotiate_pending``, and ``OpenSSL.SSL.Context.load_client_ca``.
+  They were lacking an implementation since 0.14.
+  `#422 <https://github.com/pyca/pyopenssl/pull/422>`_
+- Fixed segmentation fault when using keys larger than 4096-bit to sign data.
+  `#428 <https://github.com/pyca/pyopenssl/pull/428>`_
+- Fixed ``AttributeError`` when ``OpenSSL.SSL.Connection.get_app_data()`` was called before setting any app data.
+  `#304 <https://github.com/pyca/pyopenssl/pull/304>`_
+- Added ``OpenSSL.crypto.dump_publickey()`` to dump ``OpenSSL.crypto.PKey`` objects that represent public keys, and ``OpenSSL.crypto.load_publickey()`` to load such objects from serialized representations.
+  `#382 <https://github.com/pyca/pyopenssl/pull/382>`_
+- Added ``OpenSSL.crypto.dump_crl()`` to dump a certificate revocation list out to a string buffer.
+  `#368 <https://github.com/pyca/pyopenssl/pull/368>`_
+- Added ``OpenSSL.SSL.Connection.get_state_string()`` using the OpenSSL binding ``state_string_long``.
+  `#358 <https://github.com/pyca/pyopenssl/pull/358>`_
+- Added support for the ``socket.MSG_PEEK`` flag to ``OpenSSL.SSL.Connection.recv()`` and ``OpenSSL.SSL.Connection.recv_into()``.
+  `#294 <https://github.com/pyca/pyopenssl/pull/294>`_
+- Added ``OpenSSL.SSL.Connection.get_protocol_version()`` and ``OpenSSL.SSL.Connection.get_protocol_version_name()``.
+  `#244 <https://github.com/pyca/pyopenssl/pull/244>`_
+- Switched to ``utf8string`` mask by default.
+  OpenSSL formerly defaulted to a ``T61String`` if there were UTF-8 characters present.
+  This was changed to default to ``UTF8String`` in the config around 2005, but the actual code didn't change it until late last year.
+  This will default us to the setting that actually works.
+ To revert this you can call ``OpenSSL.crypto._lib.ASN1_STRING_set_default_mask_asc(b"default")``. + `#234 <https://github.com/pyca/pyopenssl/pull/234>`_ + +`Full changelog <https://pyopenssl.readthedocs.org/en/stable/changelog.html>`_. + + + diff --git a/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/METADATA b/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/METADATA new file mode 100644 index 0000000..c6edcb5 --- /dev/null +++ b/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/METADATA @@ -0,0 +1,142 @@ +Metadata-Version: 2.0 +Name: pyOpenSSL +Version: 16.0.0 +Summary: Python wrapper module around the OpenSSL library +Home-page: https://pyopenssl.readthedocs.org/ +Author: Hynek Schlawack +Author-email: hs@ox.cx +License: Apache License, Version 2.0 +Platform: UNKNOWN +Classifier: Development Status :: 6 - Mature +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Security :: Cryptography +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Networking +Requires-Dist: cryptography (>=1.3) +Requires-Dist: six (>=1.5.2) + +======================================================== +pyOpenSSL -- A Python wrapper around the OpenSSL library +======================================================== + +.. image:: https://readthedocs.org/projects/pyopenssl/badge/?version=stable + :target: https://pyopenssl.readthedocs.org/ + :alt: Stable Docs + +.. image:: https://travis-ci.org/pyca/pyopenssl.svg?branch=master + :target: https://travis-ci.org/pyca/pyopenssl + :alt: Build status + +.. image:: https://codecov.io/github/pyca/pyopenssl/coverage.svg?branch=master + :target: https://codecov.io/github/pyca/pyopenssl + :alt: Test coverage + + +High-level wrapper around a subset of the OpenSSL library. Includes + +* ``SSL.Connection`` objects, wrapping the methods of Python's portable sockets +* Callbacks written in Python +* Extensive error-handling mechanism, mirroring OpenSSL's error codes + +... and much more. + +You can find more information in the documentation_. +Development takes place on GitHub_. + + +Discussion +========== + +If you run into bugs, you can file them in our `issue tracker`_. + +We maintain a cryptography-dev_ mailing list for both user and development discussions. + +You can also join ``#cryptography-dev`` on Freenode to ask questions or get involved. + + +.. _documentation: https://pyopenssl.readthedocs.org/ +.. _`issue tracker`: https://github.com/pyca/pyopenssl/issues +.. _cryptography-dev: https://mail.python.org/mailman/listinfo/cryptography-dev +.. _GitHub: https://github.com/pyca/pyopenssl + + +Release Information +=================== + +16.0.0 (2016-03-19) +------------------- + +This is the first release under full stewardship of PyCA. 
+We have made *many* changes to make local development more pleasing.
+The test suite now passes both on Linux and OS X with OpenSSL 0.9.8, 1.0.1, and 1.0.2.
+It has been moved to `py.test <https://pytest.org/>`_, all CI test runs are part of `tox <https://testrun.org/tox/>`_ and the source code has been made fully `flake8 <https://flake8.readthedocs.org/>`_ compliant.
+
+We hope to have lowered the barrier for contributions significantly but are open to hearing about any remaining frustrations.
+
+
+Backward-incompatible changes:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Python 3.2 support has been dropped.
+  It never had significant real world usage and has been dropped by our main dependency ``cryptography``.
+  Affected users should upgrade to Python 3.3 or later.
+
+
+Deprecations:
+^^^^^^^^^^^^^
+
+- The support for EGD has been removed.
+  The only affected function ``OpenSSL.rand.egd()`` now uses ``os.urandom()`` to seed the internal PRNG instead.
+  Please see `pyca/cryptography#1636 <https://github.com/pyca/cryptography/pull/1636>`_ for more background information on this decision.
+  In accordance with our backward compatibility policy ``OpenSSL.rand.egd()`` will be *removed* no sooner than a year from the release of 16.0.0.
+
+  Please note that you should `use urandom <http://sockpuppet.org/blog/2014/02/25/safely-generate-random-numbers/>`_ for all your secure random number needs.
+- Python 2.6 support has been deprecated.
+  Our main dependency ``cryptography`` deprecated 2.6 in version 0.9 (2015-05-14) with no timetable for actually dropping it.
+  pyOpenSSL will drop Python 2.6 support once ``cryptography`` does.
+
+
+Changes:
+^^^^^^^^
+
+- Fixed ``OpenSSL.SSL.Context.set_session_id``, ``OpenSSL.SSL.Connection.renegotiate``, ``OpenSSL.SSL.Connection.renegotiate_pending``, and ``OpenSSL.SSL.Context.load_client_ca``.
+  They were lacking an implementation since 0.14.
+  `#422 <https://github.com/pyca/pyopenssl/pull/422>`_
+- Fixed segmentation fault when using keys larger than 4096-bit to sign data.
+  `#428 <https://github.com/pyca/pyopenssl/pull/428>`_
+- Fixed ``AttributeError`` when ``OpenSSL.SSL.Connection.get_app_data()`` was called before setting any app data.
+  `#304 <https://github.com/pyca/pyopenssl/pull/304>`_
+- Added ``OpenSSL.crypto.dump_publickey()`` to dump ``OpenSSL.crypto.PKey`` objects that represent public keys, and ``OpenSSL.crypto.load_publickey()`` to load such objects from serialized representations.
+  `#382 <https://github.com/pyca/pyopenssl/pull/382>`_
+- Added ``OpenSSL.crypto.dump_crl()`` to dump a certificate revocation list out to a string buffer.
+  `#368 <https://github.com/pyca/pyopenssl/pull/368>`_
+- Added ``OpenSSL.SSL.Connection.get_state_string()`` using the OpenSSL binding ``state_string_long``.
+  `#358 <https://github.com/pyca/pyopenssl/pull/358>`_
+- Added support for the ``socket.MSG_PEEK`` flag to ``OpenSSL.SSL.Connection.recv()`` and ``OpenSSL.SSL.Connection.recv_into()``.
+  `#294 <https://github.com/pyca/pyopenssl/pull/294>`_
+- Added ``OpenSSL.SSL.Connection.get_protocol_version()`` and ``OpenSSL.SSL.Connection.get_protocol_version_name()``.
+  `#244 <https://github.com/pyca/pyopenssl/pull/244>`_
+- Switched to ``utf8string`` mask by default.
+  OpenSSL formerly defaulted to a ``T61String`` if there were UTF-8 characters present.
+  This was changed to default to ``UTF8String`` in the config around 2005, but the actual code didn't change it until late last year.
+  This will default us to the setting that actually works.
+ To revert this you can call ``OpenSSL.crypto._lib.ASN1_STRING_set_default_mask_asc(b"default")``. + `#234 <https://github.com/pyca/pyopenssl/pull/234>`_ + +`Full changelog <https://pyopenssl.readthedocs.org/en/stable/changelog.html>`_. + + + diff --git a/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/RECORD b/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/RECORD new file mode 100644 index 0000000..0b14947 --- /dev/null +++ b/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/RECORD @@ -0,0 +1,20 @@ +OpenSSL/SSL.py,sha256=rGbZaQjcs2OIOeDKuPmKfDpHlPwg4dP0h32yH_2mVt0,65521 +OpenSSL/__init__.py,sha256=6h9AwEYEXCGfxgriJsSswfozR0JlP4Yz4DGoWDLvaV8,461 +OpenSSL/_util.py,sha256=0efm8GrZIISgT9ILBRX81YmsJ8OdnIOPOdAajOR-37Y,3881 +OpenSSL/crypto.py,sha256=wv80GiHIeCO-mWXZxoFs4wqHpmU-Fw0GMRdcLa-_1o4,90546 +OpenSSL/rand.py,sha256=1AqfXsI-qn2AmXfSaty4MlHkDU2sCVtrYyUnUOlVCnY,5795 +OpenSSL/tsafe.py,sha256=uG7elFAYcE4xf2JdmQarRPTQPEcbqFYcNNK4Qqssr1k,975 +OpenSSL/version.py,sha256=Gj3BcBVvw8HePcIZju9KcDxtnGKN_zpwD6WytZSxRq0,638 +pyOpenSSL-16.0.0.dist-info/DESCRIPTION.rst,sha256=JsN90m25_RzES7K27g9WSnK8LR95Qphe9dKi76AKb98,5354 +pyOpenSSL-16.0.0.dist-info/METADATA,sha256=LOAk9SFoLMHgf1ZdXA1utKNlsBh-QX96MfDLn-zPt9k,6620 +pyOpenSSL-16.0.0.dist-info/RECORD,, +pyOpenSSL-16.0.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +pyOpenSSL-16.0.0.dist-info/metadata.json,sha256=yaWguod9F34s-dOuVlkb7rLKyP2BhEqXLup6mmWVd34,1331 +pyOpenSSL-16.0.0.dist-info/top_level.txt,sha256=NNxWqS8hKNJh2cUXa1RZOMX62VJfyd8URo1TsYnR_MU,8 +OpenSSL/__pycache__/_util.cpython-34.pyc,, +OpenSSL/__pycache__/version.cpython-34.pyc,, +OpenSSL/__pycache__/__init__.cpython-34.pyc,, +OpenSSL/__pycache__/tsafe.cpython-34.pyc,, +OpenSSL/__pycache__/rand.cpython-34.pyc,, +OpenSSL/__pycache__/SSL.cpython-34.pyc,, +OpenSSL/__pycache__/crypto.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/WHEEL b/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/metadata.json b/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/metadata.json new file mode 100644 index 0000000..96a011e --- /dev/null +++ b/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 6 - Mature", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Security :: Cryptography", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Networking"], "extensions": {"python.details": {"contacts": [{"email": "hs@ox.cx", "name": "Hynek Schlawack", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": 
"https://pyopenssl.readthedocs.org/"}}}, "extras": [], "generator": "bdist_wheel (0.29.0)", "license": "Apache License, Version 2.0", "metadata_version": "2.0", "name": "pyOpenSSL", "run_requires": [{"requires": ["cryptography (>=1.3)", "six (>=1.5.2)"]}], "summary": "Python wrapper module around the OpenSSL library", "version": "16.0.0"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/top_level.txt b/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/top_level.txt new file mode 100644 index 0000000..effce34 --- /dev/null +++ b/lib/python3.4/site-packages/pyOpenSSL-16.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +OpenSSL diff --git a/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..33f1bd7 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/DESCRIPTION.rst @@ -0,0 +1,3 @@ +A pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208). + + diff --git a/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/METADATA b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/METADATA new file mode 100644 index 0000000..c8cfcb4 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/METADATA @@ -0,0 +1,29 @@ +Metadata-Version: 2.0 +Name: pyasn1 +Version: 0.1.9 +Summary: ASN.1 types and codecs +Home-page: http://sourceforge.net/projects/pyasn1/ +Author: Ilya Etingof <ilya@glas.net> +Author-email: ilya@glas.net +License: BSD +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Information Technology +Classifier: Intended Audience :: Science/Research +Classifier: Intended Audience :: System Administrators +Classifier: Intended Audience :: Telecommunications Industry +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Communications +Classifier: Topic :: Security :: Cryptography +Classifier: Topic :: Software Development :: Libraries :: Python Modules + +A pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208). 
+ + diff --git a/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/RECORD b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/RECORD new file mode 100644 index 0000000..b2647b6 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/RECORD @@ -0,0 +1,63 @@ +pyasn1/__init__.py,sha256=9wUs8dGZ3lSJ0n9QvCUpoHvPtB1PDymDiV5t737GcGc,175 +pyasn1/debug.py,sha256=GBDbND5nTDKmXcCCEhrpQKDyGquMUNsuP6rS6r-kVzk,3044 +pyasn1/error.py,sha256=I6-rNY7ksHWniQD__fS1ZTVRMP4gyN8pBUDeVaUIQlw,129 +pyasn1/codec/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 +pyasn1/codec/ber/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 +pyasn1/codec/ber/decoder.py,sha256=n45dHgLRXkhV1iAbLyqYcixK27-UsYfl19NglEySvk8,38266 +pyasn1/codec/ber/encoder.py,sha256=9gFd4BdM6yoVfusgbfUuJlm8a1vvqitZONjdy8Phc1Y,16317 +pyasn1/codec/ber/eoo.py,sha256=GxRYRnAEBMGBsYTKr-FhiodzWa0MkyDHPXrbxf7izRE,237 +pyasn1/codec/cer/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 +pyasn1/codec/cer/decoder.py,sha256=dXB6PXdhUuLQiOYeiDlJOx-obWASZMUpAHt7SD2akBY,1267 +pyasn1/codec/cer/encoder.py,sha256=FT-OIzv3gtOZtjabAefLewYljRcJWjW0NFSnNXNpX6M,4998 +pyasn1/codec/der/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 +pyasn1/codec/der/decoder.py,sha256=PgwaEhzihgnjnPr5nac7RNmJ3_yS7WPHcf10pzTI-8A,200 +pyasn1/codec/der/encoder.py,sha256=4J-b3uiVQr6vntmWsdFIteAjjj2HmbUgU18_ZXNhhT4,1139 +pyasn1/compat/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 +pyasn1/compat/binary.py,sha256=0lH-a1brfDwbKEOtG5RSVPLvyf0VKGGJI_8Ye3DWmJY,201 +pyasn1/compat/octets.py,sha256=tL1RMmEb_jVc8Dn6qKmYKvluaNTz2TIhuRkZSG2fgFU,709 +pyasn1/type/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 +pyasn1/type/base.py,sha256=LabMxhI2cb1GnL4j2qjRLewvBLI0pAJ3x95qeM8Lk0I,10706 +pyasn1/type/char.py,sha256=Y5jDlIzc2eKFWoU5Fp0G4Nlg_dpj-uX9Tkn2F68ukcY,2043 +pyasn1/type/constraint.py,sha256=YypUxhOGlHgTRqThRvzLbVjGpDKPqp1j1S0nasrsyOU,7279 +pyasn1/type/error.py,sha256=16b_v-xZv0G4exx1AIZpFx5vF9KBYZ9h0pvVj7VCkGs,84 +pyasn1/type/namedtype.py,sha256=NPsCg40U3Zlk46VTjf9L0sYrofsHliW2APk-T1xWius,5725 +pyasn1/type/namedval.py,sha256=E0Lt0_0gQ30n53-8Oe9nQJ6mi5H2Ga0lf25Y3EMcZno,2163 +pyasn1/type/tag.py,sha256=X8RoVOJZ-91mSikzk4EaAfOR5yTsNs_Eo3H1HhBeurE,4499 +pyasn1/type/tagmap.py,sha256=RBapxQ3qlFfVSYnn3lB9c2nt1ci4NKSAraf8MLcVvbA,2392 +pyasn1/type/univ.py,sha256=IruhRVyQhuooXuTrPMNi0OIRjlNazLVJj4jwtBr1VZU,44623 +pyasn1/type/useful.py,sha256=M5Z4dzObZqxyw7crb9W06jfIrm04hqZ3Ax0dsxoYWEY,565 +pyasn1-0.1.9.dist-info/DESCRIPTION.rst,sha256=mCZKqo4T-3U1mijgd31Nk6dbr14R9iokXNsYCwdAFDs,78 +pyasn1-0.1.9.dist-info/METADATA,sha256=K6mO8aa0CH9jifao8IRL-I9t4NDboUnwvwsjKOo0cHg,1093 +pyasn1-0.1.9.dist-info/RECORD,, +pyasn1-0.1.9.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 +pyasn1-0.1.9.dist-info/metadata.json,sha256=XQXbRRTa3jGg8Drz3Ojg-3qR2_rtXXap8LiMewEqftM,1097 +pyasn1-0.1.9.dist-info/top_level.txt,sha256=dnNEQt3nIDIO5mSCCOB5obQHrjDOUsRycdBujc2vrWE,7 +pyasn1-0.1.9.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +pyasn1/compat/__pycache__/octets.cpython-34.pyc,, +pyasn1/compat/__pycache__/__init__.cpython-34.pyc,, +pyasn1/__pycache__/__init__.cpython-34.pyc,, +pyasn1/codec/der/__pycache__/decoder.cpython-34.pyc,, +pyasn1/codec/der/__pycache__/encoder.cpython-34.pyc,, +pyasn1/type/__pycache__/error.cpython-34.pyc,, +pyasn1/type/__pycache__/char.cpython-34.pyc,, +pyasn1/codec/cer/__pycache__/decoder.cpython-34.pyc,, 
+pyasn1/codec/ber/__pycache__/encoder.cpython-34.pyc,, +pyasn1/type/__pycache__/useful.cpython-34.pyc,, +pyasn1/__pycache__/debug.cpython-34.pyc,, +pyasn1/type/__pycache__/tag.cpython-34.pyc,, +pyasn1/type/__pycache__/__init__.cpython-34.pyc,, +pyasn1/__pycache__/error.cpython-34.pyc,, +pyasn1/codec/der/__pycache__/__init__.cpython-34.pyc,, +pyasn1/type/__pycache__/constraint.cpython-34.pyc,, +pyasn1/type/__pycache__/univ.cpython-34.pyc,, +pyasn1/type/__pycache__/base.cpython-34.pyc,, +pyasn1/codec/cer/__pycache__/__init__.cpython-34.pyc,, +pyasn1/codec/ber/__pycache__/decoder.cpython-34.pyc,, +pyasn1/type/__pycache__/namedtype.cpython-34.pyc,, +pyasn1/compat/__pycache__/binary.cpython-34.pyc,, +pyasn1/type/__pycache__/tagmap.cpython-34.pyc,, +pyasn1/codec/__pycache__/__init__.cpython-34.pyc,, +pyasn1/codec/ber/__pycache__/eoo.cpython-34.pyc,, +pyasn1/type/__pycache__/namedval.cpython-34.pyc,, +pyasn1/codec/cer/__pycache__/encoder.cpython-34.pyc,, +pyasn1/codec/ber/__pycache__/__init__.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/WHEEL b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/WHEEL new file mode 100644 index 0000000..0de529b --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/metadata.json b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/metadata.json new file mode 100644 index 0000000..c2e7359 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "ASN.1 types and codecs", "classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: Education", "Intended Audience :: Information Technology", "Intended Audience :: Science/Research", "Intended Audience :: System Administrators", "Intended Audience :: Telecommunications Industry", "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Topic :: Communications", "Topic :: Security :: Cryptography", "Topic :: Software Development :: Libraries :: Python Modules"], "extensions": {"python.details": {"project_urls": {"Home": "http://sourceforge.net/projects/pyasn1/"}, "contacts": [{"email": "ilya@glas.net", "name": "Ilya Etingof <ilya@glas.net>", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "license": "BSD", "metadata_version": "2.0", "name": "pyasn1", "platform": "any", "version": "0.1.9"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/top_level.txt b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/top_level.txt new file mode 100644 index 0000000..38fe414 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/top_level.txt @@ -0,0 +1 @@ +pyasn1 diff --git a/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/zip-safe b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1-0.1.9.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/lib/python3.4/site-packages/pyasn1/__init__.py b/lib/python3.4/site-packages/pyasn1/__init__.py new file mode 100644 index 0000000..5f09300 --- /dev/null 
+++ b/lib/python3.4/site-packages/pyasn1/__init__.py @@ -0,0 +1,8 @@ +import sys + +# http://www.python.org/dev/peps/pep-0396/ +__version__ = '0.1.9' + +if sys.version_info[:2] < (2, 4): + raise RuntimeError('PyASN1 requires Python 2.4 or later') + diff --git a/lib/python3.4/site-packages/pyasn1/codec/__init__.py b/lib/python3.4/site-packages/pyasn1/codec/__init__.py new file mode 100644 index 0000000..8c3066b --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/codec/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/lib/python3.4/site-packages/pyasn1/codec/ber/__init__.py b/lib/python3.4/site-packages/pyasn1/codec/ber/__init__.py new file mode 100644 index 0000000..8c3066b --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/codec/ber/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/lib/python3.4/site-packages/pyasn1/codec/ber/decoder.py b/lib/python3.4/site-packages/pyasn1/codec/ber/decoder.py new file mode 100644 index 0000000..61bfbce --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/codec/ber/decoder.py @@ -0,0 +1,841 @@ +# BER decoder +from pyasn1.type import tag, univ, char, useful, tagmap +from pyasn1.codec.ber import eoo +from pyasn1.compat.octets import oct2int, isOctetsType +from pyasn1 import debug, error + +class AbstractDecoder: + protoComponent = None + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,)) + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,)) + +class AbstractSimpleDecoder(AbstractDecoder): + tagFormats = (tag.tagFormatSimple,) + def _createComponent(self, asn1Spec, tagSet, value=None): + if tagSet[0][1] not in self.tagFormats: + raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType())) + if asn1Spec is None: + return self.protoComponent.clone(value, tagSet) + elif value is None: + return asn1Spec + else: + return asn1Spec.clone(value) + +class AbstractConstructedDecoder(AbstractDecoder): + tagFormats = (tag.tagFormatConstructed,) + def _createComponent(self, asn1Spec, tagSet, value=None): + if tagSet[0][1] not in self.tagFormats: + raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType())) + if asn1Spec is None: + return self.protoComponent.clone(tagSet) + else: + return asn1Spec.clone() + +class ExplicitTagDecoder(AbstractSimpleDecoder): + protoComponent = univ.Any('') + tagFormats = (tag.tagFormatConstructed,) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + if substrateFun: + return substrateFun( + self._createComponent(asn1Spec, tagSet, ''), + substrate, length + ) + head, tail = substrate[:length], substrate[length:] + value, _ = decodeFun(head, asn1Spec, tagSet, length) + return value, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + if substrateFun: + return substrateFun( + self._createComponent(asn1Spec, tagSet, ''), + substrate, length + ) + value, substrate = decodeFun(substrate, asn1Spec, tagSet, length) + terminator, substrate = decodeFun(substrate, allowEoo=True) + if eoo.endOfOctets.isSameTypeWith(terminator) 
and \ + terminator == eoo.endOfOctets: + return value, substrate + else: + raise error.PyAsn1Error('Missing end-of-octets terminator') + +explicitTagDecoder = ExplicitTagDecoder() + +class IntegerDecoder(AbstractSimpleDecoder): + protoComponent = univ.Integer(0) + precomputedValues = { + '\x00': 0, + '\x01': 1, + '\x02': 2, + '\x03': 3, + '\x04': 4, + '\x05': 5, + '\x06': 6, + '\x07': 7, + '\x08': 8, + '\x09': 9, + '\xff': -1, + '\xfe': -2, + '\xfd': -3, + '\xfc': -4, + '\xfb': -5 + } + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if not head: + return self._createComponent(asn1Spec, tagSet, 0), tail + if head in self.precomputedValues: + value = self.precomputedValues[head] + else: + firstOctet = oct2int(head[0]) + if firstOctet & 0x80: + value = -1 + else: + value = 0 + for octet in head: + value = value << 8 | oct2int(octet) + return self._createComponent(asn1Spec, tagSet, value), tail + +class BooleanDecoder(IntegerDecoder): + protoComponent = univ.Boolean(0) + def _createComponent(self, asn1Spec, tagSet, value=None): + return IntegerDecoder._createComponent(self, asn1Spec, tagSet, value and 1 or 0) + +class BitStringDecoder(AbstractSimpleDecoder): + protoComponent = univ.BitString(()) + tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if tagSet[0][1] == tag.tagFormatSimple: # XXX what tag to check? + if not head: + raise error.PyAsn1Error('Empty substrate') + trailingBits = oct2int(head[0]) + if trailingBits > 7: + raise error.PyAsn1Error( + 'Trailing bits overflow %s' % trailingBits + ) + head = head[1:] + lsb = p = 0; l = len(head)-1; b = [] + while p <= l: + if p == l: + lsb = trailingBits + j = 7 + o = oct2int(head[p]) + while j >= lsb: + b.append((o>>j)&0x01) + j = j - 1 + p = p + 1 + return self._createComponent(asn1Spec, tagSet, b), tail + r = self._createComponent(asn1Spec, tagSet, ()) + if substrateFun: + return substrateFun(r, substrate, length) + while head: + component, head = decodeFun(head, self.protoComponent) + r = r + component + return r, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + r = self._createComponent(asn1Spec, tagSet, '') + if substrateFun: + return substrateFun(r, substrate, length) + while substrate: + component, substrate = decodeFun(substrate, self.protoComponent, + allowEoo=True) + if eoo.endOfOctets.isSameTypeWith(component) and \ + component == eoo.endOfOctets: + break + r = r + component + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + return r, substrate + +class OctetStringDecoder(AbstractSimpleDecoder): + protoComponent = univ.OctetString('') + tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if tagSet[0][1] == tag.tagFormatSimple: # XXX what tag to check? 
+ return self._createComponent(asn1Spec, tagSet, head), tail + r = self._createComponent(asn1Spec, tagSet, '') + if substrateFun: + return substrateFun(r, substrate, length) + while head: + component, head = decodeFun(head, self.protoComponent) + r = r + component + return r, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + r = self._createComponent(asn1Spec, tagSet, '') + if substrateFun: + return substrateFun(r, substrate, length) + while substrate: + component, substrate = decodeFun(substrate, self.protoComponent, + allowEoo=True) + if eoo.endOfOctets.isSameTypeWith(component) and \ + component == eoo.endOfOctets: + break + r = r + component + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + return r, substrate + +class NullDecoder(AbstractSimpleDecoder): + protoComponent = univ.Null('') + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + r = self._createComponent(asn1Spec, tagSet) + if head: + raise error.PyAsn1Error('Unexpected %d-octet substrate for Null' % length) + return r, tail + +class ObjectIdentifierDecoder(AbstractSimpleDecoder): + protoComponent = univ.ObjectIdentifier(()) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if not head: + raise error.PyAsn1Error('Empty substrate') + + oid = () + index = 0 + substrateLen = len(head) + while index < substrateLen: + subId = oct2int(head[index]) + index += 1 + if subId < 128: + oid = oid + (subId,) + elif subId > 128: + # Construct subid from a number of octets + nextSubId = subId + subId = 0 + while nextSubId >= 128: + subId = (subId << 7) + (nextSubId & 0x7F) + if index >= substrateLen: + raise error.SubstrateUnderrunError( + 'Short substrate for sub-OID past %s' % (oid,) + ) + nextSubId = oct2int(head[index]) + index += 1 + oid = oid + ((subId << 7) + nextSubId,) + elif subId == 128: + # ASN.1 spec forbids leading zeros (0x80) in OID + # encoding, tolerating it opens a vulnerability. 
See + # http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf + # page 7 + raise error.PyAsn1Error('Invalid octet 0x80 in OID encoding') + + # Decode two leading arcs + if 0 <= oid[0] <= 39: + oid = (0,) + oid + elif 40 <= oid[0] <= 79: + oid = (1, oid[0]-40) + oid[1:] + elif oid[0] >= 80: + oid = (2, oid[0]-80) + oid[1:] + else: + raise error.PyAsn1Error('Malformed first OID octet: %s' % head[0]) + + return self._createComponent(asn1Spec, tagSet, oid), tail + +class RealDecoder(AbstractSimpleDecoder): + protoComponent = univ.Real() + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if not head: + return self._createComponent(asn1Spec, tagSet, 0.0), tail + fo = oct2int(head[0]); head = head[1:] + if fo & 0x80: # binary encoding + if not head: + raise error.PyAsn1Error("Incomplete floating-point value") + n = (fo & 0x03) + 1 + if n == 4: + n = oct2int(head[0]) + head = head[1:] + eo, head = head[:n], head[n:] + if not eo or not head: + raise error.PyAsn1Error('Real exponent screwed') + e = oct2int(eo[0]) & 0x80 and -1 or 0 + while eo: # exponent + e <<= 8 + e |= oct2int(eo[0]) + eo = eo[1:] + b = fo >> 4 & 0x03 # base bits + if b > 2: + raise error.PyAsn1Error('Illegal Real base') + if b == 1: # encbase = 8 + e *= 3 + elif b == 2: # encbase = 16 + e *= 4 + p = 0 + while head: # value + p <<= 8 + p |= oct2int(head[0]) + head = head[1:] + if fo & 0x40: # sign bit + p = -p + sf = fo >> 2 & 0x03 # scale bits + p *= 2**sf + value = (p, 2, e) + elif fo & 0x40: # infinite value + value = fo & 0x01 and '-inf' or 'inf' + elif fo & 0xc0 == 0: # character encoding + if not head: + raise error.PyAsn1Error("Incomplete floating-point value") + try: + if fo & 0x3 == 0x1: # NR1 + value = (int(head), 10, 0) + elif fo & 0x3 == 0x2: # NR2 + value = float(head) + elif fo & 0x3 == 0x3: # NR3 + value = float(head) + else: + raise error.SubstrateUnderrunError( + 'Unknown NR (tag %s)' % fo + ) + except ValueError: + raise error.SubstrateUnderrunError( + 'Bad character Real syntax' + ) + else: + raise error.SubstrateUnderrunError( + 'Unknown encoding (tag %s)' % fo + ) + return self._createComponent(asn1Spec, tagSet, value), tail + +class SequenceDecoder(AbstractConstructedDecoder): + protoComponent = univ.Sequence() + def _getComponentTagMap(self, r, idx): + try: + return r.getComponentTagMapNearPosition(idx) + except error.PyAsn1Error: + return + + def _getComponentPositionByType(self, r, t, idx): + return r.getComponentPositionNearType(t, idx) + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + r = self._createComponent(asn1Spec, tagSet) + idx = 0 + if substrateFun: + return substrateFun(r, substrate, length) + while head: + asn1Spec = self._getComponentTagMap(r, idx) + component, head = decodeFun(head, asn1Spec) + idx = self._getComponentPositionByType( + r, component.getEffectiveTagSet(), idx + ) + r.setComponentByPosition(idx, component, asn1Spec is None) + idx = idx + 1 + r.setDefaultComponents() + r.verifySizeSpec() + return r, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + r = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(r, substrate, length) + idx = 0 + while substrate: + asn1Spec = self._getComponentTagMap(r, idx) + component, substrate = 
decodeFun(substrate, asn1Spec, allowEoo=True) + if eoo.endOfOctets.isSameTypeWith(component) and \ + component == eoo.endOfOctets: + break + idx = self._getComponentPositionByType( + r, component.getEffectiveTagSet(), idx + ) + r.setComponentByPosition(idx, component, asn1Spec is None) + idx = idx + 1 + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + r.setDefaultComponents() + r.verifySizeSpec() + return r, substrate + +class SequenceOfDecoder(AbstractConstructedDecoder): + protoComponent = univ.SequenceOf() + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + r = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(r, substrate, length) + asn1Spec = r.getComponentType() + idx = 0 + while head: + component, head = decodeFun(head, asn1Spec) + r.setComponentByPosition(idx, component, asn1Spec is None) + idx = idx + 1 + r.verifySizeSpec() + return r, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + r = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(r, substrate, length) + asn1Spec = r.getComponentType() + idx = 0 + while substrate: + component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True) + if eoo.endOfOctets.isSameTypeWith(component) and \ + component == eoo.endOfOctets: + break + r.setComponentByPosition(idx, component, asn1Spec is None) + idx = idx + 1 + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + r.verifySizeSpec() + return r, substrate + +class SetDecoder(SequenceDecoder): + protoComponent = univ.Set() + def _getComponentTagMap(self, r, idx): + return r.getComponentTagMap() + + def _getComponentPositionByType(self, r, t, idx): + nextIdx = r.getComponentPositionByType(t) + if nextIdx is None: + return idx + else: + return nextIdx + +class SetOfDecoder(SequenceOfDecoder): + protoComponent = univ.SetOf() + +class ChoiceDecoder(AbstractConstructedDecoder): + protoComponent = univ.Choice() + tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + r = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(r, substrate, length) + if r.getTagSet() == tagSet: # explicitly tagged Choice + component, head = decodeFun( + head, r.getComponentTagMap() + ) + else: + component, head = decodeFun( + head, r.getComponentTagMap(), tagSet, length, state + ) + if isinstance(component, univ.Choice): + effectiveTagSet = component.getEffectiveTagSet() + else: + effectiveTagSet = component.getTagSet() + r.setComponentByType(effectiveTagSet, component, 0, asn1Spec is None) + return r, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + r = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(r, substrate, length) + if r.getTagSet() == tagSet: # explicitly tagged Choice + component, substrate = decodeFun(substrate, r.getComponentTagMap()) + # eat up EOO marker + eooMarker, substrate = decodeFun(substrate, allowEoo=True) + if not eoo.endOfOctets.isSameTypeWith(eooMarker) or \ + eooMarker != eoo.endOfOctets: + raise error.PyAsn1Error('No EOO seen before substrate ends') + 
else: + component, substrate= decodeFun( + substrate, r.getComponentTagMap(), tagSet, length, state + ) + if isinstance(component, univ.Choice): + effectiveTagSet = component.getEffectiveTagSet() + else: + effectiveTagSet = component.getTagSet() + r.setComponentByType(effectiveTagSet, component, 0, asn1Spec is None) + return r, substrate + +class AnyDecoder(AbstractSimpleDecoder): + protoComponent = univ.Any() + tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + if asn1Spec is None or \ + asn1Spec is not None and tagSet != asn1Spec.getTagSet(): + # untagged Any container, recover inner header substrate + length = length + len(fullSubstrate) - len(substrate) + substrate = fullSubstrate + if substrateFun: + return substrateFun(self._createComponent(asn1Spec, tagSet), + substrate, length) + head, tail = substrate[:length], substrate[length:] + return self._createComponent(asn1Spec, tagSet, value=head), tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + if asn1Spec is not None and tagSet == asn1Spec.getTagSet(): + # tagged Any type -- consume header substrate + header = '' + else: + # untagged Any, recover header substrate + header = fullSubstrate[:-len(substrate)] + + r = self._createComponent(asn1Spec, tagSet, header) + + # Any components do not inherit initial tag + asn1Spec = self.protoComponent + + if substrateFun: + return substrateFun(r, substrate, length) + while substrate: + component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True) + if eoo.endOfOctets.isSameTypeWith(component) and \ + component == eoo.endOfOctets: + break + r = r + component + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + return r, substrate + +# character string types +class UTF8StringDecoder(OctetStringDecoder): + protoComponent = char.UTF8String() +class NumericStringDecoder(OctetStringDecoder): + protoComponent = char.NumericString() +class PrintableStringDecoder(OctetStringDecoder): + protoComponent = char.PrintableString() +class TeletexStringDecoder(OctetStringDecoder): + protoComponent = char.TeletexString() +class VideotexStringDecoder(OctetStringDecoder): + protoComponent = char.VideotexString() +class IA5StringDecoder(OctetStringDecoder): + protoComponent = char.IA5String() +class GraphicStringDecoder(OctetStringDecoder): + protoComponent = char.GraphicString() +class VisibleStringDecoder(OctetStringDecoder): + protoComponent = char.VisibleString() +class GeneralStringDecoder(OctetStringDecoder): + protoComponent = char.GeneralString() +class UniversalStringDecoder(OctetStringDecoder): + protoComponent = char.UniversalString() +class BMPStringDecoder(OctetStringDecoder): + protoComponent = char.BMPString() + +# "useful" types +class ObjectDescriptorDecoder(OctetStringDecoder): + protoComponent = useful.ObjectDescriptor() +class GeneralizedTimeDecoder(OctetStringDecoder): + protoComponent = useful.GeneralizedTime() +class UTCTimeDecoder(OctetStringDecoder): + protoComponent = useful.UTCTime() + +tagMap = { + univ.Integer.tagSet: IntegerDecoder(), + univ.Boolean.tagSet: BooleanDecoder(), + univ.BitString.tagSet: BitStringDecoder(), + univ.OctetString.tagSet: OctetStringDecoder(), + univ.Null.tagSet: NullDecoder(), + univ.ObjectIdentifier.tagSet: ObjectIdentifierDecoder(), + univ.Enumerated.tagSet: IntegerDecoder(), + univ.Real.tagSet: RealDecoder(), + 
univ.Sequence.tagSet: SequenceDecoder(), # conflicts with SequenceOf + univ.Set.tagSet: SetDecoder(), # conflicts with SetOf + univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any + # character string types + char.UTF8String.tagSet: UTF8StringDecoder(), + char.NumericString.tagSet: NumericStringDecoder(), + char.PrintableString.tagSet: PrintableStringDecoder(), + char.TeletexString.tagSet: TeletexStringDecoder(), + char.VideotexString.tagSet: VideotexStringDecoder(), + char.IA5String.tagSet: IA5StringDecoder(), + char.GraphicString.tagSet: GraphicStringDecoder(), + char.VisibleString.tagSet: VisibleStringDecoder(), + char.GeneralString.tagSet: GeneralStringDecoder(), + char.UniversalString.tagSet: UniversalStringDecoder(), + char.BMPString.tagSet: BMPStringDecoder(), + # useful types + useful.ObjectDescriptor.tagSet: ObjectDescriptorDecoder(), + useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(), + useful.UTCTime.tagSet: UTCTimeDecoder() +} + +# Type-to-codec map for ambiguous ASN.1 types +typeMap = { + univ.Set.typeId: SetDecoder(), + univ.SetOf.typeId: SetOfDecoder(), + univ.Sequence.typeId: SequenceDecoder(), + univ.SequenceOf.typeId: SequenceOfDecoder(), + univ.Choice.typeId: ChoiceDecoder(), + univ.Any.typeId: AnyDecoder() +} + +( stDecodeTag, stDecodeLength, stGetValueDecoder, stGetValueDecoderByAsn1Spec, + stGetValueDecoderByTag, stTryAsExplicitTag, stDecodeValue, + stDumpRawValue, stErrorCondition, stStop ) = [x for x in range(10)] + +class Decoder: + defaultErrorState = stErrorCondition +# defaultErrorState = stDumpRawValue + defaultRawDecoder = AnyDecoder() + supportIndefLength = True + def __init__(self, tagMap, typeMap={}): + self.__tagMap = tagMap + self.__typeMap = typeMap + # Tag & TagSet objects caches + self.__tagCache = {} + self.__tagSetCache = {} + + def __call__(self, substrate, asn1Spec=None, tagSet=None, + length=None, state=stDecodeTag, recursiveFlag=1, + substrateFun=None, allowEoo=False): + if debug.logger & debug.flagDecoder: + debug.logger('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate))) + fullSubstrate = substrate + while state != stStop: + if state == stDecodeTag: + if not substrate: + raise error.SubstrateUnderrunError( + 'Short octet stream on tag decoding' + ) + if not isOctetsType(substrate) and \ + not isinstance(substrate, univ.OctetString): + raise error.PyAsn1Error('Bad octet stream type') + # Decode tag + firstOctet = substrate[0] + substrate = substrate[1:] + if firstOctet in self.__tagCache: + lastTag = self.__tagCache[firstOctet] + else: + t = oct2int(firstOctet) + # Look for end-of-octets sentinel + if t == 0: + if substrate and oct2int(substrate[0]) == 0: + if allowEoo and self.supportIndefLength: + debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets sentinel found') + value, substrate = eoo.endOfOctets, substrate[1:] + state = stStop + continue + else: + raise error.PyAsn1Error('Unexpected end-of-contents sentinel') + else: + raise error.PyAsn1Error('Zero tag encountered') + tagClass = t&0xC0 + tagFormat = t&0x20 + tagId = t&0x1F + if tagId == 0x1F: + tagId = 0 + while 1: + if not substrate: + raise error.SubstrateUnderrunError( + 'Short octet stream on long tag decoding' + ) + t = oct2int(substrate[0]) + tagId = tagId << 7 | (t&0x7F) + substrate = substrate[1:] + if not t&0x80: + break + lastTag = tag.Tag( + tagClass=tagClass, tagFormat=tagFormat, tagId=tagId + ) + if tagId < 31: + # cache short tags 
+ self.__tagCache[firstOctet] = lastTag + if tagSet is None: + if firstOctet in self.__tagSetCache: + tagSet = self.__tagSetCache[firstOctet] + else: + # base tag not recovered + tagSet = tag.TagSet((), lastTag) + if firstOctet in self.__tagCache: + self.__tagSetCache[firstOctet] = tagSet + else: + tagSet = lastTag + tagSet + state = stDecodeLength + debug.logger and debug.logger & debug.flagDecoder and debug.logger('tag decoded into %s, decoding length' % tagSet) + if state == stDecodeLength: + # Decode length + if not substrate: + raise error.SubstrateUnderrunError( + 'Short octet stream on length decoding' + ) + firstOctet = oct2int(substrate[0]) + if firstOctet == 128: + size = 1 + length = -1 + elif firstOctet < 128: + length, size = firstOctet, 1 + else: + size = firstOctet & 0x7F + # encoded in size bytes + length = 0 + lengthString = substrate[1:size+1] + # missing check on maximum size, which shouldn't be a + # problem, we can handle more than is possible + if len(lengthString) != size: + raise error.SubstrateUnderrunError( + '%s<%s at %s' % + (size, len(lengthString), tagSet) + ) + for char in lengthString: + length = (length << 8) | oct2int(char) + size = size + 1 + substrate = substrate[size:] + if length != -1 and len(substrate) < length: + raise error.SubstrateUnderrunError( + '%d-octet short' % (length - len(substrate)) + ) + if length == -1 and not self.supportIndefLength: + error.PyAsn1Error('Indefinite length encoding not supported by this codec') + state = stGetValueDecoder + debug.logger and debug.logger & debug.flagDecoder and debug.logger('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length]))) + if state == stGetValueDecoder: + if asn1Spec is None: + state = stGetValueDecoderByTag + else: + state = stGetValueDecoderByAsn1Spec + # + # There're two ways of creating subtypes in ASN.1 what influences + # decoder operation. These methods are: + # 1) Either base types used in or no IMPLICIT tagging has been + # applied on subtyping. + # 2) Subtype syntax drops base type information (by means of + # IMPLICIT tagging. + # The first case allows for complete tag recovery from substrate + # while the second one requires original ASN.1 type spec for + # decoding. + # + # In either case a set of tags (tagSet) is coming from substrate + # in an incremental, tag-by-tag fashion (this is the case of + # EXPLICIT tag which is most basic). Outermost tag comes first + # from the wire. + # + if state == stGetValueDecoderByTag: + if tagSet in self.__tagMap: + concreteDecoder = self.__tagMap[tagSet] + else: + concreteDecoder = None + if concreteDecoder: + state = stDecodeValue + else: + _k = tagSet[:1] + if _k in self.__tagMap: + concreteDecoder = self.__tagMap[_k] + else: + concreteDecoder = None + if concreteDecoder: + state = stDecodeValue + else: + state = stTryAsExplicitTag + if debug.logger and debug.logger & debug.flagDecoder: + debug.logger('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state == stDecodeValue and 'value' or 'as explicit tag')) + debug.scope.push(concreteDecoder is None and '?' 
or concreteDecoder.protoComponent.__class__.__name__) + if state == stGetValueDecoderByAsn1Spec: + if isinstance(asn1Spec, (dict, tagmap.TagMap)): + if tagSet in asn1Spec: + __chosenSpec = asn1Spec[tagSet] + else: + __chosenSpec = None + if debug.logger and debug.logger & debug.flagDecoder: + debug.logger('candidate ASN.1 spec is a map of:') + for t, v in asn1Spec.getPosMap().items(): + debug.logger(' %s -> %s' % (t, v.__class__.__name__)) + if asn1Spec.getNegMap(): + debug.logger('but neither of: ') + for t, v in asn1Spec.getNegMap().items(): + debug.logger(' %s -> %s' % (t, v.__class__.__name__)) + debug.logger('new candidate ASN.1 spec is %s, chosen by %s' % (__chosenSpec is None and '<none>' or __chosenSpec.prettyPrintType(), tagSet)) + else: + __chosenSpec = asn1Spec + debug.logger and debug.logger & debug.flagDecoder and debug.logger('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__) + if __chosenSpec is not None and ( + tagSet == __chosenSpec.getTagSet() or \ + tagSet in __chosenSpec.getTagMap() + ): + # use base type for codec lookup to recover untagged types + baseTagSet = __chosenSpec.baseTagSet + if __chosenSpec.typeId is not None and \ + __chosenSpec.typeId in self.__typeMap: + # ambiguous type + concreteDecoder = self.__typeMap[__chosenSpec.typeId] + debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen for an ambiguous type by type ID %s' % (__chosenSpec.typeId,)) + elif baseTagSet in self.__tagMap: + # base type or tagged subtype + concreteDecoder = self.__tagMap[baseTagSet] + debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen by base %s' % (baseTagSet,)) + else: + concreteDecoder = None + if concreteDecoder: + asn1Spec = __chosenSpec + state = stDecodeValue + else: + state = stTryAsExplicitTag + else: + concreteDecoder = None + state = stTryAsExplicitTag + if debug.logger and debug.logger & debug.flagDecoder: + debug.logger('codec %s chosen by ASN.1 spec, decoding %s' % (state == stDecodeValue and concreteDecoder.__class__.__name__ or "<none>", state == stDecodeValue and 'value' or 'as explicit tag')) + debug.scope.push(__chosenSpec is None and '?' 
or __chosenSpec.__class__.__name__) + if state == stTryAsExplicitTag: + if tagSet and \ + tagSet[0][1] == tag.tagFormatConstructed and \ + tagSet[0][0] != tag.tagClassUniversal: + # Assume explicit tagging + concreteDecoder = explicitTagDecoder + state = stDecodeValue + else: + concreteDecoder = None + state = self.defaultErrorState + debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state == stDecodeValue and 'value' or 'as failure')) + if state == stDumpRawValue: + concreteDecoder = self.defaultRawDecoder + debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s chosen, decoding value' % concreteDecoder.__class__.__name__) + state = stDecodeValue + if state == stDecodeValue: + if recursiveFlag == 0 and not substrateFun: # legacy + substrateFun = lambda a,b,c: (a,b[:c]) + if length == -1: # indef length + value, substrate = concreteDecoder.indefLenValueDecoder( + fullSubstrate, substrate, asn1Spec, tagSet, length, + stGetValueDecoder, self, substrateFun + ) + else: + value, substrate = concreteDecoder.valueDecoder( + fullSubstrate, substrate, asn1Spec, tagSet, length, + stGetValueDecoder, self, substrateFun + ) + state = stStop + debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, value.prettyPrint(), substrate and debug.hexdump(substrate) or '<none>')) + if state == stErrorCondition: + raise error.PyAsn1Error( + '%s not in asn1Spec: %s' % (tagSet, asn1Spec) + ) + if debug.logger and debug.logger & debug.flagDecoder: + debug.scope.pop() + debug.logger('decoder left scope %s, call completed' % debug.scope) + return value, substrate + +decode = Decoder(tagMap, typeMap) + +# XXX +# non-recursive decoding; return position rather than substrate diff --git a/lib/python3.4/site-packages/pyasn1/codec/ber/encoder.py b/lib/python3.4/site-packages/pyasn1/codec/ber/encoder.py new file mode 100644 index 0000000..0fb4ae7 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/codec/ber/encoder.py @@ -0,0 +1,433 @@ +# BER encoder +from pyasn1.type import base, tag, univ, char, useful +from pyasn1.codec.ber import eoo +from pyasn1.compat.octets import int2oct, oct2int, ints2octs, null, str2octs +from pyasn1 import debug, error + +class Error(Exception): pass + +class AbstractItemEncoder: + supportIndefLenMode = 1 + def encodeTag(self, t, isConstructed): + tagClass, tagFormat, tagId = t.asTuple() # this is a hotspot + v = tagClass | tagFormat + if isConstructed: + v = v|tag.tagFormatConstructed + if tagId < 31: + return int2oct(v|tagId) + else: + s = int2oct(tagId&0x7f) + tagId = tagId >> 7 + while tagId: + s = int2oct(0x80|(tagId&0x7f)) + s + tagId = tagId >> 7 + return int2oct(v|0x1F) + s + + def encodeLength(self, length, defMode): + if not defMode and self.supportIndefLenMode: + return int2oct(0x80) + if length < 0x80: + return int2oct(length) + else: + substrate = null + while length: + substrate = int2oct(length&0xff) + substrate + length = length >> 8 + substrateLen = len(substrate) + if substrateLen > 126: + raise Error('Length octets overflow (%d)' % substrateLen) + return int2oct(0x80 | substrateLen) + substrate + + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + raise Error('Not implemented') + + def _encodeEndOfOctets(self, encodeFun, defMode): + if defMode or not self.supportIndefLenMode: + 
return null + else: + return encodeFun(eoo.endOfOctets, defMode) + + def encode(self, encodeFun, value, defMode, maxChunkSize): + substrate, isConstructed = self.encodeValue( + encodeFun, value, defMode, maxChunkSize + ) + tagSet = value.getTagSet() + if tagSet: + if not isConstructed: # primitive form implies definite mode + defMode = 1 + return self.encodeTag( + tagSet[-1], isConstructed + ) + self.encodeLength( + len(substrate), defMode + ) + substrate + self._encodeEndOfOctets(encodeFun, defMode) + else: + return substrate # untagged value + +class EndOfOctetsEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return null, 0 + +class ExplicitlyTaggedItemEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if isinstance(value, base.AbstractConstructedAsn1Item): + value = value.clone(tagSet=value.getTagSet()[:-1], + cloneValueFlag=1) + else: + value = value.clone(tagSet=value.getTagSet()[:-1]) + return encodeFun(value, defMode, maxChunkSize), 1 + +explicitlyTaggedItemEncoder = ExplicitlyTaggedItemEncoder() + +class BooleanEncoder(AbstractItemEncoder): + supportIndefLenMode = 0 + _true = ints2octs((1,)) + _false = ints2octs((0,)) + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return value and self._true or self._false, 0 + +class IntegerEncoder(AbstractItemEncoder): + supportIndefLenMode = 0 + supportCompactZero = False + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if value == 0: # shortcut for zero value + if self.supportCompactZero: + # this seems to be a correct way for encoding zeros + return null, 0 + else: + # this seems to be a widespread way for encoding zeros + return ints2octs((0,)), 0 + octets = [] + value = int(value) # to save on ops on asn1 type + while 1: + octets.insert(0, value & 0xff) + if value == 0 or value == -1: + break + value = value >> 8 + if value == 0 and octets[0] & 0x80: + octets.insert(0, 0) + while len(octets) > 1 and \ + (octets[0] == 0 and octets[1] & 0x80 == 0 or \ + octets[0] == 0xff and octets[1] & 0x80 != 0): + del octets[0] + return ints2octs(octets), 0 + +class BitStringEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if not maxChunkSize or len(value) <= maxChunkSize*8: + out_len = (len(value) + 7) // 8 + out_list = out_len * [0] + j = 7 + i = -1 + for val in value: + j += 1 + if j == 8: + i += 1 + j = 0 + out_list[i] = out_list[i] | val << (7-j) + return int2oct(7-j) + ints2octs(out_list), 0 + else: + pos = 0; substrate = null + while 1: + # count in octets + v = value.clone(value[pos*8:pos*8+maxChunkSize*8]) + if not v: + break + substrate = substrate + encodeFun(v, defMode, maxChunkSize) + pos = pos + maxChunkSize + return substrate, 1 + +class OctetStringEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if not maxChunkSize or len(value) <= maxChunkSize: + return value.asOctets(), 0 + else: + pos = 0; substrate = null + while 1: + v = value.clone(value[pos:pos+maxChunkSize]) + if not v: + break + substrate = substrate + encodeFun(v, defMode, maxChunkSize) + pos = pos + maxChunkSize + return substrate, 1 + +class NullEncoder(AbstractItemEncoder): + supportIndefLenMode = 0 + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return null, 0 + +class ObjectIdentifierEncoder(AbstractItemEncoder): + supportIndefLenMode = 0 + precomputedValues = { + (1, 3, 6, 1, 2): (43, 6, 1, 2), + (1, 3, 6, 1, 4): (43, 6, 1, 4) 
+ } + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + oid = value.asTuple() + if oid[:5] in self.precomputedValues: + octets = self.precomputedValues[oid[:5]] + oid = oid[5:] + else: + if len(oid) < 2: + raise error.PyAsn1Error('Short OID %s' % (value,)) + + octets = () + + # Build the first twos + if oid[0] == 0 and 0 <= oid[1] <= 39: + oid = (oid[1],) + oid[2:] + elif oid[0] == 1 and 0 <= oid[1] <= 39: + oid = (oid[1] + 40,) + oid[2:] + elif oid[0] == 2: + oid = (oid[1] + 80,) + oid[2:] + else: + raise error.PyAsn1Error( + 'Impossible initial arcs %s at %s' % (oid[:2], value) + ) + + # Cycle through subIds + for subId in oid: + if subId > -1 and subId < 128: + # Optimize for the common case + octets = octets + (subId & 0x7f,) + elif subId < 0: + raise error.PyAsn1Error( + 'Negative OID arc %s at %s' % (subId, value) + ) + else: + # Pack large Sub-Object IDs + res = (subId & 0x7f,) + subId = subId >> 7 + while subId > 0: + res = (0x80 | (subId & 0x7f),) + res + subId = subId >> 7 + # Add packed Sub-Object ID to resulted Object ID + octets += res + + return ints2octs(octets), 0 + +class RealEncoder(AbstractItemEncoder): + supportIndefLenMode = 0 + binEncBase = 2 # set to None to choose encoding base automatically + def _dropFloatingPoint(self, m, encbase, e): + ms, es = 1, 1 + if m < 0: + ms = -1 # mantissa sign + if e < 0: + es = -1 # exponenta sign + m *= ms + if encbase == 8: + m = m*2**(abs(e) % 3 * es) + e = abs(e) // 3 * es + elif encbase == 16: + m = m*2**(abs(e) % 4 * es) + e = abs(e) // 4 * es + + while 1: + if int(m) != m: + m *= encbase + e -= 1 + continue + break + return ms, int(m), encbase, e + + def _chooseEncBase(self, value): + m, b, e = value + base = [2, 8, 16] + if value.binEncBase in base: + return self._dropFloatingPoint(m, value.binEncBase, e) + elif self.binEncBase in base: + return self._dropFloatingPoint(m, self.binEncBase, e) + # auto choosing base 2/8/16 + mantissa = [m, m, m] + exponenta = [e, e, e] + encbase = 2 + e = float('inf') + for i in range(3): + sign, mantissa[i], base[i], exponenta[i] = \ + self._dropFloatingPoint(mantissa[i], base[i], exponenta[i]) + if abs(exponenta[i]) < abs(e) or \ + (abs(exponenta[i]) == abs(e) and mantissa[i] < m): + e = exponenta[i] + m = int(mantissa[i]) + encbase = base[i] + return sign, m, encbase, e + + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if value.isPlusInfinity(): + return int2oct(0x40), 0 + if value.isMinusInfinity(): + return int2oct(0x41), 0 + m, b, e = value + if not m: + return null, 0 + if b == 10: + return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), 0 + elif b == 2: + fo = 0x80 # binary encoding + ms, m, encbase, e = self._chooseEncBase(value) + if ms < 0: # mantissa sign + fo = fo | 0x40 # sign bit + # exponenta & mantissa normalization + if encbase == 2: + while m & 0x1 == 0: + m >>= 1 + e += 1 + elif encbase == 8: + while m & 0x7 == 0: + m >>= 3 + e += 1 + fo |= 0x10 + else: # encbase = 16 + while m & 0xf == 0: + m >>= 4 + e += 1 + fo |= 0x20 + sf = 0 # scale factor + while m & 0x1 == 0: + m >>= 1 + sf += 1 + if sf > 3: + raise error.PyAsn1Error('Scale factor overflow') # bug if raised + fo |= sf << 2 + eo = null + if e == 0 or e == -1: + eo = int2oct(e&0xff) + else: + while e not in (0, -1): + eo = int2oct(e&0xff) + eo + e >>= 8 + if e == 0 and eo and oct2int(eo[0]) & 0x80: + eo = int2oct(0) + eo + if e == -1 and eo and not (oct2int(eo[0]) & 0x80): + eo = int2oct(0xff) + eo + n = len(eo) + if n > 0xff: + raise error.PyAsn1Error('Real exponent overflow') 
+ if n == 1: + pass + elif n == 2: + fo |= 1 + elif n == 3: + fo |= 2 + else: + fo |= 3 + eo = int2oct(n&0xff) + eo + po = null + while m: + po = int2oct(m&0xff) + po + m >>= 8 + substrate = int2oct(fo) + eo + po + return substrate, 0 + else: + raise error.PyAsn1Error('Prohibited Real base %s' % b) + +class SequenceEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + value.setDefaultComponents() + value.verifySizeSpec() + substrate = null; idx = len(value) + while idx > 0: + idx = idx - 1 + if value[idx] is None: # Optional component + continue + component = value.getDefaultComponentByPosition(idx) + if component is not None and component == value[idx]: + continue + substrate = encodeFun( + value[idx], defMode, maxChunkSize + ) + substrate + return substrate, 1 + +class SequenceOfEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + value.verifySizeSpec() + substrate = null; idx = len(value) + while idx > 0: + idx = idx - 1 + substrate = encodeFun( + value[idx], defMode, maxChunkSize + ) + substrate + return substrate, 1 + +class ChoiceEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return encodeFun(value.getComponent(), defMode, maxChunkSize), 1 + +class AnyEncoder(OctetStringEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return value.asOctets(), defMode == 0 + +tagMap = { + eoo.endOfOctets.tagSet: EndOfOctetsEncoder(), + univ.Boolean.tagSet: BooleanEncoder(), + univ.Integer.tagSet: IntegerEncoder(), + univ.BitString.tagSet: BitStringEncoder(), + univ.OctetString.tagSet: OctetStringEncoder(), + univ.Null.tagSet: NullEncoder(), + univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(), + univ.Enumerated.tagSet: IntegerEncoder(), + univ.Real.tagSet: RealEncoder(), + # Sequence & Set have same tags as SequenceOf & SetOf + univ.SequenceOf.tagSet: SequenceOfEncoder(), + univ.SetOf.tagSet: SequenceOfEncoder(), + univ.Choice.tagSet: ChoiceEncoder(), + # character string types + char.UTF8String.tagSet: OctetStringEncoder(), + char.NumericString.tagSet: OctetStringEncoder(), + char.PrintableString.tagSet: OctetStringEncoder(), + char.TeletexString.tagSet: OctetStringEncoder(), + char.VideotexString.tagSet: OctetStringEncoder(), + char.IA5String.tagSet: OctetStringEncoder(), + char.GraphicString.tagSet: OctetStringEncoder(), + char.VisibleString.tagSet: OctetStringEncoder(), + char.GeneralString.tagSet: OctetStringEncoder(), + char.UniversalString.tagSet: OctetStringEncoder(), + char.BMPString.tagSet: OctetStringEncoder(), + # useful types + useful.ObjectDescriptor.tagSet: OctetStringEncoder(), + useful.GeneralizedTime.tagSet: OctetStringEncoder(), + useful.UTCTime.tagSet: OctetStringEncoder() + } + +# Type-to-codec map for ambiguous ASN.1 types +typeMap = { + univ.Set.typeId: SequenceEncoder(), + univ.SetOf.typeId: SequenceOfEncoder(), + univ.Sequence.typeId: SequenceEncoder(), + univ.SequenceOf.typeId: SequenceOfEncoder(), + univ.Choice.typeId: ChoiceEncoder(), + univ.Any.typeId: AnyEncoder() + } + +class Encoder: + supportIndefLength = True + def __init__(self, tagMap, typeMap={}): + self.__tagMap = tagMap + self.__typeMap = typeMap + + def __call__(self, value, defMode=True, maxChunkSize=0): + if not defMode and not self.supportIndefLength: + raise error.PyAsn1Error('Indefinite length encoding not supported by this codec') + debug.logger & debug.flagEncoder and debug.logger('encoder called in %sdef mode, chunk size %s for type 
%s, value:\n%s' % (not defMode and 'in' or '', maxChunkSize, value.prettyPrintType(), value.prettyPrint())) + tagSet = value.getTagSet() + if len(tagSet) > 1: + concreteEncoder = explicitlyTaggedItemEncoder + else: + if value.typeId is not None and value.typeId in self.__typeMap: + concreteEncoder = self.__typeMap[value.typeId] + elif tagSet in self.__tagMap: + concreteEncoder = self.__tagMap[tagSet] + else: + tagSet = value.baseTagSet + if tagSet in self.__tagMap: + concreteEncoder = self.__tagMap[tagSet] + else: + raise Error('No encoder for %s' % (value,)) + debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet)) + substrate = concreteEncoder.encode( + self, value, defMode, maxChunkSize + ) + debug.logger & debug.flagEncoder and debug.logger('built %s octets of substrate: %s\nencoder completed' % (len(substrate), debug.hexdump(substrate))) + return substrate + +encode = Encoder(tagMap, typeMap) diff --git a/lib/python3.4/site-packages/pyasn1/codec/ber/eoo.py b/lib/python3.4/site-packages/pyasn1/codec/ber/eoo.py new file mode 100644 index 0000000..379be19 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/codec/ber/eoo.py @@ -0,0 +1,8 @@ +from pyasn1.type import base, tag + +class EndOfOctets(base.AbstractSimpleAsn1Item): + defaultValue = 0 + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x00) + ) +endOfOctets = EndOfOctets() diff --git a/lib/python3.4/site-packages/pyasn1/codec/cer/__init__.py b/lib/python3.4/site-packages/pyasn1/codec/cer/__init__.py new file mode 100644 index 0000000..8c3066b --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/codec/cer/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/lib/python3.4/site-packages/pyasn1/codec/cer/decoder.py b/lib/python3.4/site-packages/pyasn1/codec/cer/decoder.py new file mode 100644 index 0000000..1770cd8 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/codec/cer/decoder.py @@ -0,0 +1,35 @@ +# CER decoder +from pyasn1.type import univ +from pyasn1.codec.ber import decoder +from pyasn1.compat.octets import oct2int +from pyasn1 import error + +class BooleanDecoder(decoder.AbstractSimpleDecoder): + protoComponent = univ.Boolean(0) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if not head or length != 1: + raise error.PyAsn1Error('Not single-octet Boolean payload') + byte = oct2int(head[0]) + # CER/DER specifies encoding of TRUE as 0xFF and FALSE as 0x0, while + # BER allows any non-zero value as TRUE; cf. sections 8.2.2. 
and 11.1 + # in http://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf + if byte == 0xff: + value = 1 + elif byte == 0x00: + value = 0 + else: + raise error.PyAsn1Error('Unexpected Boolean payload: %s' % byte) + return self._createComponent(asn1Spec, tagSet, value), tail + +tagMap = decoder.tagMap.copy() +tagMap.update({ + univ.Boolean.tagSet: BooleanDecoder() + }) + +typeMap = decoder.typeMap + +class Decoder(decoder.Decoder): pass + +decode = Decoder(tagMap, decoder.typeMap) diff --git a/lib/python3.4/site-packages/pyasn1/codec/cer/encoder.py b/lib/python3.4/site-packages/pyasn1/codec/cer/encoder.py new file mode 100644 index 0000000..61ce8a1 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/codec/cer/encoder.py @@ -0,0 +1,130 @@ +# CER encoder +from pyasn1.type import univ +from pyasn1.type import useful +from pyasn1.codec.ber import encoder +from pyasn1.compat.octets import int2oct, str2octs, null +from pyasn1 import error + +class BooleanEncoder(encoder.IntegerEncoder): + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + if client == 0: + substrate = int2oct(0) + else: + substrate = int2oct(255) + return substrate, 0 + +class BitStringEncoder(encoder.BitStringEncoder): + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + return encoder.BitStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + +class OctetStringEncoder(encoder.OctetStringEncoder): + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + return encoder.OctetStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + +class RealEncoder(encoder.RealEncoder): + def _chooseEncBase(self, value): + m, b, e = value + return self._dropFloatingPoint(m, b, e) + +# specialized GeneralStringEncoder here + +class GeneralizedTimeEncoder(OctetStringEncoder): + zchar = str2octs('Z') + pluschar = str2octs('+') + minuschar = str2octs('-') + zero = str2octs('0') + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + octets = client.asOctets() +# This breaks too many existing data items +# if '.' 
not in octets: +# raise error.PyAsn1Error('Format must include fraction of second: %r' % octets) + if len(octets) < 15: + raise error.PyAsn1Error('Bad UTC time length: %r' % octets) + if self.pluschar in octets or self.minuschar in octets: + raise error.PyAsn1Error('Must be UTC time: %r' % octets) + if octets[-1] != self.zchar[0]: + raise error.PyAsn1Error('Missing timezone specifier: %r' % octets) + return encoder.OctetStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + +class UTCTimeEncoder(encoder.OctetStringEncoder): + zchar = str2octs('Z') + pluschar = str2octs('+') + minuschar = str2octs('-') + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + octets = client.asOctets() + if self.pluschar in octets or self.minuschar in octets: + raise error.PyAsn1Error('Must be UTC time: %r' % octets) + if octets and octets[-1] != self.zchar[0]: + client = client.clone(octets + self.zchar) + if len(client) != 13: + raise error.PyAsn1Error('Bad UTC time length: %r' % client) + return encoder.OctetStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + +class SetOfEncoder(encoder.SequenceOfEncoder): + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + if isinstance(client, univ.SequenceAndSetBase): + client.setDefaultComponents() + client.verifySizeSpec() + substrate = null; idx = len(client) + # This is certainly a hack but how else do I distinguish SetOf + # from Set if they have the same tags&constraints? + if isinstance(client, univ.SequenceAndSetBase): + # Set + comps = [] + while idx > 0: + idx = idx - 1 + if client[idx] is None: # Optional component + continue + if client.getDefaultComponentByPosition(idx) == client[idx]: + continue + comps.append(client[idx]) + comps.sort(key=lambda x: isinstance(x, univ.Choice) and \ + x.getMinTagSet() or x.getTagSet()) + for c in comps: + substrate += encodeFun(c, defMode, maxChunkSize) + else: + # SetOf + compSubs = [] + while idx > 0: + idx = idx - 1 + compSubs.append( + encodeFun(client[idx], defMode, maxChunkSize) + ) + compSubs.sort() # perhaps padding's not needed + substrate = null + for compSub in compSubs: + substrate += compSub + return substrate, 1 + +tagMap = encoder.tagMap.copy() +tagMap.update({ + univ.Boolean.tagSet: BooleanEncoder(), + univ.BitString.tagSet: BitStringEncoder(), + univ.OctetString.tagSet: OctetStringEncoder(), + univ.Real.tagSet: RealEncoder(), + useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(), + useful.UTCTime.tagSet: UTCTimeEncoder(), + univ.SetOf().tagSet: SetOfEncoder() # conflcts with Set +}) + +typeMap = encoder.typeMap.copy() +typeMap.update({ + univ.Set.typeId: SetOfEncoder(), + univ.SetOf.typeId: SetOfEncoder() +}) + +class Encoder(encoder.Encoder): + def __call__(self, client, defMode=False, maxChunkSize=0): + return encoder.Encoder.__call__(self, client, defMode, maxChunkSize) + +encode = Encoder(tagMap, typeMap) + +# EncoderFactory queries class instance and builds a map of tags -> encoders diff --git a/lib/python3.4/site-packages/pyasn1/codec/der/__init__.py b/lib/python3.4/site-packages/pyasn1/codec/der/__init__.py new file mode 100644 index 0000000..8c3066b --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/codec/der/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. 
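The CER encoder above differs visibly from plain BER: Boolean TRUE is forced to a single 0xFF payload octet, string types are re-chunked at 1000 octets, and SET OF components are sorted. A minimal sketch of the Boolean difference, assuming this vendored pyasn1 tree is importable; the expected bytes follow from the encoders above and are not independently verified:

from pyasn1.type import univ
from pyasn1.codec.ber import encoder as ber_encoder
from pyasn1.codec.cer import encoder as cer_encoder

flag = univ.Boolean(True)
# BER keeps the underlying integer value 1 as the payload octet.
print(ber_encoder.encode(flag))  # expected: b'\x01\x01\x01'
# CER's BooleanEncoder above substitutes 0xFF for any non-zero value.
print(cer_encoder.encode(flag))  # expected: b'\x01\x01\xff'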
diff --git a/lib/python3.4/site-packages/pyasn1/codec/der/decoder.py b/lib/python3.4/site-packages/pyasn1/codec/der/decoder.py new file mode 100644 index 0000000..ea58d6d --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/codec/der/decoder.py @@ -0,0 +1,9 @@ +# DER decoder +from pyasn1.codec.cer import decoder + +tagMap = decoder.tagMap +typeMap = decoder.typeMap +class Decoder(decoder.Decoder): + supportIndefLength = False + +decode = Decoder(tagMap, typeMap) diff --git a/lib/python3.4/site-packages/pyasn1/codec/der/encoder.py b/lib/python3.4/site-packages/pyasn1/codec/der/encoder.py new file mode 100644 index 0000000..7f55eeb --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/codec/der/encoder.py @@ -0,0 +1,32 @@ +# DER encoder +from pyasn1.type import univ +from pyasn1.codec.cer import encoder +from pyasn1 import error + +class SetOfEncoder(encoder.SetOfEncoder): + def _cmpSetComponents(self, c1, c2): + tagSet1 = isinstance(c1, univ.Choice) and \ + c1.getEffectiveTagSet() or c1.getTagSet() + tagSet2 = isinstance(c2, univ.Choice) and \ + c2.getEffectiveTagSet() or c2.getTagSet() + return cmp(tagSet1, tagSet2) + +tagMap = encoder.tagMap.copy() +tagMap.update({ + # Overload CER encoders with BER ones (a bit hackerish XXX) + univ.BitString.tagSet: encoder.encoder.BitStringEncoder(), + univ.OctetString.tagSet: encoder.encoder.OctetStringEncoder(), + # Set & SetOf have same tags + univ.SetOf().tagSet: SetOfEncoder() +}) + +typeMap = encoder.typeMap + +class Encoder(encoder.Encoder): + supportIndefLength = False + def __call__(self, client, defMode=True, maxChunkSize=0): + if not defMode: + raise error.PyAsn1Error('DER forbids indefinite length mode') + return encoder.Encoder.__call__(self, client, defMode, maxChunkSize) + +encode = Encoder(tagMap, typeMap) diff --git a/lib/python3.4/site-packages/pyasn1/compat/__init__.py b/lib/python3.4/site-packages/pyasn1/compat/__init__.py new file mode 100644 index 0000000..8c3066b --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/compat/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. 
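The DER codec above is a thin restriction of CER: the decoder sets supportIndefLength = False, and the encoder rejects defMode=False outright. A short sketch under the same assumption that this vendored tree is importable:

from pyasn1.type import univ
from pyasn1.codec.der import encoder as der_encoder
from pyasn1.codec.der import decoder as der_decoder
from pyasn1 import error

substrate = der_encoder.encode(univ.Integer(12))  # expected: b'\x02\x01\x0c'
value, rest = der_decoder.decode(substrate)       # Integer(12), with b'' left over
assert value == 12 and not rest

try:
    der_encoder.encode(univ.Integer(12), defMode=False)
except error.PyAsn1Error:
    pass  # 'DER forbids indefinite length mode', raised by Encoder.__call__ above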
diff --git a/lib/python3.4/site-packages/pyasn1/compat/binary.py b/lib/python3.4/site-packages/pyasn1/compat/binary.py new file mode 100644 index 0000000..b38932a --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/compat/binary.py @@ -0,0 +1,10 @@ +from sys import version_info + +if version_info[0:2] < (2, 6): + def bin(x): + if x <= 1: + return '0b'+str(x) + else: + return bin(x>>1) + str(x&1) +else: + bin = bin diff --git a/lib/python3.4/site-packages/pyasn1/compat/octets.py b/lib/python3.4/site-packages/pyasn1/compat/octets.py new file mode 100644 index 0000000..e812737 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/compat/octets.py @@ -0,0 +1,22 @@ +from sys import version_info + +if version_info[0] <= 2: + int2oct = chr + ints2octs = lambda s: ''.join([ int2oct(x) for x in s ]) + null = '' + oct2int = ord + octs2ints = lambda s: [ oct2int(x) for x in s ] + str2octs = lambda x: x + octs2str = lambda x: x + isOctetsType = lambda s: isinstance(s, str) + isStringType = lambda s: isinstance(s, (str, unicode)) +else: + ints2octs = bytes + int2oct = lambda x: ints2octs((x,)) + null = ints2octs() + oct2int = lambda x: x + octs2ints = lambda s: [ x for x in s ] + str2octs = lambda x: x.encode() + octs2str = lambda x: x.decode() + isOctetsType = lambda s: isinstance(s, bytes) + isStringType = lambda s: isinstance(s, str) diff --git a/lib/python3.4/site-packages/pyasn1/debug.py b/lib/python3.4/site-packages/pyasn1/debug.py new file mode 100644 index 0000000..9b69886 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/debug.py @@ -0,0 +1,110 @@ +import time +import logging +from pyasn1.compat.octets import octs2ints +from pyasn1 import error +from pyasn1 import __version__ + +flagNone = 0x0000 +flagEncoder = 0x0001 +flagDecoder = 0x0002 +flagAll = 0xffff + +flagMap = { + 'encoder': flagEncoder, + 'decoder': flagDecoder, + 'all': flagAll + } + +class Printer: + def __init__(self, logger=None, handler=None, formatter=None): + if logger is None: + logger = logging.getLogger('pyasn1') + logger.setLevel(logging.DEBUG) + if handler is None: + handler = logging.StreamHandler() + if formatter is None: + formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s') + handler.setFormatter(formatter) + handler.setLevel(logging.DEBUG) + logger.addHandler(handler) + self.__logger = logger + + def __call__(self, msg): self.__logger.debug(msg) + def __str__(self): return '<python built-in logging>' + +if hasattr(logging, 'NullHandler'): + NullHandler = logging.NullHandler +else: + # Python 2.6 and older + class NullHandler(logging.Handler): + def emit(self, record): + pass + +class Debug: + defaultPrinter = None + def __init__(self, *flags, **options): + self._flags = flagNone + if options.get('printer') is not None: + self._printer = options.get('printer') + elif self.defaultPrinter is not None: + self._printer = self.defaultPrinter + if 'loggerName' in options: + # route our logs to parent logger + self._printer = Printer( + logger=logging.getLogger(options['loggerName']), + handler=NullHandler() + ) + else: + self._printer = Printer() + self('running pyasn1 version %s' % __version__) + for f in flags: + inverse = f and f[0] in ('!', '~') + if inverse: + f = f[1:] + try: + if inverse: + self._flags &= ~flagMap[f] + else: + self._flags |= flagMap[f] + except KeyError: + raise error.PyAsn1Error('bad debug flag %s' % f) + + self('debug category \'%s\' %s' % (f, inverse and 'disabled' or 'enabled')) + + def __str__(self): + return 'logger %s, flags %x' % (self._printer, self._flags) + + def 
__call__(self, msg): + self._printer(msg) + + def __and__(self, flag): + return self._flags & flag + + def __rand__(self, flag): + return flag & self._flags + +logger = 0 + +def setLogger(l): + global logger + logger = l + +def hexdump(octets): + return ' '.join( + [ '%s%.2X' % (n%16 == 0 and ('\n%.5d: ' % n) or '', x) + for n,x in zip(range(len(octets)), octs2ints(octets)) ] + ) + +class Scope: + def __init__(self): + self._list = [] + + def __str__(self): return '.'.join(self._list) + + def push(self, token): + self._list.append(token) + + def pop(self): + return self._list.pop() + +scope = Scope() diff --git a/lib/python3.4/site-packages/pyasn1/error.py b/lib/python3.4/site-packages/pyasn1/error.py new file mode 100644 index 0000000..716406f --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/error.py @@ -0,0 +1,3 @@ +class PyAsn1Error(Exception): pass +class ValueConstraintError(PyAsn1Error): pass +class SubstrateUnderrunError(PyAsn1Error): pass diff --git a/lib/python3.4/site-packages/pyasn1/type/__init__.py b/lib/python3.4/site-packages/pyasn1/type/__init__.py new file mode 100644 index 0000000..8c3066b --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/type/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/lib/python3.4/site-packages/pyasn1/type/base.py b/lib/python3.4/site-packages/pyasn1/type/base.py new file mode 100644 index 0000000..72920a9 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/type/base.py @@ -0,0 +1,278 @@ +# Base classes for ASN.1 types +import sys +from pyasn1.type import constraint, tagmap, tag +from pyasn1 import error + +class Asn1Item: pass + +class Asn1ItemBase(Asn1Item): + # Set of tags for this ASN.1 type + tagSet = tag.TagSet() + + # A list of constraint.Constraint instances for checking values + subtypeSpec = constraint.ConstraintsIntersection() + + # Used for ambiguous ASN.1 types identification + typeId = None + + def __init__(self, tagSet=None, subtypeSpec=None): + if tagSet is None: + self._tagSet = self.tagSet + else: + self._tagSet = tagSet + if subtypeSpec is None: + self._subtypeSpec = self.subtypeSpec + else: + self._subtypeSpec = subtypeSpec + + def _verifySubtypeSpec(self, value, idx=None): + try: + self._subtypeSpec(value, idx) + except error.PyAsn1Error: + c, i, t = sys.exc_info() + raise c('%s at %s' % (i, self.__class__.__name__)) + + def getSubtypeSpec(self): return self._subtypeSpec + + def getTagSet(self): return self._tagSet + def getEffectiveTagSet(self): return self._tagSet # used by untagged types + def getTagMap(self): return tagmap.TagMap({self._tagSet: self}) + + def isSameTypeWith(self, other, matchTags=True, matchConstraints=True): + return self is other or \ + (not matchTags or \ + self._tagSet == other.getTagSet()) and \ + (not matchConstraints or \ + self._subtypeSpec==other.getSubtypeSpec()) + + def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True): + """Returns true if argument is a ASN1 subtype of ourselves""" + return (not matchTags or \ + self._tagSet.isSuperTagSetOf(other.getTagSet())) and \ + (not matchConstraints or \ + (self._subtypeSpec.isSuperTypeOf(other.getSubtypeSpec()))) + +class NoValue: + def __getattr__(self, attr): + raise error.PyAsn1Error('No value for %s()' % attr) + def __getitem__(self, i): + raise error.PyAsn1Error('No value') + def __repr__(self): return '%s()' % self.__class__.__name__ + +noValue = NoValue() + +# Base class for "simple" ASN.1 objects. These are immutable. 
+class AbstractSimpleAsn1Item(Asn1ItemBase): + defaultValue = noValue + def __init__(self, value=None, tagSet=None, subtypeSpec=None): + Asn1ItemBase.__init__(self, tagSet, subtypeSpec) + if value is None or value is noValue: + value = self.defaultValue + if value is None or value is noValue: + self.__hashedValue = value = noValue + else: + value = self.prettyIn(value) + self._verifySubtypeSpec(value) + self.__hashedValue = hash(value) + self._value = value + self._len = None + + def __repr__(self): + r = [] + if self._value is not self.defaultValue: + r.append(self.prettyOut(self._value)) + if self._tagSet is not self.tagSet: + r.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + r.append('subtypeSpec=%r' % (self._subtypeSpec,)) + return '%s(%s)' % (self.__class__.__name__, ', '.join(r)) + + def __str__(self): return str(self._value) + def __eq__(self, other): + return self is other and True or self._value == other + def __ne__(self, other): return self._value != other + def __lt__(self, other): return self._value < other + def __le__(self, other): return self._value <= other + def __gt__(self, other): return self._value > other + def __ge__(self, other): return self._value >= other + if sys.version_info[0] <= 2: + def __nonzero__(self): return bool(self._value) + else: + def __bool__(self): return bool(self._value) + def __hash__(self): + return self.__hashedValue is noValue and hash(noValue) or self.__hashedValue + + def hasValue(self): + return not isinstance(self._value, NoValue) + + def clone(self, value=None, tagSet=None, subtypeSpec=None): + if value is None and tagSet is None and subtypeSpec is None: + return self + if value is None: + value = self._value + if tagSet is None: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + return self.__class__(value, tagSet, subtypeSpec) + + def subtype(self, value=None, implicitTag=None, explicitTag=None, + subtypeSpec=None): + if value is None: + value = self._value + if implicitTag is not None: + tagSet = self._tagSet.tagImplicitly(implicitTag) + elif explicitTag is not None: + tagSet = self._tagSet.tagExplicitly(explicitTag) + else: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + else: + subtypeSpec = subtypeSpec + self._subtypeSpec + return self.__class__(value, tagSet, subtypeSpec) + + def prettyIn(self, value): return value + def prettyOut(self, value): return str(value) + + def prettyPrint(self, scope=0): + if self.hasValue(): + return self.prettyOut(self._value) + else: + return '<no value>' + + # XXX Compatibility stub + def prettyPrinter(self, scope=0): return self.prettyPrint(scope) + + def prettyPrintType(self, scope=0): + return '%s -> %s' % (self.getTagSet(), self.__class__.__name__) + +# +# Constructed types: +# * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice +# * ASN1 types and values are represened by Python class instances +# * Value initialization is made for defaulted components only +# * Primary method of component addressing is by-position. Data model for base +# type is Python sequence. Additional type-specific addressing methods +# may be implemented for particular types. 
+# * SequenceOf and SetOf types do not implement any additional methods +# * Sequence, Set and Choice types also implement by-identifier addressing +# * Sequence, Set and Choice types also implement by-asn1-type (tag) addressing +# * Sequence and Set types may include optional and defaulted +# components +# * Constructed types hold a reference to component types used for value +# verification and ordering. +# * Component type is a scalar type for SequenceOf/SetOf types and a list +# of types for Sequence/Set/Choice. +# + +class AbstractConstructedAsn1Item(Asn1ItemBase): + componentType = None + sizeSpec = constraint.ConstraintsIntersection() + def __init__(self, componentType=None, tagSet=None, + subtypeSpec=None, sizeSpec=None): + Asn1ItemBase.__init__(self, tagSet, subtypeSpec) + if componentType is None: + self._componentType = self.componentType + else: + self._componentType = componentType + if sizeSpec is None: + self._sizeSpec = self.sizeSpec + else: + self._sizeSpec = sizeSpec + self._componentValues = [] + self._componentValuesSet = 0 + + def __repr__(self): + r = [] + if self._componentType is not self.componentType: + r.append('componentType=%r' % (self._componentType,)) + if self._tagSet is not self.tagSet: + r.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + r.append('subtypeSpec=%r' % (self._subtypeSpec,)) + r = '%s(%s)' % (self.__class__.__name__, ', '.join(r)) + if self._componentValues: + r += '.setComponents(%s)' % ', '.join([repr(x) for x in self._componentValues]) + return r + + def __eq__(self, other): + return self is other and True or self._componentValues == other + def __ne__(self, other): return self._componentValues != other + def __lt__(self, other): return self._componentValues < other + def __le__(self, other): return self._componentValues <= other + def __gt__(self, other): return self._componentValues > other + def __ge__(self, other): return self._componentValues >= other + if sys.version_info[0] <= 2: + def __nonzero__(self): return bool(self._componentValues) + else: + def __bool__(self): return bool(self._componentValues) + + def getComponentTagMap(self): + raise error.PyAsn1Error('Method not implemented') + + def _cloneComponentValues(self, myClone, cloneValueFlag): pass + + def clone(self, tagSet=None, subtypeSpec=None, sizeSpec=None, + cloneValueFlag=None): + if tagSet is None: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + if sizeSpec is None: + sizeSpec = self._sizeSpec + r = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec) + if cloneValueFlag: + self._cloneComponentValues(r, cloneValueFlag) + return r + + def subtype(self, implicitTag=None, explicitTag=None, subtypeSpec=None, + sizeSpec=None, cloneValueFlag=None): + if implicitTag is not None: + tagSet = self._tagSet.tagImplicitly(implicitTag) + elif explicitTag is not None: + tagSet = self._tagSet.tagExplicitly(explicitTag) + else: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + else: + subtypeSpec = subtypeSpec + self._subtypeSpec + if sizeSpec is None: + sizeSpec = self._sizeSpec + else: + sizeSpec = sizeSpec + self._sizeSpec + r = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec) + if cloneValueFlag: + self._cloneComponentValues(r, cloneValueFlag) + return r + + def _verifyComponent(self, idx, value): pass + + def verifySizeSpec(self): self._sizeSpec(self) + + def getComponentByPosition(self, idx): + raise error.PyAsn1Error('Method not 
implemented') + def setComponentByPosition(self, idx, value, verifyConstraints=True): + raise error.PyAsn1Error('Method not implemented') + + def setComponents(self, *args, **kwargs): + for idx in range(len(args)): + self[idx] = args[idx] + for k in kwargs: + self[k] = kwargs[k] + return self + + def getComponentType(self): return self._componentType + + def setDefaultComponents(self): pass + + def __getitem__(self, idx): return self.getComponentByPosition(idx) + def __setitem__(self, idx, value): self.setComponentByPosition(idx, value) + + def __len__(self): return len(self._componentValues) + + def clear(self): + self._componentValues = [] + self._componentValuesSet = 0 + diff --git a/lib/python3.4/site-packages/pyasn1/type/char.py b/lib/python3.4/site-packages/pyasn1/type/char.py new file mode 100644 index 0000000..af49ab3 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/type/char.py @@ -0,0 +1,64 @@ +# ASN.1 "character string" types +from pyasn1.type import univ, tag + +class NumericString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18) + ) + +class PrintableString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 19) + ) + +class TeletexString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20) + ) + +class T61String(TeletexString): pass + +class VideotexString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 21) + ) + +class IA5String(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 22) + ) + +class GraphicString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 25) + ) + +class VisibleString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26) + ) + +class ISO646String(VisibleString): pass + +class GeneralString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27) + ) + +class UniversalString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 28) + ) + encoding = "utf-32-be" + +class BMPString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30) + ) + encoding = "utf-16-be" + +class UTF8String(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12) + ) + encoding = "utf-8" diff --git a/lib/python3.4/site-packages/pyasn1/type/constraint.py b/lib/python3.4/site-packages/pyasn1/type/constraint.py new file mode 100644 index 0000000..6687393 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/type/constraint.py @@ -0,0 +1,200 @@ +# +# ASN.1 subtype constraints classes. +# +# Constraints are relatively rare, but every ASN1 object +# is doing checks all the time for whether they have any +# constraints and whether they are applicable to the object. +# +# What we're going to do is define objects/functions that +# can be called unconditionally if they are present, and that +# are simply not present if there are no constraints. +# +# Original concept and code by Mike C. Fletcher. 
+# +import sys +from pyasn1.type import error + +class AbstractConstraint: + """Abstract base-class for constraint objects + + Constraints should be stored in a simple sequence in the + namespace of their client Asn1Item sub-classes. + """ + def __init__(self, *values): + self._valueMap = {} + self._setValues(values) + self.__hashedValues = None + def __call__(self, value, idx=None): + try: + self._testValue(value, idx) + except error.ValueConstraintError: + raise error.ValueConstraintError( + '%s failed at: \"%s\"' % (self, sys.exc_info()[1]) + ) + def __repr__(self): + return '%s(%s)' % ( + self.__class__.__name__, + ', '.join([repr(x) for x in self._values]) + ) + def __eq__(self, other): + return self is other and True or self._values == other + def __ne__(self, other): return self._values != other + def __lt__(self, other): return self._values < other + def __le__(self, other): return self._values <= other + def __gt__(self, other): return self._values > other + def __ge__(self, other): return self._values >= other + if sys.version_info[0] <= 2: + def __nonzero__(self): return bool(self._values) + else: + def __bool__(self): return bool(self._values) + + def __hash__(self): + if self.__hashedValues is None: + self.__hashedValues = hash((self.__class__.__name__, self._values)) + return self.__hashedValues + + def _setValues(self, values): self._values = values + def _testValue(self, value, idx): + raise error.ValueConstraintError(value) + + # Constraints derivation logic + def getValueMap(self): return self._valueMap + def isSuperTypeOf(self, otherConstraint): + return self in otherConstraint.getValueMap() or \ + otherConstraint is self or otherConstraint == self + def isSubTypeOf(self, otherConstraint): + return otherConstraint in self._valueMap or \ + otherConstraint is self or otherConstraint == self + +class SingleValueConstraint(AbstractConstraint): + """Value must be part of defined values constraint""" + def _testValue(self, value, idx): + # XXX index vals for performance? 
+ if value not in self._values: + raise error.ValueConstraintError(value) + +class ContainedSubtypeConstraint(AbstractConstraint): + """Value must satisfy all of defined set of constraints""" + def _testValue(self, value, idx): + for c in self._values: + c(value, idx) + +class ValueRangeConstraint(AbstractConstraint): + """Value must be within start and stop values (inclusive)""" + def _testValue(self, value, idx): + if value < self.start or value > self.stop: + raise error.ValueConstraintError(value) + + def _setValues(self, values): + if len(values) != 2: + raise error.PyAsn1Error( + '%s: bad constraint values' % (self.__class__.__name__,) + ) + self.start, self.stop = values + if self.start > self.stop: + raise error.PyAsn1Error( + '%s: screwed constraint values (start > stop): %s > %s' % ( + self.__class__.__name__, + self.start, self.stop + ) + ) + AbstractConstraint._setValues(self, values) + +class ValueSizeConstraint(ValueRangeConstraint): + """len(value) must be within start and stop values (inclusive)""" + def _testValue(self, value, idx): + l = len(value) + if l < self.start or l > self.stop: + raise error.ValueConstraintError(value) + +class PermittedAlphabetConstraint(SingleValueConstraint): + def _setValues(self, values): + self._values = () + for v in values: + self._values = self._values + tuple(v) + + def _testValue(self, value, idx): + for v in value: + if v not in self._values: + raise error.ValueConstraintError(value) + +# This is a bit kludgy, meaning two op modes within a single constraing +class InnerTypeConstraint(AbstractConstraint): + """Value must satisfy type and presense constraints""" + def _testValue(self, value, idx): + if self.__singleTypeConstraint: + self.__singleTypeConstraint(value) + elif self.__multipleTypeConstraint: + if idx not in self.__multipleTypeConstraint: + raise error.ValueConstraintError(value) + constraint, status = self.__multipleTypeConstraint[idx] + if status == 'ABSENT': # XXX presense is not checked! 
+ raise error.ValueConstraintError(value) + constraint(value) + + def _setValues(self, values): + self.__multipleTypeConstraint = {} + self.__singleTypeConstraint = None + for v in values: + if isinstance(v, tuple): + self.__multipleTypeConstraint[v[0]] = v[1], v[2] + else: + self.__singleTypeConstraint = v + AbstractConstraint._setValues(self, values) + +# Boolean ops on constraints + +class ConstraintsExclusion(AbstractConstraint): + """Value must not fit the single constraint""" + def _testValue(self, value, idx): + try: + self._values[0](value, idx) + except error.ValueConstraintError: + return + else: + raise error.ValueConstraintError(value) + + def _setValues(self, values): + if len(values) != 1: + raise error.PyAsn1Error('Single constraint expected') + AbstractConstraint._setValues(self, values) + +class AbstractConstraintSet(AbstractConstraint): + """Value must not satisfy the single constraint""" + def __getitem__(self, idx): return self._values[idx] + + def __add__(self, value): return self.__class__(self, value) + def __radd__(self, value): return self.__class__(self, value) + + def __len__(self): return len(self._values) + + # Constraints inclusion in sets + + def _setValues(self, values): + self._values = values + for v in values: + self._valueMap[v] = 1 + self._valueMap.update(v.getValueMap()) + +class ConstraintsIntersection(AbstractConstraintSet): + """Value must satisfy all constraints""" + def _testValue(self, value, idx): + for v in self._values: + v(value, idx) + +class ConstraintsUnion(AbstractConstraintSet): + """Value must satisfy at least one constraint""" + def _testValue(self, value, idx): + for v in self._values: + try: + v(value, idx) + except error.ValueConstraintError: + pass + else: + return + raise error.ValueConstraintError( + 'all of %s failed for \"%s\"' % (self._values, value) + ) + +# XXX +# add tests for type check diff --git a/lib/python3.4/site-packages/pyasn1/type/error.py b/lib/python3.4/site-packages/pyasn1/type/error.py new file mode 100644 index 0000000..3e68484 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/type/error.py @@ -0,0 +1,3 @@ +from pyasn1.error import PyAsn1Error + +class ValueConstraintError(PyAsn1Error): pass diff --git a/lib/python3.4/site-packages/pyasn1/type/namedtype.py b/lib/python3.4/site-packages/pyasn1/type/namedtype.py new file mode 100644 index 0000000..aca4282 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/type/namedtype.py @@ -0,0 +1,149 @@ +# NamedType specification for constructed types +import sys +from pyasn1.type import tagmap +from pyasn1 import error + +class NamedType: + isOptional = 0 + isDefaulted = 0 + def __init__(self, name, t): + self.__name = name; self.__type = t + def __repr__(self): return '%s(%r, %r)' % ( + self.__class__.__name__, self.__name, self.__type + ) + def __eq__(self, other): return tuple(self) == tuple(other) + def __ne__(self, other): return tuple(self) != tuple(other) + def __lt__(self, other): return tuple(self) < tuple(other) + def __le__(self, other): return tuple(self) <= tuple(other) + def __gt__(self, other): return tuple(self) > tuple(other) + def __ge__(self, other): return tuple(self) >= tuple(other) + def __hash__(self): return hash(tuple(self)) + + def getType(self): return self.__type + def getName(self): return self.__name + def __getitem__(self, idx): + if idx == 0: return self.__name + if idx == 1: return self.__type + raise IndexError() + +class OptionalNamedType(NamedType): + isOptional = 1 +class DefaultedNamedType(NamedType): + isDefaulted = 1 + +class 
NamedTypes: + def __init__(self, *namedTypes): + self.__namedTypes = namedTypes + self.__namedTypesLen = len(self.__namedTypes) + self.__minTagSet = None + self.__tagToPosIdx = {}; self.__nameToPosIdx = {} + self.__tagMap = { False: None, True: None } + self.__ambigiousTypes = {} + + def __repr__(self): + return '%s(%s)' % ( + self.__class__.__name__, + ', '.join([ repr(x) for x in self.__namedTypes ]) + ) + def __eq__(self, other): return tuple(self) == tuple(other) + def __ne__(self, other): return tuple(self) != tuple(other) + def __lt__(self, other): return tuple(self) < tuple(other) + def __le__(self, other): return tuple(self) <= tuple(other) + def __gt__(self, other): return tuple(self) > tuple(other) + def __ge__(self, other): return tuple(self) >= tuple(other) + def __hash__(self): return hash(tuple(self)) + + def __getitem__(self, idx): return self.__namedTypes[idx] + + if sys.version_info[0] <= 2: + def __nonzero__(self): return bool(self.__namedTypesLen) + else: + def __bool__(self): return bool(self.__namedTypesLen) + def __len__(self): return self.__namedTypesLen + + def clone(self): return self.__class__(*self.__namedTypes) + + def getTypeByPosition(self, idx): + if idx < 0 or idx >= self.__namedTypesLen: + raise error.PyAsn1Error('Type position out of range') + else: + return self.__namedTypes[idx].getType() + + def getPositionByType(self, tagSet): + if not self.__tagToPosIdx: + idx = self.__namedTypesLen + while idx > 0: + idx = idx - 1 + tagMap = self.__namedTypes[idx].getType().getTagMap() + for t in tagMap.getPosMap(): + if t in self.__tagToPosIdx: + raise error.PyAsn1Error('Duplicate type %s' % (t,)) + self.__tagToPosIdx[t] = idx + try: + return self.__tagToPosIdx[tagSet] + except KeyError: + raise error.PyAsn1Error('Type %s not found' % (tagSet,)) + + def getNameByPosition(self, idx): + try: + return self.__namedTypes[idx].getName() + except IndexError: + raise error.PyAsn1Error('Type position out of range') + def getPositionByName(self, name): + if not self.__nameToPosIdx: + idx = self.__namedTypesLen + while idx > 0: + idx = idx - 1 + n = self.__namedTypes[idx].getName() + if n in self.__nameToPosIdx: + raise error.PyAsn1Error('Duplicate name %s' % (n,)) + self.__nameToPosIdx[n] = idx + try: + return self.__nameToPosIdx[name] + except KeyError: + raise error.PyAsn1Error('Name %s not found' % (name,)) + + def __buildAmbigiousTagMap(self): + ambigiousTypes = () + idx = self.__namedTypesLen + while idx > 0: + idx = idx - 1 + t = self.__namedTypes[idx] + if t.isOptional or t.isDefaulted: + ambigiousTypes = (t, ) + ambigiousTypes + else: + ambigiousTypes = (t, ) + self.__ambigiousTypes[idx] = NamedTypes(*ambigiousTypes) + + def getTagMapNearPosition(self, idx): + if not self.__ambigiousTypes: self.__buildAmbigiousTagMap() + try: + return self.__ambigiousTypes[idx].getTagMap() + except KeyError: + raise error.PyAsn1Error('Type position out of range') + + def getPositionNearType(self, tagSet, idx): + if not self.__ambigiousTypes: self.__buildAmbigiousTagMap() + try: + return idx+self.__ambigiousTypes[idx].getPositionByType(tagSet) + except KeyError: + raise error.PyAsn1Error('Type position out of range') + + def genMinTagSet(self): + if self.__minTagSet is None: + for t in self.__namedTypes: + __type = t.getType() + tagSet = getattr(__type,'getMinTagSet',__type.getTagSet)() + if self.__minTagSet is None or tagSet < self.__minTagSet: + self.__minTagSet = tagSet + return self.__minTagSet + + def getTagMap(self, uniq=False): + if self.__tagMap[uniq] is None: + tagMap = 
tagmap.TagMap() + for nt in self.__namedTypes: + tagMap = tagMap.clone( + nt.getType(), nt.getType().getTagMap(), uniq + ) + self.__tagMap[uniq] = tagMap + return self.__tagMap[uniq] diff --git a/lib/python3.4/site-packages/pyasn1/type/namedval.py b/lib/python3.4/site-packages/pyasn1/type/namedval.py new file mode 100644 index 0000000..676cb93 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/type/namedval.py @@ -0,0 +1,58 @@ +# ASN.1 named integers +from pyasn1 import error + +__all__ = [ 'NamedValues' ] + +class NamedValues: + def __init__(self, *namedValues): + self.nameToValIdx = {}; self.valToNameIdx = {} + self.namedValues = () + automaticVal = 1 + for namedValue in namedValues: + if isinstance(namedValue, tuple): + name, val = namedValue + else: + name = namedValue + val = automaticVal + if name in self.nameToValIdx: + raise error.PyAsn1Error('Duplicate name %s' % (name,)) + self.nameToValIdx[name] = val + if val in self.valToNameIdx: + raise error.PyAsn1Error('Duplicate value %s=%s' % (name, val)) + self.valToNameIdx[val] = name + self.namedValues = self.namedValues + ((name, val),) + automaticVal = automaticVal + 1 + + def __repr__(self): + return '%s(%s)' % (self.__class__.__name__, ', '.join([repr(x) for x in self.namedValues])) + + def __str__(self): return str(self.namedValues) + + def __eq__(self, other): return tuple(self) == tuple(other) + def __ne__(self, other): return tuple(self) != tuple(other) + def __lt__(self, other): return tuple(self) < tuple(other) + def __le__(self, other): return tuple(self) <= tuple(other) + def __gt__(self, other): return tuple(self) > tuple(other) + def __ge__(self, other): return tuple(self) >= tuple(other) + def __hash__(self): return hash(tuple(self)) + + def getName(self, value): + if value in self.valToNameIdx: + return self.valToNameIdx[value] + + def getValue(self, name): + if name in self.nameToValIdx: + return self.nameToValIdx[name] + + def __getitem__(self, i): return self.namedValues[i] + def __len__(self): return len(self.namedValues) + + def __add__(self, namedValues): + return self.__class__(*self.namedValues + namedValues) + def __radd__(self, namedValues): + return self.__class__(*namedValues + tuple(self)) + + def clone(self, *namedValues): + return self.__class__(*tuple(self) + namedValues) + +# XXX clone/subtype? 
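NamedValues above maps symbolic names to integers, built either from explicit (name, value) tuples or from bare names that are auto-numbered starting at 1. A small usage sketch; the names here are illustrative only:

from pyasn1.type import namedval

explicit = namedval.NamedValues(('off', 0), ('on', 1))
implicit = namedval.NamedValues('standby', 'on')  # auto-numbered: standby=1, on=2

print(explicit.getValue('on'))  # 1
print(explicit.getName(0))      # 'off'
print(implicit.getValue('on'))  # 2

Types such as Integer consult this mapping in prettyIn()/prettyOut(), so named constants round-trip through their human-readable form.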
diff --git a/lib/python3.4/site-packages/pyasn1/type/tag.py b/lib/python3.4/site-packages/pyasn1/type/tag.py new file mode 100644 index 0000000..7471a9b --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/type/tag.py @@ -0,0 +1,128 @@ +# ASN.1 types tags +from operator import getitem +from pyasn1 import error + +tagClassUniversal = 0x00 +tagClassApplication = 0x40 +tagClassContext = 0x80 +tagClassPrivate = 0xC0 + +tagFormatSimple = 0x00 +tagFormatConstructed = 0x20 + +tagCategoryImplicit = 0x01 +tagCategoryExplicit = 0x02 +tagCategoryUntagged = 0x04 + +class Tag: + def __init__(self, tagClass, tagFormat, tagId): + if tagId < 0: + raise error.PyAsn1Error( + 'Negative tag ID (%s) not allowed' % (tagId,) + ) + self.__tag = (tagClass, tagFormat, tagId) + self.uniq = (tagClass, tagId) + self.__hashedUniqTag = hash(self.uniq) + + def __str__(self): + return '[%s:%s:%s]' % self.__tag + + def __repr__(self): + return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % ( + (self.__class__.__name__,) + self.__tag + ) + # These is really a hotspot -- expose public "uniq" attribute to save on + # function calls + def __eq__(self, other): return self.uniq == other.uniq + def __ne__(self, other): return self.uniq != other.uniq + def __lt__(self, other): return self.uniq < other.uniq + def __le__(self, other): return self.uniq <= other.uniq + def __gt__(self, other): return self.uniq > other.uniq + def __ge__(self, other): return self.uniq >= other.uniq + def __hash__(self): return self.__hashedUniqTag + def __getitem__(self, idx): return self.__tag[idx] + def __and__(self, otherTag): + (tagClass, tagFormat, tagId) = otherTag + return self.__class__( + self.__tag&tagClass, self.__tag&tagFormat, self.__tag&tagId + ) + def __or__(self, otherTag): + (tagClass, tagFormat, tagId) = otherTag + return self.__class__( + self.__tag[0]|tagClass, + self.__tag[1]|tagFormat, + self.__tag[2]|tagId + ) + def asTuple(self): return self.__tag # __getitem__() is slow + +class TagSet: + def __init__(self, baseTag=(), *superTags): + self.__baseTag = baseTag + self.__superTags = superTags + self.__hashedSuperTags = hash(superTags) + _uniq = () + for t in superTags: + _uniq = _uniq + t.uniq + self.uniq = _uniq + self.__lenOfSuperTags = len(superTags) + + def __str__(self): + return self.__superTags and '+'.join([str(x) for x in self.__superTags]) or '[untagged]' + + def __repr__(self): + return '%s(%s)' % ( + self.__class__.__name__, + '(), ' + ', '.join([repr(x) for x in self.__superTags]) + ) + + def __add__(self, superTag): + return self.__class__( + self.__baseTag, *self.__superTags + (superTag,) + ) + def __radd__(self, superTag): + return self.__class__( + self.__baseTag, *(superTag,) + self.__superTags + ) + + def tagExplicitly(self, superTag): + tagClass, tagFormat, tagId = superTag + if tagClass == tagClassUniversal: + raise error.PyAsn1Error( + 'Can\'t tag with UNIVERSAL-class tag' + ) + if tagFormat != tagFormatConstructed: + superTag = Tag(tagClass, tagFormatConstructed, tagId) + return self + superTag + + def tagImplicitly(self, superTag): + tagClass, tagFormat, tagId = superTag + if self.__superTags: + superTag = Tag(tagClass, self.__superTags[-1][1], tagId) + return self[:-1] + superTag + + def getBaseTag(self): return self.__baseTag + def __getitem__(self, idx): + if isinstance(idx, slice): + return self.__class__( + self.__baseTag, *getitem(self.__superTags, idx) + ) + return self.__superTags[idx] + def __eq__(self, other): return self.uniq == other.uniq + def __ne__(self, other): return self.uniq != other.uniq + 
def __lt__(self, other): return self.uniq < other.uniq + def __le__(self, other): return self.uniq <= other.uniq + def __gt__(self, other): return self.uniq > other.uniq + def __ge__(self, other): return self.uniq >= other.uniq + def __hash__(self): return self.__hashedSuperTags + def __len__(self): return self.__lenOfSuperTags + def isSuperTagSetOf(self, tagSet): + if len(tagSet) < self.__lenOfSuperTags: + return + idx = self.__lenOfSuperTags - 1 + while idx >= 0: + if self.__superTags[idx] != tagSet[idx]: + return + idx = idx - 1 + return 1 + +def initTagSet(tag): return TagSet(tag, tag) diff --git a/lib/python3.4/site-packages/pyasn1/type/tagmap.py b/lib/python3.4/site-packages/pyasn1/type/tagmap.py new file mode 100644 index 0000000..feb91ae --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/type/tagmap.py @@ -0,0 +1,66 @@ +from pyasn1 import error + +class TagMap: + def __init__(self, posMap={}, negMap={}, defType=None): + self.__posMap = posMap.copy() + self.__negMap = negMap.copy() + self.__defType = defType + + def __contains__(self, tagSet): + return tagSet in self.__posMap or \ + self.__defType is not None and tagSet not in self.__negMap + + def __getitem__(self, tagSet): + if tagSet in self.__posMap: + return self.__posMap[tagSet] + elif tagSet in self.__negMap: + raise error.PyAsn1Error('Key in negative map') + elif self.__defType is not None: + return self.__defType + else: + raise KeyError() + + def __repr__(self): + s = self.__class__.__name__ + '(' + if self.__posMap: + s = s + 'posMap=%r, ' % (self.__posMap,) + if self.__negMap: + s = s + 'negMap=%r, ' % (self.__negMap,) + if self.__defType is not None: + s = s + 'defType=%r' % (self.__defType,) + return s + ')' + + def __str__(self): + s = self.__class__.__name__ + ':\n' + if self.__posMap: + s = s + 'posMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__posMap.values()]) + if self.__negMap: + s = s + 'negMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__negMap.values()]) + if self.__defType is not None: + s = s + 'defType:\n%s, ' % self.__defType.prettyPrintType() + return s + + def clone(self, parentType, tagMap, uniq=False): + if self.__defType is not None and tagMap.getDef() is not None: + raise error.PyAsn1Error('Duplicate default value at %s' % (self,)) + if tagMap.getDef() is not None: + defType = tagMap.getDef() + else: + defType = self.__defType + + posMap = self.__posMap.copy() + for k in tagMap.getPosMap(): + if uniq and k in posMap: + raise error.PyAsn1Error('Duplicate positive key %s' % (k,)) + posMap[k] = parentType + + negMap = self.__negMap.copy() + negMap.update(tagMap.getNegMap()) + + return self.__class__( + posMap, negMap, defType, + ) + + def getPosMap(self): return self.__posMap.copy() + def getNegMap(self): return self.__negMap.copy() + def getDef(self): return self.__defType diff --git a/lib/python3.4/site-packages/pyasn1/type/univ.py b/lib/python3.4/site-packages/pyasn1/type/univ.py new file mode 100644 index 0000000..4ed640f --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/type/univ.py @@ -0,0 +1,1156 @@ +# ASN.1 "universal" data types +import operator, sys, math +from pyasn1.type import base, tag, constraint, namedtype, namedval, tagmap +from pyasn1.codec.ber import eoo +from pyasn1.compat import octets +from pyasn1 import error + +# "Simple" ASN.1 types (yet incomplete) + +class Integer(base.AbstractSimpleAsn1Item): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x02) + ) + namedValues = namedval.NamedValues() + 
def __init__(self, value=None, tagSet=None, subtypeSpec=None, + namedValues=None): + if namedValues is None: + self.__namedValues = self.namedValues + else: + self.__namedValues = namedValues + base.AbstractSimpleAsn1Item.__init__( + self, value, tagSet, subtypeSpec + ) + + def __repr__(self): + if self.__namedValues is not self.namedValues: + return '%s, %r)' % (base.AbstractSimpleAsn1Item.__repr__(self)[:-1], self.__namedValues) + else: + return base.AbstractSimpleAsn1Item.__repr__(self) + + def __and__(self, value): return self.clone(self._value & value) + def __rand__(self, value): return self.clone(value & self._value) + def __or__(self, value): return self.clone(self._value | value) + def __ror__(self, value): return self.clone(value | self._value) + def __xor__(self, value): return self.clone(self._value ^ value) + def __rxor__(self, value): return self.clone(value ^ self._value) + def __lshift__(self, value): return self.clone(self._value << value) + def __rshift__(self, value): return self.clone(self._value >> value) + + def __add__(self, value): return self.clone(self._value + value) + def __radd__(self, value): return self.clone(value + self._value) + def __sub__(self, value): return self.clone(self._value - value) + def __rsub__(self, value): return self.clone(value - self._value) + def __mul__(self, value): return self.clone(self._value * value) + def __rmul__(self, value): return self.clone(value * self._value) + def __mod__(self, value): return self.clone(self._value % value) + def __rmod__(self, value): return self.clone(value % self._value) + def __pow__(self, value, modulo=None): return self.clone(pow(self._value, value, modulo)) + def __rpow__(self, value): return self.clone(pow(value, self._value)) + + if sys.version_info[0] <= 2: + def __div__(self, value): return self.clone(self._value // value) + def __rdiv__(self, value): return self.clone(value // self._value) + else: + def __truediv__(self, value): return self.clone(self._value / value) + def __rtruediv__(self, value): return self.clone(value / self._value) + def __divmod__(self, value): return self.clone(self._value // value) + def __rdivmod__(self, value): return self.clone(value // self._value) + + __hash__ = base.AbstractSimpleAsn1Item.__hash__ + + def __int__(self): return int(self._value) + if sys.version_info[0] <= 2: + def __long__(self): return long(self._value) + def __float__(self): return float(self._value) + def __abs__(self): return self.clone(abs(self._value)) + def __index__(self): return int(self._value) + def __pos__(self): return self.clone(+self._value) + def __neg__(self): return self.clone(-self._value) + def __invert__(self): return self.clone(~self._value) + def __round__(self, n=0): + r = round(self._value, n) + if n: + return self.clone(r) + else: + return r + def __floor__(self): return math.floor(self._value) + def __ceil__(self): return math.ceil(self._value) + if sys.version_info[0:2] > (2, 5): + def __trunc__(self): return self.clone(math.trunc(self._value)) + + def __lt__(self, value): return self._value < value + def __le__(self, value): return self._value <= value + def __eq__(self, value): return self._value == value + def __ne__(self, value): return self._value != value + def __gt__(self, value): return self._value > value + def __ge__(self, value): return self._value >= value + + def prettyIn(self, value): + if not isinstance(value, str): + try: + return int(value) + except: + raise error.PyAsn1Error( + 'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1]) + ) + r = 
self.__namedValues.getValue(value) + if r is not None: + return r + try: + return int(value) + except: + raise error.PyAsn1Error( + 'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1]) + ) + + def prettyOut(self, value): + r = self.__namedValues.getName(value) + return r is None and str(value) or repr(r) + + def getNamedValues(self): return self.__namedValues + + def clone(self, value=None, tagSet=None, subtypeSpec=None, + namedValues=None): + if value is None and tagSet is None and subtypeSpec is None \ + and namedValues is None: + return self + if value is None: + value = self._value + if tagSet is None: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + if namedValues is None: + namedValues = self.__namedValues + return self.__class__(value, tagSet, subtypeSpec, namedValues) + + def subtype(self, value=None, implicitTag=None, explicitTag=None, + subtypeSpec=None, namedValues=None): + if value is None: + value = self._value + if implicitTag is not None: + tagSet = self._tagSet.tagImplicitly(implicitTag) + elif explicitTag is not None: + tagSet = self._tagSet.tagExplicitly(explicitTag) + else: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + else: + subtypeSpec = subtypeSpec + self._subtypeSpec + if namedValues is None: + namedValues = self.__namedValues + else: + namedValues = namedValues + self.__namedValues + return self.__class__(value, tagSet, subtypeSpec, namedValues) + +class Boolean(Integer): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x01), + ) + subtypeSpec = Integer.subtypeSpec+constraint.SingleValueConstraint(0,1) + namedValues = Integer.namedValues.clone(('False', 0), ('True', 1)) + +class BitString(base.AbstractSimpleAsn1Item): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x03) + ) + namedValues = namedval.NamedValues() + def __init__(self, value=None, tagSet=None, subtypeSpec=None, + namedValues=None): + if namedValues is None: + self.__namedValues = self.namedValues + else: + self.__namedValues = namedValues + base.AbstractSimpleAsn1Item.__init__( + self, value, tagSet, subtypeSpec + ) + + def clone(self, value=None, tagSet=None, subtypeSpec=None, + namedValues=None): + if value is None and tagSet is None and subtypeSpec is None \ + and namedValues is None: + return self + if value is None: + value = self._value + if tagSet is None: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + if namedValues is None: + namedValues = self.__namedValues + return self.__class__(value, tagSet, subtypeSpec, namedValues) + + def subtype(self, value=None, implicitTag=None, explicitTag=None, + subtypeSpec=None, namedValues=None): + if value is None: + value = self._value + if implicitTag is not None: + tagSet = self._tagSet.tagImplicitly(implicitTag) + elif explicitTag is not None: + tagSet = self._tagSet.tagExplicitly(explicitTag) + else: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + else: + subtypeSpec = subtypeSpec + self._subtypeSpec + if namedValues is None: + namedValues = self.__namedValues + else: + namedValues = namedValues + self.__namedValues + return self.__class__(value, tagSet, subtypeSpec, namedValues) + + def __str__(self): return str(tuple(self)) + + # Immutable sequence object protocol + + def __len__(self): + if self._len is None: + self._len = len(self._value) + return self._len + def __getitem__(self, i): + if 
isinstance(i, slice): + return self.clone(operator.getitem(self._value, i)) + else: + return self._value[i] + + def __add__(self, value): return self.clone(self._value + value) + def __radd__(self, value): return self.clone(value + self._value) + def __mul__(self, value): return self.clone(self._value * value) + def __rmul__(self, value): return self * value + + def prettyIn(self, value): + r = [] + if not value: + return () + elif isinstance(value, str): + if value[0] == '\'': + if value[-2:] == '\'B': + for v in value[1:-2]: + if v == '0': + r.append(0) + elif v == '1': + r.append(1) + else: + raise error.PyAsn1Error( + 'Non-binary BIT STRING initializer %s' % (v,) + ) + return tuple(r) + elif value[-2:] == '\'H': + for v in value[1:-2]: + i = 4 + v = int(v, 16) + while i: + i = i - 1 + r.append((v>>i)&0x01) + return tuple(r) + else: + raise error.PyAsn1Error( + 'Bad BIT STRING value notation %s' % (value,) + ) + else: + for i in value.split(','): + j = self.__namedValues.getValue(i) + if j is None: + raise error.PyAsn1Error( + 'Unknown bit identifier \'%s\'' % (i,) + ) + if j >= len(r): + r.extend([0]*(j-len(r)+1)) + r[j] = 1 + return tuple(r) + elif isinstance(value, (tuple, list)): + r = tuple(value) + for b in r: + if b and b != 1: + raise error.PyAsn1Error( + 'Non-binary BitString initializer \'%s\'' % (r,) + ) + return r + elif isinstance(value, BitString): + return tuple(value) + else: + raise error.PyAsn1Error( + 'Bad BitString initializer type \'%s\'' % (value,) + ) + + def prettyOut(self, value): + return '\"\'%s\'B\"' % ''.join([str(x) for x in value]) + +try: + all +except NameError: # Python 2.4 + def all(iterable): + for element in iterable: + if not element: + return False + return True + +class OctetString(base.AbstractSimpleAsn1Item): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x04) + ) + defaultBinValue = defaultHexValue = base.noValue + encoding = 'us-ascii' + def __init__(self, value=None, tagSet=None, subtypeSpec=None, + encoding=None, binValue=None, hexValue=None): + if encoding is None: + self._encoding = self.encoding + else: + self._encoding = encoding + if binValue is not None: + value = self.fromBinaryString(binValue) + if hexValue is not None: + value = self.fromHexString(hexValue) + if value is None or value is base.noValue: + value = self.defaultHexValue + if value is None or value is base.noValue: + value = self.defaultBinValue + self.__asNumbersCache = None + base.AbstractSimpleAsn1Item.__init__(self, value, tagSet, subtypeSpec) + + def clone(self, value=None, tagSet=None, subtypeSpec=None, + encoding=None, binValue=None, hexValue=None): + if value is None and tagSet is None and subtypeSpec is None and \ + encoding is None and binValue is None and hexValue is None: + return self + if value is None and binValue is None and hexValue is None: + value = self._value + if tagSet is None: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + if encoding is None: + encoding = self._encoding + return self.__class__( + value, tagSet, subtypeSpec, encoding, binValue, hexValue + ) + + if sys.version_info[0] <= 2: + def prettyIn(self, value): + if isinstance(value, str): + return value + elif isinstance(value, unicode): + try: + return value.encode(self._encoding) + except (LookupError, UnicodeEncodeError): + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) + elif isinstance(value, (tuple, list)): + try: + return ''.join([ chr(x) 
for x in value ]) + except ValueError: + raise error.PyAsn1Error( + 'Bad OctetString initializer \'%s\'' % (value,) + ) + else: + return str(value) + else: + def prettyIn(self, value): + if isinstance(value, bytes): + return value + elif isinstance(value, str): + try: + return value.encode(self._encoding) + except UnicodeEncodeError: + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) + elif isinstance(value, OctetString): + return value.asOctets() + elif isinstance(value, (tuple, list, map)): + try: + return bytes(value) + except ValueError: + raise error.PyAsn1Error( + 'Bad OctetString initializer \'%s\'' % (value,) + ) + else: + try: + return str(value).encode(self._encoding) + except UnicodeEncodeError: + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) + + + def fromBinaryString(self, value): + bitNo = 8; byte = 0; r = () + for v in value: + if bitNo: + bitNo = bitNo - 1 + else: + bitNo = 7 + r = r + (byte,) + byte = 0 + if v == '0': + v = 0 + elif v == '1': + v = 1 + else: + raise error.PyAsn1Error( + 'Non-binary OCTET STRING initializer %s' % (v,) + ) + byte = byte | (v << bitNo) + return octets.ints2octs(r + (byte,)) + + def fromHexString(self, value): + r = p = () + for v in value: + if p: + r = r + (int(p+v, 16),) + p = () + else: + p = v + if p: + r = r + (int(p+'0', 16),) + return octets.ints2octs(r) + + def prettyOut(self, value): + if sys.version_info[0] <= 2: + numbers = tuple(( ord(x) for x in value )) + else: + numbers = tuple(value) + if all(x >= 32 and x <= 126 for x in numbers): + return str(value) + else: + return '0x' + ''.join(( '%.2x' % x for x in numbers )) + + def __repr__(self): + r = [] + doHex = False + if self._value is not self.defaultValue: + for x in self.asNumbers(): + if x < 32 or x > 126: + doHex = True + break + if not doHex: + r.append('%r' % (self._value,)) + if self._tagSet is not self.tagSet: + r.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + r.append('subtypeSpec=%r' % (self._subtypeSpec,)) + if self.encoding is not self._encoding: + r.append('encoding=%r' % (self._encoding,)) + if doHex: + r.append('hexValue=%r' % ''.join([ '%.2x' % x for x in self.asNumbers() ])) + return '%s(%s)' % (self.__class__.__name__, ', '.join(r)) + + if sys.version_info[0] <= 2: + def __str__(self): return str(self._value) + def __unicode__(self): + return self._value.decode(self._encoding, 'ignore') + def asOctets(self): return self._value + def asNumbers(self): + if self.__asNumbersCache is None: + self.__asNumbersCache = tuple([ ord(x) for x in self._value ]) + return self.__asNumbersCache + else: + def __str__(self): return self._value.decode(self._encoding, 'ignore') + def __bytes__(self): return self._value + def asOctets(self): return self._value + def asNumbers(self): + if self.__asNumbersCache is None: + self.__asNumbersCache = tuple(self._value) + return self.__asNumbersCache + + # Immutable sequence object protocol + + def __len__(self): + if self._len is None: + self._len = len(self._value) + return self._len + def __getitem__(self, i): + if isinstance(i, slice): + return self.clone(operator.getitem(self._value, i)) + else: + return self._value[i] + + def __add__(self, value): return self.clone(self._value + self.prettyIn(value)) + def __radd__(self, value): return self.clone(self.prettyIn(value) + self._value) + def __mul__(self, value): return self.clone(self._value * value) + def __rmul__(self, value): 
return self * value
+    def __int__(self): return int(self._value)
+    def __float__(self): return float(self._value)
+
+class Null(OctetString):
+    defaultValue = ''.encode()  # This is tightly constrained
+    tagSet = baseTagSet = tag.initTagSet(
+        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x05)
+        )
+    subtypeSpec = OctetString.subtypeSpec+constraint.SingleValueConstraint(''.encode())
+
+if sys.version_info[0] <= 2:
+    intTypes = (int, long)
+else:
+    intTypes = (int,)
+
+numericTypes = intTypes + (float,)
+
+class ObjectIdentifier(base.AbstractSimpleAsn1Item):
+    tagSet = baseTagSet = tag.initTagSet(
+        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x06)
+        )
+    def __add__(self, other): return self.clone(self._value + other)
+    def __radd__(self, other): return self.clone(other + self._value)
+
+    def asTuple(self): return self._value
+
+    # Sequence object protocol
+
+    def __len__(self):
+        if self._len is None:
+            self._len = len(self._value)
+        return self._len
+    def __getitem__(self, i):
+        if isinstance(i, slice):
+            return self.clone(
+                operator.getitem(self._value, i)
+                )
+        else:
+            return self._value[i]
+
+    def __str__(self): return self.prettyPrint()
+    def __repr__(self):
+        return '%s(%r)' % (self.__class__.__name__, self.prettyPrint())
+
+    def index(self, suboid): return self._value.index(suboid)
+
+    def isPrefixOf(self, value):
+        """Returns true if argument OID resides deeper in the OID tree"""
+        l = len(self)
+        if l <= len(value):
+            if self._value[:l] == value[:l]:
+                return 1
+        return 0
+
+    def prettyIn(self, value):
+        """Dotted -> tuple of numerics OID converter"""
+        if isinstance(value, tuple):
+            pass
+        elif isinstance(value, ObjectIdentifier):
+            return tuple(value)
+        elif octets.isStringType(value):
+            r = []
+            for element in [ x for x in value.split('.') if x != '' ]:
+                try:
+                    r.append(int(element, 0))
+                except ValueError:
+                    raise error.PyAsn1Error(
+                        'Malformed Object ID %s at %s: %s' %
+                        (str(value), self.__class__.__name__, sys.exc_info()[1])
+                        )
+            value = tuple(r)
+        else:
+            try:
+                value = tuple(value)
+            except TypeError:
+                raise error.PyAsn1Error(
+                    'Malformed Object ID %s at %s: %s' %
+                    (str(value), self.__class__.__name__, sys.exc_info()[1])
+                    )
+
+        for x in value:
+            if not isinstance(x, intTypes) or x < 0:
+                raise error.PyAsn1Error(
+                    'Invalid sub-ID in %s at %s' % (value, self.__class__.__name__)
+                    )
+
+        return value
+
+    def prettyOut(self, value): return '.'.join([ str(x) for x in value ])
+
+class Real(base.AbstractSimpleAsn1Item):
+    binEncBase = None  # binEncBase = 16 is recommended for large numbers
+    try:
+        _plusInf = float('inf')
+        _minusInf = float('-inf')
+        _inf = (_plusInf, _minusInf)
+    except ValueError:
+        # Infinity support is platform and Python dependent
+        _plusInf = _minusInf = None
+        _inf = ()
+
+    tagSet = baseTagSet = tag.initTagSet(
+        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x09)
+        )
+
+    def __normalizeBase10(self, value):
+        m, b, e = value
+        while m and m % 10 == 0:
+            m = m // 10  # floor division keeps the mantissa an integer on Python 3
+            e = e + 1
+        return m, b, e
+
+    def prettyIn(self, value):
+        if isinstance(value, tuple) and len(value) == 3:
+            if not isinstance(value[0], numericTypes) or \
+                    not isinstance(value[1], intTypes) or \
+                    not isinstance(value[2], intTypes):
+                raise error.PyAsn1Error('Lame Real value syntax: %s' % (value,))
+            if isinstance(value[0], float) and \
+                    self._inf and value[0] in self._inf:
+                return value[0]
+            if value[1] not in (2, 10):
+                raise error.PyAsn1Error(
+                    'Prohibited base for Real value: %s' % (value[1],)
+                    )
+            if value[1] == 10:
+                value =
self.__normalizeBase10(value) + return value + elif isinstance(value, intTypes): + return self.__normalizeBase10((value, 10, 0)) + elif isinstance(value, (str, float)): + if isinstance(value, str): + try: + value = float(value) + except ValueError: + raise error.PyAsn1Error( + 'Bad real value syntax: %s' % (value,) + ) + if self._inf and value in self._inf: + return value + else: + e = 0 + while int(value) != value: + value = value * 10 + e = e - 1 + return self.__normalizeBase10((int(value), 10, e)) + elif isinstance(value, Real): + return tuple(value) + raise error.PyAsn1Error( + 'Bad real value syntax: %s' % (value,) + ) + + def prettyOut(self, value): + if value in self._inf: + return '\'%s\'' % value + else: + return str(value) + + def prettyPrint(self, scope=0): + if self.isInfinity(): + return self.prettyOut(self._value) + else: + return str(float(self)) + + def isPlusInfinity(self): return self._value == self._plusInf + def isMinusInfinity(self): return self._value == self._minusInf + def isInfinity(self): return self._value in self._inf + + def __str__(self): return str(float(self)) + + def __add__(self, value): return self.clone(float(self) + value) + def __radd__(self, value): return self + value + def __mul__(self, value): return self.clone(float(self) * value) + def __rmul__(self, value): return self * value + def __sub__(self, value): return self.clone(float(self) - value) + def __rsub__(self, value): return self.clone(value - float(self)) + def __mod__(self, value): return self.clone(float(self) % value) + def __rmod__(self, value): return self.clone(value % float(self)) + def __pow__(self, value, modulo=None): return self.clone(pow(float(self), value, modulo)) + def __rpow__(self, value): return self.clone(pow(value, float(self))) + + if sys.version_info[0] <= 2: + def __div__(self, value): return self.clone(float(self) / value) + def __rdiv__(self, value): return self.clone(value / float(self)) + else: + def __truediv__(self, value): return self.clone(float(self) / value) + def __rtruediv__(self, value): return self.clone(value / float(self)) + def __divmod__(self, value): return self.clone(float(self) // value) + def __rdivmod__(self, value): return self.clone(value // float(self)) + + def __int__(self): return int(float(self)) + if sys.version_info[0] <= 2: + def __long__(self): return long(float(self)) + def __float__(self): + if self._value in self._inf: + return self._value + else: + return float( + self._value[0] * pow(self._value[1], self._value[2]) + ) + def __abs__(self): return self.clone(abs(float(self))) + def __pos__(self): return self.clone(+float(self)) + def __neg__(self): return self.clone(-float(self)) + def __round__(self, n=0): + r = round(float(self), n) + if n: + return self.clone(r) + else: + return r + def __floor__(self): return self.clone(math.floor(float(self))) + def __ceil__(self): return self.clone(math.ceil(float(self))) + if sys.version_info[0:2] > (2, 5): + def __trunc__(self): return self.clone(math.trunc(float(self))) + + def __lt__(self, value): return float(self) < value + def __le__(self, value): return float(self) <= value + def __eq__(self, value): return float(self) == value + def __ne__(self, value): return float(self) != value + def __gt__(self, value): return float(self) > value + def __ge__(self, value): return float(self) >= value + + if sys.version_info[0] <= 2: + def __nonzero__(self): return bool(float(self)) + else: + def __bool__(self): return bool(float(self)) + __hash__ = base.AbstractSimpleAsn1Item.__hash__ + + def 
__getitem__(self, idx): + if self._value in self._inf: + raise error.PyAsn1Error('Invalid infinite value operation') + else: + return self._value[idx] + +class Enumerated(Integer): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x0A) + ) + +# "Structured" ASN.1 types + +class SetOf(base.AbstractConstructedAsn1Item): + componentType = None + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11) + ) + typeId = 1 + strictConstraints = False + + def _cloneComponentValues(self, myClone, cloneValueFlag): + idx = 0; l = len(self._componentValues) + while idx < l: + c = self._componentValues[idx] + if c is not None: + if isinstance(c, base.AbstractConstructedAsn1Item): + myClone.setComponentByPosition( + idx, c.clone(cloneValueFlag=cloneValueFlag) + ) + else: + myClone.setComponentByPosition(idx, c.clone()) + idx = idx + 1 + + def _verifyComponent(self, idx, value): + t = self._componentType + if t is None: + return + if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints): + raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t)) + if self.strictConstraints and \ + not t.isSuperTypeOf(value, matchTags=False): + raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t)) + + def getComponentByPosition(self, idx): return self._componentValues[idx] + def setComponentByPosition(self, idx, value=None, verifyConstraints=True): + l = len(self._componentValues) + if idx >= l: + self._componentValues = self._componentValues + (idx-l+1)*[None] + if value is None: + if self._componentValues[idx] is None: + if self._componentType is None: + raise error.PyAsn1Error('Component type not defined') + self._componentValues[idx] = self._componentType.clone() + self._componentValuesSet = self._componentValuesSet + 1 + return self + elif not isinstance(value, base.Asn1Item): + if self._componentType is None: + raise error.PyAsn1Error('Component type not defined') + if isinstance(self._componentType, base.AbstractSimpleAsn1Item): + value = self._componentType.clone(value=value) + else: + raise error.PyAsn1Error('Instance value required') + if verifyConstraints: + if self._componentType is not None: + self._verifyComponent(idx, value) + self._verifySubtypeSpec(value, idx) + if self._componentValues[idx] is None: + self._componentValuesSet = self._componentValuesSet + 1 + self._componentValues[idx] = value + return self + + def getComponentTagMap(self): + if self._componentType is not None: + return self._componentType.getTagMap() + + def prettyPrint(self, scope=0): + scope = scope + 1 + r = self.__class__.__name__ + ':\n' + for idx in range(len(self._componentValues)): + r = r + ' '*scope + if self._componentValues[idx] is None: + r = r + '<empty>' + else: + r = r + self._componentValues[idx].prettyPrint(scope) + return r + + def prettyPrintType(self, scope=0): + scope = scope + 1 + r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__) + if self._componentType is not None: + r = r + ' '*scope + r = r + self._componentType.prettyPrintType(scope) + return r + '\n' + ' '*(scope-1) + '}' + +class SequenceOf(SetOf): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10) + ) + typeId = 2 + +class SequenceAndSetBase(base.AbstractConstructedAsn1Item): + componentType = namedtype.NamedTypes() + strictConstraints = False + def __init__(self, componentType=None, tagSet=None, + subtypeSpec=None, 
sizeSpec=None): + if componentType is None: + componentType = self.componentType + base.AbstractConstructedAsn1Item.__init__( + self, componentType.clone(), tagSet, subtypeSpec, sizeSpec + ) + self._componentTypeLen = len(self._componentType) + + def __getitem__(self, idx): + if isinstance(idx, str): + return self.getComponentByName(idx) + else: + return base.AbstractConstructedAsn1Item.__getitem__(self, idx) + + def __setitem__(self, idx, value): + if isinstance(idx, str): + self.setComponentByName(idx, value) + else: + base.AbstractConstructedAsn1Item.__setitem__(self, idx, value) + + def _cloneComponentValues(self, myClone, cloneValueFlag): + idx = 0; l = len(self._componentValues) + while idx < l: + c = self._componentValues[idx] + if c is not None: + if isinstance(c, base.AbstractConstructedAsn1Item): + myClone.setComponentByPosition( + idx, c.clone(cloneValueFlag=cloneValueFlag) + ) + else: + myClone.setComponentByPosition(idx, c.clone()) + idx = idx + 1 + + def _verifyComponent(self, idx, value): + if idx >= self._componentTypeLen: + raise error.PyAsn1Error( + 'Component type error out of range' + ) + t = self._componentType[idx].getType() + if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints): + raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t)) + if self.strictConstraints and \ + not t.isSuperTypeOf(value, matchTags=False): + raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t)) + + def getComponentByName(self, name): + return self.getComponentByPosition( + self._componentType.getPositionByName(name) + ) + def setComponentByName(self, name, value=None, verifyConstraints=True): + return self.setComponentByPosition( + self._componentType.getPositionByName(name),value,verifyConstraints + ) + + def getComponentByPosition(self, idx): + try: + return self._componentValues[idx] + except IndexError: + if idx < self._componentTypeLen: + return + raise + def setComponentByPosition(self, idx, value=None, + verifyConstraints=True, + exactTypes=False, + matchTags=True, + matchConstraints=True): + l = len(self._componentValues) + if idx >= l: + self._componentValues = self._componentValues + (idx-l+1)*[None] + if value is None: + if self._componentValues[idx] is None: + self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone() + self._componentValuesSet = self._componentValuesSet + 1 + return self + elif not isinstance(value, base.Asn1Item): + t = self._componentType.getTypeByPosition(idx) + if isinstance(t, base.AbstractSimpleAsn1Item): + value = t.clone(value=value) + else: + raise error.PyAsn1Error('Instance value required') + if verifyConstraints: + if self._componentTypeLen: + self._verifyComponent(idx, value) + self._verifySubtypeSpec(value, idx) + if self._componentValues[idx] is None: + self._componentValuesSet = self._componentValuesSet + 1 + self._componentValues[idx] = value + return self + + def getNameByPosition(self, idx): + if self._componentTypeLen: + return self._componentType.getNameByPosition(idx) + + def getDefaultComponentByPosition(self, idx): + if self._componentTypeLen and self._componentType[idx].isDefaulted: + return self._componentType[idx].getType() + + def getComponentType(self): + if self._componentTypeLen: + return self._componentType + + def setDefaultComponents(self): + if self._componentTypeLen == self._componentValuesSet: + return + idx = self._componentTypeLen + while idx: + idx = idx - 1 + if self._componentType[idx].isDefaulted: + if 
self.getComponentByPosition(idx) is None: + self.setComponentByPosition(idx) + elif not self._componentType[idx].isOptional: + if self.getComponentByPosition(idx) is None: + raise error.PyAsn1Error( + 'Uninitialized component #%s at %r' % (idx, self) + ) + + def prettyPrint(self, scope=0): + scope = scope + 1 + r = self.__class__.__name__ + ':\n' + for idx in range(len(self._componentValues)): + if self._componentValues[idx] is not None: + r = r + ' '*scope + componentType = self.getComponentType() + if componentType is None: + r = r + '<no-name>' + else: + r = r + componentType.getNameByPosition(idx) + r = '%s=%s\n' % ( + r, self._componentValues[idx].prettyPrint(scope) + ) + return r + + def prettyPrintType(self, scope=0): + scope = scope + 1 + r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__) + for idx in range(len(self.componentType)): + r = r + ' '*scope + r = r + '"%s"' % self.componentType.getNameByPosition(idx) + r = '%s = %s\n' % ( + r, self._componentType.getTypeByPosition(idx).prettyPrintType(scope) + ) + return r + '\n' + ' '*(scope-1) + '}' + +class Sequence(SequenceAndSetBase): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10) + ) + typeId = 3 + + def getComponentTagMapNearPosition(self, idx): + if self._componentType: + return self._componentType.getTagMapNearPosition(idx) + + def getComponentPositionNearType(self, tagSet, idx): + if self._componentType: + return self._componentType.getPositionNearType(tagSet, idx) + else: + return idx + +class Set(SequenceAndSetBase): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11) + ) + typeId = 4 + + def getComponent(self, innerFlag=0): return self + + def getComponentByType(self, tagSet, innerFlag=0): + c = self.getComponentByPosition( + self._componentType.getPositionByType(tagSet) + ) + if innerFlag and isinstance(c, Set): + # get inner component by inner tagSet + return c.getComponent(1) + else: + # get outer component by inner tagSet + return c + + def setComponentByType(self, tagSet, value=None, innerFlag=0, + verifyConstraints=True): + idx = self._componentType.getPositionByType(tagSet) + t = self._componentType.getTypeByPosition(idx) + if innerFlag: # set inner component by inner tagSet + if t.getTagSet(): + return self.setComponentByPosition( + idx, value, verifyConstraints + ) + else: + t = self.setComponentByPosition(idx).getComponentByPosition(idx) + return t.setComponentByType( + tagSet, value, innerFlag, verifyConstraints + ) + else: # set outer component by inner tagSet + return self.setComponentByPosition( + idx, value, verifyConstraints + ) + + def getComponentTagMap(self): + if self._componentType: + return self._componentType.getTagMap(True) + + def getComponentPositionByType(self, tagSet): + if self._componentType: + return self._componentType.getPositionByType(tagSet) + +class Choice(Set): + tagSet = baseTagSet = tag.TagSet() # untagged + sizeSpec = constraint.ConstraintsIntersection( + constraint.ValueSizeConstraint(1, 1) + ) + typeId = 5 + _currentIdx = None + + def __eq__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] == other + return NotImplemented + def __ne__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] != other + return NotImplemented + def __lt__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] < other + return NotImplemented + def __le__(self, other): + 
if self._componentValues: + return self._componentValues[self._currentIdx] <= other + return NotImplemented + def __gt__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] > other + return NotImplemented + def __ge__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] >= other + return NotImplemented + if sys.version_info[0] <= 2: + def __nonzero__(self): return bool(self._componentValues) + else: + def __bool__(self): return bool(self._componentValues) + + def __len__(self): return self._currentIdx is not None and 1 or 0 + + def verifySizeSpec(self): + if self._currentIdx is None: + raise error.PyAsn1Error('Component not chosen') + else: + self._sizeSpec(' ') + + def _cloneComponentValues(self, myClone, cloneValueFlag): + try: + c = self.getComponent() + except error.PyAsn1Error: + pass + else: + if isinstance(c, Choice): + tagSet = c.getEffectiveTagSet() + else: + tagSet = c.getTagSet() + if isinstance(c, base.AbstractConstructedAsn1Item): + myClone.setComponentByType( + tagSet, c.clone(cloneValueFlag=cloneValueFlag) + ) + else: + myClone.setComponentByType(tagSet, c.clone()) + + def setComponentByPosition(self, idx, value=None, verifyConstraints=True): + l = len(self._componentValues) + if idx >= l: + self._componentValues = self._componentValues + (idx-l+1)*[None] + if self._currentIdx is not None: + self._componentValues[self._currentIdx] = None + if value is None: + if self._componentValues[idx] is None: + self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone() + self._componentValuesSet = 1 + self._currentIdx = idx + return self + elif not isinstance(value, base.Asn1Item): + value = self._componentType.getTypeByPosition(idx).clone( + value=value + ) + if verifyConstraints: + if self._componentTypeLen: + self._verifyComponent(idx, value) + self._verifySubtypeSpec(value, idx) + self._componentValues[idx] = value + self._currentIdx = idx + self._componentValuesSet = 1 + return self + + def getMinTagSet(self): + if self._tagSet: + return self._tagSet + else: + return self._componentType.genMinTagSet() + + def getEffectiveTagSet(self): + if self._tagSet: + return self._tagSet + else: + c = self.getComponent() + if isinstance(c, Choice): + return c.getEffectiveTagSet() + else: + return c.getTagSet() + + def getTagMap(self): + if self._tagSet: + return Set.getTagMap(self) + else: + return Set.getComponentTagMap(self) + + def getComponent(self, innerFlag=0): + if self._currentIdx is None: + raise error.PyAsn1Error('Component not chosen') + else: + c = self._componentValues[self._currentIdx] + if innerFlag and isinstance(c, Choice): + return c.getComponent(innerFlag) + else: + return c + + def getName(self, innerFlag=0): + if self._currentIdx is None: + raise error.PyAsn1Error('Component not chosen') + else: + if innerFlag: + c = self._componentValues[self._currentIdx] + if isinstance(c, Choice): + return c.getName(innerFlag) + return self._componentType.getNameByPosition(self._currentIdx) + + def setDefaultComponents(self): pass + +class Any(OctetString): + tagSet = baseTagSet = tag.TagSet() # untagged + typeId = 6 + + def getTagMap(self): + return tagmap.TagMap( + { self.getTagSet(): self }, + { eoo.endOfOctets.getTagSet(): eoo.endOfOctets }, + self + ) + +# XXX +# coercion rules? 
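A minimal usage sketch, not part of the vendored files: it assumes this site-packages tree is on sys.path as pyasn1, and exercises only initializers and helpers defined in univ.py above.

from pyasn1.type import univ

# OctetString accepts text as well as hex/binary initializers
# (prettyIn, fromHexString and fromBinaryString above).
assert univ.OctetString(hexValue='fa60').asNumbers() == (0xfa, 0x60)
assert univ.OctetString(binValue='1010').asOctets()  # bits packed MSB-first

# ObjectIdentifier converts dotted notation to a tuple of sub-IDs
# (prettyIn) and supports prefix tests (isPrefixOf).
oid = univ.ObjectIdentifier('1.3.6.1.2.1')
assert oid.asTuple() == (1, 3, 6, 1, 2, 1)
assert oid.isPrefixOf(univ.ObjectIdentifier('1.3.6.1.2.1.1'))

# Real accepts (mantissa, base, exponent) triples; only bases 2 and 10
# are allowed, and base-10 values are normalized.
assert float(univ.Real((123, 10, 0))) == 123.0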
diff --git a/lib/python3.4/site-packages/pyasn1/type/useful.py b/lib/python3.4/site-packages/pyasn1/type/useful.py new file mode 100644 index 0000000..1766534 --- /dev/null +++ b/lib/python3.4/site-packages/pyasn1/type/useful.py @@ -0,0 +1,17 @@ +# ASN.1 "useful" types +from pyasn1.type import char, tag + +class ObjectDescriptor(char.GraphicString): + tagSet = char.GraphicString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7) + ) + +class GeneralizedTime(char.VisibleString): + tagSet = char.VisibleString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24) + ) + +class UTCTime(char.VisibleString): + tagSet = char.VisibleString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 23) + ) diff --git a/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/PKG-INFO b/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/PKG-INFO new file mode 100644 index 0000000..80dd9e5 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/PKG-INFO @@ -0,0 +1,17 @@ +Metadata-Version: 1.1 +Name: pycparser +Version: 2.14 +Summary: C parser in Python +Home-page: https://github.com/eliben/pycparser +Author: Eli Bendersky +Author-email: eliben@gmail.com +License: BSD +Description: + pycparser is a complete parser of the C language, written in + pure Python using the PLY parsing library. + It parses C code into an AST and can serve as a front-end for + C compilers or analysis tools. + +Platform: Cross Platform +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 diff --git a/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/SOURCES.txt b/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/SOURCES.txt new file mode 100644 index 0000000..de60dcc --- /dev/null +++ b/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/SOURCES.txt @@ -0,0 +1,121 @@ +CHANGES +LICENSE +MANIFEST.in +README.rst +setup.cfg +setup.py +examples/c-to-c.py +examples/cdecl.py +examples/explore_ast.py +examples/func_calls.py +examples/func_defs.py +examples/rewrite_ast.py +examples/using_cpp_libc.py +examples/using_gcc_E_libc.py +examples/c_files/funky.c +examples/c_files/hash.c +examples/c_files/memmgr.c +examples/c_files/memmgr.h +examples/c_files/year.c +pycparser/__init__.py +pycparser/_ast_gen.py +pycparser/_build_tables.py +pycparser/_c_ast.cfg +pycparser/ast_transforms.py +pycparser/c_ast.py +pycparser/c_generator.py +pycparser/c_lexer.py +pycparser/c_parser.py +pycparser/lextab.py +pycparser/plyparser.py +pycparser/yacctab.py +pycparser.egg-info/PKG-INFO +pycparser.egg-info/SOURCES.txt +pycparser.egg-info/dependency_links.txt +pycparser.egg-info/top_level.txt +pycparser/ply/__init__.py +pycparser/ply/cpp.py +pycparser/ply/ctokens.py +pycparser/ply/lex.py +pycparser/ply/yacc.py +tests/all_tests.py +tests/test_c_ast.py +tests/test_c_generator.py +tests/test_c_lexer.py +tests/test_c_parser.py +tests/test_general.py +tests/c_files/cppd_with_stdio_h.c +tests/c_files/empty.h +tests/c_files/example_c_file.c +tests/c_files/memmgr.c +tests/c_files/memmgr.h +tests/c_files/memmgr_with_h.c +tests/c_files/simplemain.c +tests/c_files/year.c +tests/c_files/hdir/9/inc.h +utils/fake_libc_include/_ansi.h +utils/fake_libc_include/_fake_defines.h +utils/fake_libc_include/_fake_typedefs.h +utils/fake_libc_include/_syslist.h +utils/fake_libc_include/alloca.h +utils/fake_libc_include/ar.h +utils/fake_libc_include/argz.h +utils/fake_libc_include/assert.h 
+utils/fake_libc_include/complex.h +utils/fake_libc_include/ctype.h +utils/fake_libc_include/dirent.h +utils/fake_libc_include/dlfcn.h +utils/fake_libc_include/endian.h +utils/fake_libc_include/envz.h +utils/fake_libc_include/errno.h +utils/fake_libc_include/fastmath.h +utils/fake_libc_include/fcntl.h +utils/fake_libc_include/features.h +utils/fake_libc_include/fenv.h +utils/fake_libc_include/float.h +utils/fake_libc_include/getopt.h +utils/fake_libc_include/grp.h +utils/fake_libc_include/iconv.h +utils/fake_libc_include/ieeefp.h +utils/fake_libc_include/inttypes.h +utils/fake_libc_include/iso646.h +utils/fake_libc_include/langinfo.h +utils/fake_libc_include/libgen.h +utils/fake_libc_include/libintl.h +utils/fake_libc_include/limits.h +utils/fake_libc_include/locale.h +utils/fake_libc_include/malloc.h +utils/fake_libc_include/math.h +utils/fake_libc_include/netdb.h +utils/fake_libc_include/newlib.h +utils/fake_libc_include/paths.h +utils/fake_libc_include/process.h +utils/fake_libc_include/pthread.h +utils/fake_libc_include/pwd.h +utils/fake_libc_include/reent.h +utils/fake_libc_include/regdef.h +utils/fake_libc_include/regex.h +utils/fake_libc_include/sched.h +utils/fake_libc_include/search.h +utils/fake_libc_include/semaphore.h +utils/fake_libc_include/setjmp.h +utils/fake_libc_include/signal.h +utils/fake_libc_include/stdarg.h +utils/fake_libc_include/stdbool.h +utils/fake_libc_include/stddef.h +utils/fake_libc_include/stdint.h +utils/fake_libc_include/stdio.h +utils/fake_libc_include/stdlib.h +utils/fake_libc_include/string.h +utils/fake_libc_include/syslog.h +utils/fake_libc_include/tar.h +utils/fake_libc_include/termios.h +utils/fake_libc_include/tgmath.h +utils/fake_libc_include/time.h +utils/fake_libc_include/unctrl.h +utils/fake_libc_include/unistd.h +utils/fake_libc_include/utime.h +utils/fake_libc_include/utmp.h +utils/fake_libc_include/wchar.h +utils/fake_libc_include/wctype.h +utils/fake_libc_include/zlib.h \ No newline at end of file diff --git a/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/dependency_links.txt b/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/installed-files.txt b/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/installed-files.txt new file mode 100644 index 0000000..bf1feaf --- /dev/null +++ b/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/installed-files.txt @@ -0,0 +1,38 @@ +../pycparser/c_generator.py +../pycparser/c_parser.py +../pycparser/c_lexer.py +../pycparser/_ast_gen.py +../pycparser/c_ast.py +../pycparser/ast_transforms.py +../pycparser/__init__.py +../pycparser/yacctab.py +../pycparser/plyparser.py +../pycparser/lextab.py +../pycparser/_build_tables.py +../pycparser/ply/__init__.py +../pycparser/ply/lex.py +../pycparser/ply/ctokens.py +../pycparser/ply/cpp.py +../pycparser/ply/yacc.py +../pycparser/_c_ast.cfg +../pycparser/__pycache__/c_generator.cpython-34.pyc +../pycparser/__pycache__/c_parser.cpython-34.pyc +../pycparser/__pycache__/c_lexer.cpython-34.pyc +../pycparser/__pycache__/_ast_gen.cpython-34.pyc +../pycparser/__pycache__/c_ast.cpython-34.pyc +../pycparser/__pycache__/ast_transforms.cpython-34.pyc +../pycparser/__pycache__/__init__.cpython-34.pyc +../pycparser/__pycache__/yacctab.cpython-34.pyc +../pycparser/__pycache__/plyparser.cpython-34.pyc 
+../pycparser/__pycache__/lextab.cpython-34.pyc +../pycparser/__pycache__/_build_tables.cpython-34.pyc +../pycparser/ply/__pycache__/__init__.cpython-34.pyc +../pycparser/ply/__pycache__/lex.cpython-34.pyc +../pycparser/ply/__pycache__/ctokens.cpython-34.pyc +../pycparser/ply/__pycache__/cpp.cpython-34.pyc +../pycparser/ply/__pycache__/yacc.cpython-34.pyc +./ +PKG-INFO +dependency_links.txt +SOURCES.txt +top_level.txt diff --git a/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/top_level.txt b/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/top_level.txt new file mode 100644 index 0000000..dc1c9e1 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser-2.14-py3.4.egg-info/top_level.txt @@ -0,0 +1 @@ +pycparser diff --git a/lib/python3.4/site-packages/pycparser/__init__.py b/lib/python3.4/site-packages/pycparser/__init__.py new file mode 100644 index 0000000..6585492 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser/__init__.py @@ -0,0 +1,93 @@ +#----------------------------------------------------------------- +# pycparser: __init__.py +# +# This package file exports some convenience functions for +# interacting with pycparser +# +# Copyright (C) 2008-2015, Eli Bendersky +# License: BSD +#----------------------------------------------------------------- +__all__ = ['c_lexer', 'c_parser', 'c_ast'] +__version__ = '2.14' + +from subprocess import Popen, PIPE +from .c_parser import CParser + + +def preprocess_file(filename, cpp_path='cpp', cpp_args=''): + """ Preprocess a file using cpp. + + filename: + Name of the file you want to preprocess. + + cpp_path: + cpp_args: + Refer to the documentation of parse_file for the meaning of these + arguments. + + When successful, returns the preprocessed file's contents. + Errors from cpp will be printed out. + """ + path_list = [cpp_path] + if isinstance(cpp_args, list): + path_list += cpp_args + elif cpp_args != '': + path_list += [cpp_args] + path_list += [filename] + + try: + # Note the use of universal_newlines to treat all newlines + # as \n for Python's purpose + # + pipe = Popen( path_list, + stdout=PIPE, + universal_newlines=True) + text = pipe.communicate()[0] + except OSError as e: + raise RuntimeError("Unable to invoke 'cpp'. " + + 'Make sure its path was passed correctly\n' + + ('Original error: %s' % e)) + + return text + + +def parse_file(filename, use_cpp=False, cpp_path='cpp', cpp_args='', + parser=None): + """ Parse a C file using pycparser. + + filename: + Name of the file you want to parse. + + use_cpp: + Set to True if you want to execute the C pre-processor + on the file prior to parsing it. + + cpp_path: + If use_cpp is True, this is the path to 'cpp' on your + system. If no path is provided, it attempts to just + execute 'cpp', so it must be in your PATH. + + cpp_args: + If use_cpp is True, set this to the command line arguments strings + to cpp. Be careful with quotes - it's best to pass a raw string + (r'') here. For example: + r'-I../utils/fake_libc_include' + If several arguments are required, pass a list of strings. + + parser: + Optional parser object to be used instead of the default CParser + + When successful, an AST is returned. ParseError can be + thrown if the file doesn't parse successfully. + + Errors from cpp will be printed out. 
+ """ + if use_cpp: + text = preprocess_file(filename, cpp_path, cpp_args) + else: + with open(filename, 'rU') as f: + text = f.read() + + if parser is None: + parser = CParser() + return parser.parse(text, filename) diff --git a/lib/python3.4/site-packages/pycparser/_ast_gen.py b/lib/python3.4/site-packages/pycparser/_ast_gen.py new file mode 100644 index 0000000..bf9cb29 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser/_ast_gen.py @@ -0,0 +1,278 @@ +#----------------------------------------------------------------- +# _ast_gen.py +# +# Generates the AST Node classes from a specification given in +# a configuration file +# +# The design of this module was inspired by astgen.py from the +# Python 2.5 code-base. +# +# Copyright (C) 2008-2015, Eli Bendersky +# License: BSD +#----------------------------------------------------------------- +import pprint +from string import Template + + +class ASTCodeGenerator(object): + def __init__(self, cfg_filename='_c_ast.cfg'): + """ Initialize the code generator from a configuration + file. + """ + self.cfg_filename = cfg_filename + self.node_cfg = [NodeCfg(name, contents) + for (name, contents) in self.parse_cfgfile(cfg_filename)] + + def generate(self, file=None): + """ Generates the code into file, an open file buffer. + """ + src = Template(_PROLOGUE_COMMENT).substitute( + cfg_filename=self.cfg_filename) + + src += _PROLOGUE_CODE + for node_cfg in self.node_cfg: + src += node_cfg.generate_source() + '\n\n' + + file.write(src) + + def parse_cfgfile(self, filename): + """ Parse the configuration file and yield pairs of + (name, contents) for each node. + """ + with open(filename, "r") as f: + for line in f: + line = line.strip() + if not line or line.startswith('#'): + continue + colon_i = line.find(':') + lbracket_i = line.find('[') + rbracket_i = line.find(']') + if colon_i < 1 or lbracket_i <= colon_i or rbracket_i <= lbracket_i: + raise RuntimeError("Invalid line in %s:\n%s\n" % (filename, line)) + + name = line[:colon_i] + val = line[lbracket_i + 1:rbracket_i] + vallist = [v.strip() for v in val.split(',')] if val else [] + yield name, vallist + + +class NodeCfg(object): + """ Node configuration. + + name: node name + contents: a list of contents - attributes and child nodes + See comment at the top of the configuration file for details. 
+ """ + def __init__(self, name, contents): + self.name = name + self.all_entries = [] + self.attr = [] + self.child = [] + self.seq_child = [] + + for entry in contents: + clean_entry = entry.rstrip('*') + self.all_entries.append(clean_entry) + + if entry.endswith('**'): + self.seq_child.append(clean_entry) + elif entry.endswith('*'): + self.child.append(clean_entry) + else: + self.attr.append(entry) + + def generate_source(self): + src = self._gen_init() + src += '\n' + self._gen_children() + src += '\n' + self._gen_attr_names() + return src + + def _gen_init(self): + src = "class %s(Node):\n" % self.name + + if self.all_entries: + args = ', '.join(self.all_entries) + slots = ', '.join("'{0}'".format(e) for e in self.all_entries) + slots += ", 'coord', '__weakref__'" + arglist = '(self, %s, coord=None)' % args + else: + slots = "'coord', '__weakref__'" + arglist = '(self, coord=None)' + + src += " __slots__ = (%s)\n" % slots + src += " def __init__%s:\n" % arglist + + for name in self.all_entries + ['coord']: + src += " self.%s = %s\n" % (name, name) + + return src + + def _gen_children(self): + src = ' def children(self):\n' + + if self.all_entries: + src += ' nodelist = []\n' + + for child in self.child: + src += ( + ' if self.%(child)s is not None:' + + ' nodelist.append(("%(child)s", self.%(child)s))\n') % ( + dict(child=child)) + + for seq_child in self.seq_child: + src += ( + ' for i, child in enumerate(self.%(child)s or []):\n' + ' nodelist.append(("%(child)s[%%d]" %% i, child))\n') % ( + dict(child=seq_child)) + + src += ' return tuple(nodelist)\n' + else: + src += ' return ()\n' + + return src + + def _gen_attr_names(self): + src = " attr_names = (" + ''.join("%r, " % nm for nm in self.attr) + ')' + return src + + +_PROLOGUE_COMMENT = \ +r'''#----------------------------------------------------------------- +# ** ATTENTION ** +# This code was automatically generated from the file: +# $cfg_filename +# +# Do not modify it directly. Modify the configuration file and +# run the generator again. +# ** ** *** ** ** +# +# pycparser: c_ast.py +# +# AST Node classes. +# +# Copyright (C) 2008-2015, Eli Bendersky +# License: BSD +#----------------------------------------------------------------- + +''' + +_PROLOGUE_CODE = r''' +import sys + + +class Node(object): + __slots__ = () + """ Abstract base class for AST nodes. + """ + def children(self): + """ A sequence of all children that are Nodes + """ + pass + + def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None): + """ Pretty print the Node and all its attributes and + children (recursively) to a buffer. + + buf: + Open IO buffer into which the Node is printed. + + offset: + Initial offset (amount of leading spaces) + + attrnames: + True if you want to see the attribute names in + name=value pairs. False to only see the values. + + nodenames: + True if you want to see the actual node names + within their parents. + + showcoord: + Do you want the coordinates of each Node to be + displayed. 
+        """
+        lead = ' ' * offset
+        if nodenames and _my_node_name is not None:
+            buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ')
+        else:
+            buf.write(lead + self.__class__.__name__+ ': ')
+
+        if self.attr_names:
+            if attrnames:
+                nvlist = [(n, getattr(self,n)) for n in self.attr_names]
+                attrstr = ', '.join('%s=%s' % nv for nv in nvlist)
+            else:
+                vlist = [getattr(self, n) for n in self.attr_names]
+                attrstr = ', '.join('%s' % v for v in vlist)
+            buf.write(attrstr)
+
+        if showcoord:
+            buf.write(' (at %s)' % self.coord)
+        buf.write('\n')
+
+        for (child_name, child) in self.children():
+            child.show(
+                buf,
+                offset=offset + 2,
+                attrnames=attrnames,
+                nodenames=nodenames,
+                showcoord=showcoord,
+                _my_node_name=child_name)
+
+
+class NodeVisitor(object):
+    """ A base NodeVisitor class for visiting c_ast nodes.
+        Subclass it and define your own visit_XXX methods, where
+        XXX is the class name you want to visit with these
+        methods.
+
+        For example:
+
+        class ConstantVisitor(NodeVisitor):
+            def __init__(self):
+                self.values = []
+
+            def visit_Constant(self, node):
+                self.values.append(node.value)
+
+        Creates a list of values of all the constant nodes
+        encountered below the given node. To use it:
+
+        cv = ConstantVisitor()
+        cv.visit(node)
+
+        Notes:
+
+        * generic_visit() will be called for AST nodes for which
+          no visit_XXX method was defined.
+        * The children of nodes for which a visit_XXX was
+          defined will not be visited - if you need this, call
+          generic_visit() on the node.
+          You can use:
+             NodeVisitor.generic_visit(self, node)
+        * Modeled after Python's own AST visiting facilities
+          (the ast module of Python 3.0)
+    """
+    def visit(self, node):
+        """ Visit a node.
+        """
+        method = 'visit_' + node.__class__.__name__
+        visitor = getattr(self, method, self.generic_visit)
+        return visitor(node)
+
+    def generic_visit(self, node):
+        """ Called if no explicit visitor function exists for a
+            node. Implements preorder visiting of the node.
+        """
+        for c_name, c in node.children():
+            self.visit(c)
+
+
+'''
+
+
+if __name__ == "__main__":
+    import sys
+    ast_gen = ASTCodeGenerator('_c_ast.cfg')
+    ast_gen.generate(open('c_ast.py', 'w'))
+
diff --git a/lib/python3.4/site-packages/pycparser/_build_tables.py b/lib/python3.4/site-packages/pycparser/_build_tables.py
new file mode 100644
index 0000000..151e594
--- /dev/null
+++ b/lib/python3.4/site-packages/pycparser/_build_tables.py
@@ -0,0 +1,33 @@
+#-----------------------------------------------------------------
+# pycparser: _build_tables.py
+#
+# A dummy for generating the lexing/parsing tables and
+# compiling them into .pyc for faster execution in optimized mode.
+# Also generates AST code from the configuration file.
+# Should be called from the pycparser directory.
+# +# Copyright (C) 2008-2015, Eli Bendersky +# License: BSD +#----------------------------------------------------------------- + +# Generate c_ast.py +from _ast_gen import ASTCodeGenerator +ast_gen = ASTCodeGenerator('_c_ast.cfg') +ast_gen.generate(open('c_ast.py', 'w')) + +import sys +sys.path[0:0] = ['.', '..'] +from pycparser import c_parser + +# Generates the tables +# +c_parser.CParser( + lex_optimize=True, + yacc_debug=False, + yacc_optimize=True) + +# Load to compile into .pyc +# +import lextab +import yacctab +import c_ast diff --git a/lib/python3.4/site-packages/pycparser/_c_ast.cfg b/lib/python3.4/site-packages/pycparser/_c_ast.cfg new file mode 100644 index 0000000..fad5691 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser/_c_ast.cfg @@ -0,0 +1,189 @@ +#----------------------------------------------------------------- +# pycparser: _c_ast.cfg +# +# Defines the AST Node classes used in pycparser. +# +# Each entry is a Node sub-class name, listing the attributes +# and child nodes of the class: +# <name>* - a child node +# <name>** - a sequence of child nodes +# <name> - an attribute +# +# Copyright (C) 2008-2015, Eli Bendersky +# License: BSD +#----------------------------------------------------------------- + +# ArrayDecl is a nested declaration of an array with the given type. +# dim: the dimension (for example, constant 42) +# dim_quals: list of dimension qualifiers, to support C99's allowing 'const' +# and 'static' within the array dimension in function declarations. +ArrayDecl: [type*, dim*, dim_quals] + +ArrayRef: [name*, subscript*] + +# op: =, +=, /= etc. +# +Assignment: [op, lvalue*, rvalue*] + +BinaryOp: [op, left*, right*] + +Break: [] + +Case: [expr*, stmts**] + +Cast: [to_type*, expr*] + +# Compound statement in C99 is a list of block items (declarations or +# statements). +# +Compound: [block_items**] + +# Compound literal (anonymous aggregate) for C99. +# (type-name) {initializer_list} +# type: the typename +# init: InitList for the initializer list +# +CompoundLiteral: [type*, init*] + +# type: int, char, float, etc. see CLexer for constant token types +# +Constant: [type, value] + +Continue: [] + +# name: the variable being declared +# quals: list of qualifiers (const, volatile) +# funcspec: list function specifiers (i.e. inline in C99) +# storage: list of storage specifiers (extern, register, etc.) +# type: declaration type (probably nested with all the modifiers) +# init: initialization value, or None +# bitsize: bit field size, or None +# +Decl: [name, quals, storage, funcspec, type*, init*, bitsize*] + +DeclList: [decls**] + +Default: [stmts**] + +DoWhile: [cond*, stmt*] + +# Represents the ellipsis (...) parameter in a function +# declaration +# +EllipsisParam: [] + +# An empty statement (a semicolon ';' on its own) +# +EmptyStatement: [] + +# Enumeration type specifier +# name: an optional ID +# values: an EnumeratorList +# +Enum: [name, values*] + +# A name/value pair for enumeration values +# +Enumerator: [name, value*] + +# A list of enumerators +# +EnumeratorList: [enumerators**] + +# A list of expressions separated by the comma operator. +# +ExprList: [exprs**] + +# This is the top of the AST, representing a single C file (a +# translation unit in K&R jargon). It contains a list of +# "external-declaration"s, which is either declarations (Decl), +# Typedef or function definitions (FuncDef). 
+#
+FileAST: [ext**]
+
+# for (init; cond; next) stmt
+#
+For: [init*, cond*, next*, stmt*]
+
+# name: Id
+# args: ExprList
+#
+FuncCall: [name*, args*]
+
+# type <decl>(args)
+#
+FuncDecl: [args*, type*]
+
+# Function definition: a declarator for the function name and
+# a body, which is a compound statement.
+# There's an optional list of parameter declarations for old
+# K&R-style definitions
+#
+FuncDef: [decl*, param_decls**, body*]
+
+Goto: [name]
+
+ID: [name]
+
+# Holder for types that are a simple identifier (e.g. the built
+# ins void, char etc. and typedef-defined types)
+#
+IdentifierType: [names]
+
+If: [cond*, iftrue*, iffalse*]
+
+# An initialization list used for compound literals.
+#
+InitList: [exprs**]
+
+Label: [name, stmt*]
+
+# A named initializer for C99.
+# The name of a NamedInitializer is a sequence of Nodes, because
+# names can be hierarchical and contain constant expressions.
+#
+NamedInitializer: [name**, expr*]
+
+# a list of comma separated function parameter declarations
+#
+ParamList: [params**]
+
+PtrDecl: [quals, type*]
+
+Return: [expr*]
+
+# name: struct tag name
+# decls: declaration of members
+#
+Struct: [name, decls**]
+
+# type: . or ->
+# name.field or name->field
+#
+StructRef: [name*, type, field*]
+
+Switch: [cond*, stmt*]
+
+# cond ? iftrue : iffalse
+#
+TernaryOp: [cond*, iftrue*, iffalse*]
+
+# A base type declaration
+#
+TypeDecl: [declname, quals, type*]
+
+# A typedef declaration.
+# Very similar to Decl, but without some attributes
+#
+Typedef: [name, quals, storage, type*]
+
+Typename: [name, quals, type*]
+
+UnaryOp: [op, expr*]
+
+# name: union tag name
+# decls: declaration of members
+#
+Union: [name, decls**]
+
+While: [cond*, stmt*]
diff --git a/lib/python3.4/site-packages/pycparser/ast_transforms.py b/lib/python3.4/site-packages/pycparser/ast_transforms.py
new file mode 100644
index 0000000..36db1e8
--- /dev/null
+++ b/lib/python3.4/site-packages/pycparser/ast_transforms.py
@@ -0,0 +1,105 @@
+#------------------------------------------------------------------------------
+# pycparser: ast_transforms.py
+#
+# Some utilities used by the parser to create a friendlier AST.
+#
+# Copyright (C) 2008-2015, Eli Bendersky
+# License: BSD
+#------------------------------------------------------------------------------
+
+from . import c_ast
+
+
+def fix_switch_cases(switch_node):
+    """ The 'case' statements in a 'switch' come out of parsing with one
+        child node, so subsequent statements are just tucked to the parent
+        Compound. Additionally, consecutive (fall-through) case statements
+        come out messy. This is a peculiarity of the C grammar. The following:
+
+            switch (myvar) {
+                case 10:
+                    k = 10;
+                    p = k + 1;
+                    return 10;
+                case 20:
+                case 30:
+                    return 20;
+                default:
+                    break;
+            }
+
+        Creates this tree (pseudo-dump):
+
+            Switch
+                ID: myvar
+                Compound:
+                    Case 10:
+                        k = 10
+                    p = k + 1
+                    return 10
+                    Case 20:
+                        Case 30:
+                            return 20
+                    Default:
+                        break
+
+        The goal of this transform is to fix this mess, turning it into the following:
+
+            Switch
+                ID: myvar
+                Compound:
+                    Case 10:
+                        k = 10
+                        p = k + 1
+                        return 10
+                    Case 20:
+                    Case 30:
+                        return 20
+                    Default:
+                        break
+
+        A fixed AST node is returned. The argument may be modified.
+ """ + assert isinstance(switch_node, c_ast.Switch) + if not isinstance(switch_node.stmt, c_ast.Compound): + return switch_node + + # The new Compound child for the Switch, which will collect children in the + # correct order + new_compound = c_ast.Compound([], switch_node.stmt.coord) + + # The last Case/Default node + last_case = None + + # Goes over the children of the Compound below the Switch, adding them + # either directly below new_compound or below the last Case as appropriate + for child in switch_node.stmt.block_items: + if isinstance(child, (c_ast.Case, c_ast.Default)): + # If it's a Case/Default: + # 1. Add it to the Compound and mark as "last case" + # 2. If its immediate child is also a Case or Default, promote it + # to a sibling. + new_compound.block_items.append(child) + _extract_nested_case(child, new_compound.block_items) + last_case = new_compound.block_items[-1] + else: + # Other statements are added as children to the last case, if it + # exists. + if last_case is None: + new_compound.block_items.append(child) + else: + last_case.stmts.append(child) + + switch_node.stmt = new_compound + return switch_node + + +def _extract_nested_case(case_node, stmts_list): + """ Recursively extract consecutive Case statements that are made nested + by the parser and add them to the stmts_list. + """ + if isinstance(case_node.stmts[0], (c_ast.Case, c_ast.Default)): + stmts_list.append(case_node.stmts.pop()) + _extract_nested_case(stmts_list[-1], stmts_list) + diff --git a/lib/python3.4/site-packages/pycparser/c_ast.py b/lib/python3.4/site-packages/pycparser/c_ast.py new file mode 100644 index 0000000..4989f50 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser/c_ast.py @@ -0,0 +1,797 @@ +#----------------------------------------------------------------- +# ** ATTENTION ** +# This code was automatically generated from the file: +# _c_ast.cfg +# +# Do not modify it directly. Modify the configuration file and +# run the generator again. +# ** ** *** ** ** +# +# pycparser: c_ast.py +# +# AST Node classes. +# +# Copyright (C) 2008-2015, Eli Bendersky +# License: BSD +#----------------------------------------------------------------- + + +import sys + + +class Node(object): + __slots__ = () + """ Abstract base class for AST nodes. + """ + def children(self): + """ A sequence of all children that are Nodes + """ + pass + + def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None): + """ Pretty print the Node and all its attributes and + children (recursively) to a buffer. + + buf: + Open IO buffer into which the Node is printed. + + offset: + Initial offset (amount of leading spaces) + + attrnames: + True if you want to see the attribute names in + name=value pairs. False to only see the values. + + nodenames: + True if you want to see the actual node names + within their parents. + + showcoord: + Do you want the coordinates of each Node to be + displayed. 
+ """ + lead = ' ' * offset + if nodenames and _my_node_name is not None: + buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ') + else: + buf.write(lead + self.__class__.__name__+ ': ') + + if self.attr_names: + if attrnames: + nvlist = [(n, getattr(self,n)) for n in self.attr_names] + attrstr = ', '.join('%s=%s' % nv for nv in nvlist) + else: + vlist = [getattr(self, n) for n in self.attr_names] + attrstr = ', '.join('%s' % v for v in vlist) + buf.write(attrstr) + + if showcoord: + buf.write(' (at %s)' % self.coord) + buf.write('\n') + + for (child_name, child) in self.children(): + child.show( + buf, + offset=offset + 2, + attrnames=attrnames, + nodenames=nodenames, + showcoord=showcoord, + _my_node_name=child_name) + + +class NodeVisitor(object): + """ A base NodeVisitor class for visiting c_ast nodes. + Subclass it and define your own visit_XXX methods, where + XXX is the class name you want to visit with these + methods. + + For example: + + class ConstantVisitor(NodeVisitor): + def __init__(self): + self.values = [] + + def visit_Constant(self, node): + self.values.append(node.value) + + Creates a list of values of all the constant nodes + encountered below the given node. To use it: + + cv = ConstantVisitor() + cv.visit(node) + + Notes: + + * generic_visit() will be called for AST nodes for which + no visit_XXX method was defined. + * The children of nodes for which a visit_XXX was + defined will not be visited - if you need this, call + generic_visit() on the node. + You can use: + NodeVisitor.generic_visit(self, node) + * Modeled after Python's own AST visiting facilities + (the ast module of Python 3.0) + """ + def visit(self, node): + """ Visit a node. + """ + method = 'visit_' + node.__class__.__name__ + visitor = getattr(self, method, self.generic_visit) + return visitor(node) + + def generic_visit(self, node): + """ Called if no explicit visitor function exists for a + node. Implements preorder visiting of the node. 
+ """ + for c_name, c in node.children(): + self.visit(c) + + +class ArrayDecl(Node): + __slots__ = ('type', 'dim', 'dim_quals', 'coord', '__weakref__') + def __init__(self, type, dim, dim_quals, coord=None): + self.type = type + self.dim = dim + self.dim_quals = dim_quals + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + if self.dim is not None: nodelist.append(("dim", self.dim)) + return tuple(nodelist) + + attr_names = ('dim_quals', ) + +class ArrayRef(Node): + __slots__ = ('name', 'subscript', 'coord', '__weakref__') + def __init__(self, name, subscript, coord=None): + self.name = name + self.subscript = subscript + self.coord = coord + + def children(self): + nodelist = [] + if self.name is not None: nodelist.append(("name", self.name)) + if self.subscript is not None: nodelist.append(("subscript", self.subscript)) + return tuple(nodelist) + + attr_names = () + +class Assignment(Node): + __slots__ = ('op', 'lvalue', 'rvalue', 'coord', '__weakref__') + def __init__(self, op, lvalue, rvalue, coord=None): + self.op = op + self.lvalue = lvalue + self.rvalue = rvalue + self.coord = coord + + def children(self): + nodelist = [] + if self.lvalue is not None: nodelist.append(("lvalue", self.lvalue)) + if self.rvalue is not None: nodelist.append(("rvalue", self.rvalue)) + return tuple(nodelist) + + attr_names = ('op', ) + +class BinaryOp(Node): + __slots__ = ('op', 'left', 'right', 'coord', '__weakref__') + def __init__(self, op, left, right, coord=None): + self.op = op + self.left = left + self.right = right + self.coord = coord + + def children(self): + nodelist = [] + if self.left is not None: nodelist.append(("left", self.left)) + if self.right is not None: nodelist.append(("right", self.right)) + return tuple(nodelist) + + attr_names = ('op', ) + +class Break(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + attr_names = () + +class Case(Node): + __slots__ = ('expr', 'stmts', 'coord', '__weakref__') + def __init__(self, expr, stmts, coord=None): + self.expr = expr + self.stmts = stmts + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + for i, child in enumerate(self.stmts or []): + nodelist.append(("stmts[%d]" % i, child)) + return tuple(nodelist) + + attr_names = () + +class Cast(Node): + __slots__ = ('to_type', 'expr', 'coord', '__weakref__') + def __init__(self, to_type, expr, coord=None): + self.to_type = to_type + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.to_type is not None: nodelist.append(("to_type", self.to_type)) + if self.expr is not None: nodelist.append(("expr", self.expr)) + return tuple(nodelist) + + attr_names = () + +class Compound(Node): + __slots__ = ('block_items', 'coord', '__weakref__') + def __init__(self, block_items, coord=None): + self.block_items = block_items + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.block_items or []): + nodelist.append(("block_items[%d]" % i, child)) + return tuple(nodelist) + + attr_names = () + +class CompoundLiteral(Node): + __slots__ = ('type', 'init', 'coord', '__weakref__') + def __init__(self, type, init, coord=None): + self.type = type + self.init = init + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + if self.init is 
not None: nodelist.append(("init", self.init)) + return tuple(nodelist) + + attr_names = () + +class Constant(Node): + __slots__ = ('type', 'value', 'coord', '__weakref__') + def __init__(self, type, value, coord=None): + self.type = type + self.value = value + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + attr_names = ('type', 'value', ) + +class Continue(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + attr_names = () + +class Decl(Node): + __slots__ = ('name', 'quals', 'storage', 'funcspec', 'type', 'init', 'bitsize', 'coord', '__weakref__') + def __init__(self, name, quals, storage, funcspec, type, init, bitsize, coord=None): + self.name = name + self.quals = quals + self.storage = storage + self.funcspec = funcspec + self.type = type + self.init = init + self.bitsize = bitsize + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + if self.init is not None: nodelist.append(("init", self.init)) + if self.bitsize is not None: nodelist.append(("bitsize", self.bitsize)) + return tuple(nodelist) + + attr_names = ('name', 'quals', 'storage', 'funcspec', ) + +class DeclList(Node): + __slots__ = ('decls', 'coord', '__weakref__') + def __init__(self, decls, coord=None): + self.decls = decls + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.decls or []): + nodelist.append(("decls[%d]" % i, child)) + return tuple(nodelist) + + attr_names = () + +class Default(Node): + __slots__ = ('stmts', 'coord', '__weakref__') + def __init__(self, stmts, coord=None): + self.stmts = stmts + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.stmts or []): + nodelist.append(("stmts[%d]" % i, child)) + return tuple(nodelist) + + attr_names = () + +class DoWhile(Node): + __slots__ = ('cond', 'stmt', 'coord', '__weakref__') + def __init__(self, cond, stmt, coord=None): + self.cond = cond + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + attr_names = () + +class EllipsisParam(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + attr_names = () + +class EmptyStatement(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + attr_names = () + +class Enum(Node): + __slots__ = ('name', 'values', 'coord', '__weakref__') + def __init__(self, name, values, coord=None): + self.name = name + self.values = values + self.coord = coord + + def children(self): + nodelist = [] + if self.values is not None: nodelist.append(("values", self.values)) + return tuple(nodelist) + + attr_names = ('name', ) + +class Enumerator(Node): + __slots__ = ('name', 'value', 'coord', '__weakref__') + def __init__(self, name, value, coord=None): + self.name = name + self.value = value + self.coord = coord + + def children(self): + nodelist = [] + if self.value is not None: nodelist.append(("value", self.value)) + return tuple(nodelist) + + attr_names = ('name', ) + +class EnumeratorList(Node): + __slots__ = ('enumerators', 'coord', '__weakref__') + def __init__(self, enumerators, coord=None): + 
self.enumerators = enumerators + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.enumerators or []): + nodelist.append(("enumerators[%d]" % i, child)) + return tuple(nodelist) + + attr_names = () + +class ExprList(Node): + __slots__ = ('exprs', 'coord', '__weakref__') + def __init__(self, exprs, coord=None): + self.exprs = exprs + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.exprs or []): + nodelist.append(("exprs[%d]" % i, child)) + return tuple(nodelist) + + attr_names = () + +class FileAST(Node): + __slots__ = ('ext', 'coord', '__weakref__') + def __init__(self, ext, coord=None): + self.ext = ext + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.ext or []): + nodelist.append(("ext[%d]" % i, child)) + return tuple(nodelist) + + attr_names = () + +class For(Node): + __slots__ = ('init', 'cond', 'next', 'stmt', 'coord', '__weakref__') + def __init__(self, init, cond, next, stmt, coord=None): + self.init = init + self.cond = cond + self.next = next + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.init is not None: nodelist.append(("init", self.init)) + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.next is not None: nodelist.append(("next", self.next)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + attr_names = () + +class FuncCall(Node): + __slots__ = ('name', 'args', 'coord', '__weakref__') + def __init__(self, name, args, coord=None): + self.name = name + self.args = args + self.coord = coord + + def children(self): + nodelist = [] + if self.name is not None: nodelist.append(("name", self.name)) + if self.args is not None: nodelist.append(("args", self.args)) + return tuple(nodelist) + + attr_names = () + +class FuncDecl(Node): + __slots__ = ('args', 'type', 'coord', '__weakref__') + def __init__(self, args, type, coord=None): + self.args = args + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.args is not None: nodelist.append(("args", self.args)) + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + attr_names = () + +class FuncDef(Node): + __slots__ = ('decl', 'param_decls', 'body', 'coord', '__weakref__') + def __init__(self, decl, param_decls, body, coord=None): + self.decl = decl + self.param_decls = param_decls + self.body = body + self.coord = coord + + def children(self): + nodelist = [] + if self.decl is not None: nodelist.append(("decl", self.decl)) + if self.body is not None: nodelist.append(("body", self.body)) + for i, child in enumerate(self.param_decls or []): + nodelist.append(("param_decls[%d]" % i, child)) + return tuple(nodelist) + + attr_names = () + +class Goto(Node): + __slots__ = ('name', 'coord', '__weakref__') + def __init__(self, name, coord=None): + self.name = name + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + attr_names = ('name', ) + +class ID(Node): + __slots__ = ('name', 'coord', '__weakref__') + def __init__(self, name, coord=None): + self.name = name + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + attr_names = ('name', ) + +class IdentifierType(Node): + __slots__ = ('names', 'coord', '__weakref__') + def __init__(self, names, coord=None): + self.names = names + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + 
+ attr_names = ('names', ) + +class If(Node): + __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__') + def __init__(self, cond, iftrue, iffalse, coord=None): + self.cond = cond + self.iftrue = iftrue + self.iffalse = iffalse + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue)) + if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse)) + return tuple(nodelist) + + attr_names = () + +class InitList(Node): + __slots__ = ('exprs', 'coord', '__weakref__') + def __init__(self, exprs, coord=None): + self.exprs = exprs + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.exprs or []): + nodelist.append(("exprs[%d]" % i, child)) + return tuple(nodelist) + + attr_names = () + +class Label(Node): + __slots__ = ('name', 'stmt', 'coord', '__weakref__') + def __init__(self, name, stmt, coord=None): + self.name = name + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + attr_names = ('name', ) + +class NamedInitializer(Node): + __slots__ = ('name', 'expr', 'coord', '__weakref__') + def __init__(self, name, expr, coord=None): + self.name = name + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + for i, child in enumerate(self.name or []): + nodelist.append(("name[%d]" % i, child)) + return tuple(nodelist) + + attr_names = () + +class ParamList(Node): + __slots__ = ('params', 'coord', '__weakref__') + def __init__(self, params, coord=None): + self.params = params + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.params or []): + nodelist.append(("params[%d]" % i, child)) + return tuple(nodelist) + + attr_names = () + +class PtrDecl(Node): + __slots__ = ('quals', 'type', 'coord', '__weakref__') + def __init__(self, quals, type, coord=None): + self.quals = quals + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + attr_names = ('quals', ) + +class Return(Node): + __slots__ = ('expr', 'coord', '__weakref__') + def __init__(self, expr, coord=None): + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + return tuple(nodelist) + + attr_names = () + +class Struct(Node): + __slots__ = ('name', 'decls', 'coord', '__weakref__') + def __init__(self, name, decls, coord=None): + self.name = name + self.decls = decls + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.decls or []): + nodelist.append(("decls[%d]" % i, child)) + return tuple(nodelist) + + attr_names = ('name', ) + +class StructRef(Node): + __slots__ = ('name', 'type', 'field', 'coord', '__weakref__') + def __init__(self, name, type, field, coord=None): + self.name = name + self.type = type + self.field = field + self.coord = coord + + def children(self): + nodelist = [] + if self.name is not None: nodelist.append(("name", self.name)) + if self.field is not None: nodelist.append(("field", self.field)) + return tuple(nodelist) + + attr_names = ('type', ) + +class Switch(Node): + __slots__ = ('cond', 'stmt', 'coord', '__weakref__') + def 
__init__(self, cond, stmt, coord=None): + self.cond = cond + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + attr_names = () + +class TernaryOp(Node): + __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__') + def __init__(self, cond, iftrue, iffalse, coord=None): + self.cond = cond + self.iftrue = iftrue + self.iffalse = iffalse + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue)) + if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse)) + return tuple(nodelist) + + attr_names = () + +class TypeDecl(Node): + __slots__ = ('declname', 'quals', 'type', 'coord', '__weakref__') + def __init__(self, declname, quals, type, coord=None): + self.declname = declname + self.quals = quals + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + attr_names = ('declname', 'quals', ) + +class Typedef(Node): + __slots__ = ('name', 'quals', 'storage', 'type', 'coord', '__weakref__') + def __init__(self, name, quals, storage, type, coord=None): + self.name = name + self.quals = quals + self.storage = storage + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + attr_names = ('name', 'quals', 'storage', ) + +class Typename(Node): + __slots__ = ('name', 'quals', 'type', 'coord', '__weakref__') + def __init__(self, name, quals, type, coord=None): + self.name = name + self.quals = quals + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + attr_names = ('name', 'quals', ) + +class UnaryOp(Node): + __slots__ = ('op', 'expr', 'coord', '__weakref__') + def __init__(self, op, expr, coord=None): + self.op = op + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + return tuple(nodelist) + + attr_names = ('op', ) + +class Union(Node): + __slots__ = ('name', 'decls', 'coord', '__weakref__') + def __init__(self, name, decls, coord=None): + self.name = name + self.decls = decls + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.decls or []): + nodelist.append(("decls[%d]" % i, child)) + return tuple(nodelist) + + attr_names = ('name', ) + +class While(Node): + __slots__ = ('cond', 'stmt', 'coord', '__weakref__') + def __init__(self, cond, stmt, coord=None): + self.cond = cond + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + attr_names = () + diff --git a/lib/python3.4/site-packages/pycparser/c_generator.py b/lib/python3.4/site-packages/pycparser/c_generator.py new file mode 100644 index 0000000..f4a5a12 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser/c_generator.py @@ -0,0 +1,399 @@ +#------------------------------------------------------------------------------ +# pycparser: 
c_generator.py +# +# C code generator from pycparser AST nodes. +# +# Copyright (C) 2008-2015, Eli Bendersky +# License: BSD +#------------------------------------------------------------------------------ +from . import c_ast + + +class CGenerator(object): + """ Uses the same visitor pattern as c_ast.NodeVisitor, but modified to + return a value from each visit method, using string accumulation in + generic_visit. + """ + def __init__(self): + # Statements start with indentation of self.indent_level spaces, using + # the _make_indent method + # + self.indent_level = 0 + + def _make_indent(self): + return ' ' * self.indent_level + + def visit(self, node): + method = 'visit_' + node.__class__.__name__ + return getattr(self, method, self.generic_visit)(node) + + def generic_visit(self, node): + #~ print('generic:', type(node)) + if node is None: + return '' + else: + return ''.join(self.visit(c) for c_name, c in node.children()) + + def visit_Constant(self, n): + return n.value + + def visit_ID(self, n): + return n.name + + def visit_ArrayRef(self, n): + arrref = self._parenthesize_unless_simple(n.name) + return arrref + '[' + self.visit(n.subscript) + ']' + + def visit_StructRef(self, n): + sref = self._parenthesize_unless_simple(n.name) + return sref + n.type + self.visit(n.field) + + def visit_FuncCall(self, n): + fref = self._parenthesize_unless_simple(n.name) + return fref + '(' + self.visit(n.args) + ')' + + def visit_UnaryOp(self, n): + operand = self._parenthesize_unless_simple(n.expr) + if n.op == 'p++': + return '%s++' % operand + elif n.op == 'p--': + return '%s--' % operand + elif n.op == 'sizeof': + # Always parenthesize the argument of sizeof since it can be + # a name. + return 'sizeof(%s)' % self.visit(n.expr) + else: + return '%s%s' % (n.op, operand) + + def visit_BinaryOp(self, n): + lval_str = self._parenthesize_if(n.left, + lambda d: not self._is_simple_node(d)) + rval_str = self._parenthesize_if(n.right, + lambda d: not self._is_simple_node(d)) + return '%s %s %s' % (lval_str, n.op, rval_str) + + def visit_Assignment(self, n): + rval_str = self._parenthesize_if( + n.rvalue, + lambda n: isinstance(n, c_ast.Assignment)) + return '%s %s %s' % (self.visit(n.lvalue), n.op, rval_str) + + def visit_IdentifierType(self, n): + return ' '.join(n.names) + + def _visit_expr(self, n): + if isinstance(n, c_ast.InitList): + return '{' + self.visit(n) + '}' + elif isinstance(n, c_ast.ExprList): + return '(' + self.visit(n) + ')' + else: + return self.visit(n) + + def visit_Decl(self, n, no_type=False): + # no_type is used when a Decl is part of a DeclList, where the type is + # explicitly only for the first declaration in a list. 
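+ # For example, 'int x = 1, y = 2;' is printed with the 'int' on the first + # declarator only; visit_DeclList below passes no_type=True for 'y'.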
+ # + s = n.name if no_type else self._generate_decl(n) + if n.bitsize: s += ' : ' + self.visit(n.bitsize) + if n.init: + s += ' = ' + self._visit_expr(n.init) + return s + + def visit_DeclList(self, n): + s = self.visit(n.decls[0]) + if len(n.decls) > 1: + s += ', ' + ', '.join(self.visit_Decl(decl, no_type=True) + for decl in n.decls[1:]) + return s + + def visit_Typedef(self, n): + s = '' + if n.storage: s += ' '.join(n.storage) + ' ' + s += self._generate_type(n.type) + return s + + def visit_Cast(self, n): + s = '(' + self._generate_type(n.to_type) + ')' + return s + ' ' + self._parenthesize_unless_simple(n.expr) + + def visit_ExprList(self, n): + visited_subexprs = [] + for expr in n.exprs: + visited_subexprs.append(self._visit_expr(expr)) + return ', '.join(visited_subexprs) + + def visit_InitList(self, n): + visited_subexprs = [] + for expr in n.exprs: + visited_subexprs.append(self._visit_expr(expr)) + return ', '.join(visited_subexprs) + + def visit_Enum(self, n): + s = 'enum' + if n.name: s += ' ' + n.name + if n.values: + s += ' {' + for i, enumerator in enumerate(n.values.enumerators): + s += enumerator.name + if enumerator.value: + s += ' = ' + self.visit(enumerator.value) + if i != len(n.values.enumerators) - 1: + s += ', ' + s += '}' + return s + + def visit_FuncDef(self, n): + decl = self.visit(n.decl) + self.indent_level = 0 + body = self.visit(n.body) + if n.param_decls: + knrdecls = ';\n'.join(self.visit(p) for p in n.param_decls) + return decl + '\n' + knrdecls + ';\n' + body + '\n' + else: + return decl + '\n' + body + '\n' + + def visit_FileAST(self, n): + s = '' + for ext in n.ext: + if isinstance(ext, c_ast.FuncDef): + s += self.visit(ext) + else: + s += self.visit(ext) + ';\n' + return s + + def visit_Compound(self, n): + s = self._make_indent() + '{\n' + self.indent_level += 2 + if n.block_items: + s += ''.join(self._generate_stmt(stmt) for stmt in n.block_items) + self.indent_level -= 2 + s += self._make_indent() + '}\n' + return s + + def visit_EmptyStatement(self, n): + return ';' + + def visit_ParamList(self, n): + return ', '.join(self.visit(param) for param in n.params) + + def visit_Return(self, n): + s = 'return' + if n.expr: s += ' ' + self.visit(n.expr) + return s + ';' + + def visit_Break(self, n): + return 'break;' + + def visit_Continue(self, n): + return 'continue;' + + def visit_TernaryOp(self, n): + s = self._visit_expr(n.cond) + ' ? 
' + s += self._visit_expr(n.iftrue) + ' : ' + s += self._visit_expr(n.iffalse) + return s + + def visit_If(self, n): + s = 'if (' + if n.cond: s += self.visit(n.cond) + s += ')\n' + s += self._generate_stmt(n.iftrue, add_indent=True) + if n.iffalse: + s += self._make_indent() + 'else\n' + s += self._generate_stmt(n.iffalse, add_indent=True) + return s + + def visit_For(self, n): + s = 'for (' + if n.init: s += self.visit(n.init) + s += ';' + if n.cond: s += ' ' + self.visit(n.cond) + s += ';' + if n.next: s += ' ' + self.visit(n.next) + s += ')\n' + s += self._generate_stmt(n.stmt, add_indent=True) + return s + + def visit_While(self, n): + s = 'while (' + if n.cond: s += self.visit(n.cond) + s += ')\n' + s += self._generate_stmt(n.stmt, add_indent=True) + return s + + def visit_DoWhile(self, n): + s = 'do\n' + s += self._generate_stmt(n.stmt, add_indent=True) + s += self._make_indent() + 'while (' + if n.cond: s += self.visit(n.cond) + s += ');' + return s + + def visit_Switch(self, n): + s = 'switch (' + self.visit(n.cond) + ')\n' + s += self._generate_stmt(n.stmt, add_indent=True) + return s + + def visit_Case(self, n): + s = 'case ' + self.visit(n.expr) + ':\n' + for stmt in n.stmts: + s += self._generate_stmt(stmt, add_indent=True) + return s + + def visit_Default(self, n): + s = 'default:\n' + for stmt in n.stmts: + s += self._generate_stmt(stmt, add_indent=True) + return s + + def visit_Label(self, n): + return n.name + ':\n' + self._generate_stmt(n.stmt) + + def visit_Goto(self, n): + return 'goto ' + n.name + ';' + + def visit_EllipsisParam(self, n): + return '...' + + def visit_Struct(self, n): + return self._generate_struct_union(n, 'struct') + + def visit_Typename(self, n): + return self._generate_type(n.type) + + def visit_Union(self, n): + return self._generate_struct_union(n, 'union') + + def visit_NamedInitializer(self, n): + s = '' + for name in n.name: + if isinstance(name, c_ast.ID): + s += '.' + name.name + elif isinstance(name, c_ast.Constant): + s += '[' + name.value + ']' + s += ' = ' + self.visit(n.expr) + return s + + def visit_FuncDecl(self, n): + return self._generate_type(n) + + def _generate_struct_union(self, n, name): + """ Generates code for structs and unions. name should be either + 'struct' or 'union'. + """ + s = name + ' ' + (n.name or '') + if n.decls: + s += '\n' + s += self._make_indent() + self.indent_level += 2 + s += '{\n' + for decl in n.decls: + s += self._generate_stmt(decl) + self.indent_level -= 2 + s += self._make_indent() + '}' + return s + + def _generate_stmt(self, n, add_indent=False): + """ Generation from a statement node. This method exists as a wrapper + for individual visit_* methods to handle different treatment of + some statements in this context. + """ + typ = type(n) + if add_indent: self.indent_level += 2 + indent = self._make_indent() + if add_indent: self.indent_level -= 2 + + if typ in ( + c_ast.Decl, c_ast.Assignment, c_ast.Cast, c_ast.UnaryOp, + c_ast.BinaryOp, c_ast.TernaryOp, c_ast.FuncCall, c_ast.ArrayRef, + c_ast.StructRef, c_ast.Constant, c_ast.ID, c_ast.Typedef, + c_ast.ExprList): + # These can also appear in an expression context, so their visit_* + # methods do not append the semicolon themselves; it is added here + # + return indent + self.visit(n) + ';\n' + elif typ in (c_ast.Compound,): + # No extra indentation required before the opening brace of a + # compound - because it consists of multiple lines it has to + # compute its own indentation.
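+ # (For example, an Assignment such as 'x = 1' is emitted as 'x = 1;\n' by + # the branch above, while inside an ExprList the same node is emitted bare.)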
+ # + return self.visit(n) + else: + return indent + self.visit(n) + '\n' + + def _generate_decl(self, n): + """ Generation from a Decl node. + """ + s = '' + if n.funcspec: s = ' '.join(n.funcspec) + ' ' + if n.storage: s += ' '.join(n.storage) + ' ' + s += self._generate_type(n.type) + return s + + def _generate_type(self, n, modifiers=[]): + """ Recursive generation from a type node. n is the type node. + modifiers collects the PtrDecl, ArrayDecl and FuncDecl modifiers + encountered on the way down to a TypeDecl, to allow proper + generation from it. + """ + typ = type(n) + #~ print(n, modifiers) + + if typ == c_ast.TypeDecl: + s = '' + if n.quals: s += ' '.join(n.quals) + ' ' + s += self.visit(n.type) + + nstr = n.declname if n.declname else '' + # Resolve modifiers. + # Wrap in parens to distinguish pointer to array and pointer to + # function syntax. + # + for i, modifier in enumerate(modifiers): + if isinstance(modifier, c_ast.ArrayDecl): + if (i != 0 and isinstance(modifiers[i - 1], c_ast.PtrDecl)): + nstr = '(' + nstr + ')' + nstr += '[' + self.visit(modifier.dim) + ']' + elif isinstance(modifier, c_ast.FuncDecl): + if (i != 0 and isinstance(modifiers[i - 1], c_ast.PtrDecl)): + nstr = '(' + nstr + ')' + nstr += '(' + self.visit(modifier.args) + ')' + elif isinstance(modifier, c_ast.PtrDecl): + if modifier.quals: + nstr = '* %s %s' % (' '.join(modifier.quals), nstr) + else: + nstr = '*' + nstr + if nstr: s += ' ' + nstr + return s + elif typ == c_ast.Decl: + return self._generate_decl(n.type) + elif typ == c_ast.Typename: + return self._generate_type(n.type) + elif typ == c_ast.IdentifierType: + return ' '.join(n.names) + ' ' + elif typ in (c_ast.ArrayDecl, c_ast.PtrDecl, c_ast.FuncDecl): + return self._generate_type(n.type, modifiers + [n]) + else: + return self.visit(n) + + def _parenthesize_if(self, n, condition): + """ Visits 'n' and returns its string representation, parenthesized + if the condition function applied to the node returns True. + """ + s = self._visit_expr(n) + if condition(n): + return '(' + s + ')' + else: + return s + + def _parenthesize_unless_simple(self, n): + """ Common use case for _parenthesize_if + """ + return self._parenthesize_if(n, lambda d: not self._is_simple_node(d)) + + def _is_simple_node(self, n): + """ Returns True for nodes that are "simple" - i.e. nodes that always + have higher precedence than operators. + """ + return isinstance(n, (c_ast.Constant, c_ast.ID, c_ast.ArrayRef, + c_ast.StructRef, c_ast.FuncCall)) diff --git a/lib/python3.4/site-packages/pycparser/c_lexer.py b/lib/python3.4/site-packages/pycparser/c_lexer.py new file mode 100644 index 0000000..cbb9d26 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser/c_lexer.py @@ -0,0 +1,485 @@ +#------------------------------------------------------------------------------ +# pycparser: c_lexer.py +# +# CLexer class: lexer for the C language +# +# Copyright (C) 2008-2015, Eli Bendersky +# License: BSD +#------------------------------------------------------------------------------ +import re +import sys + +from .ply import lex +from .ply.lex import TOKEN + + +class CLexer(object): + """ A lexer for the C language. After building it, set the + input text with input(), and call token() to get new + tokens. + + The public attribute filename can be set to an initial + filename, but the lexer will update it upon #line + directives. + """ + def __init__(self, error_func, on_lbrace_func, on_rbrace_func, + type_lookup_func): + """ Create a new Lexer. + + error_func: + An error function.
Will be called with an error + message, line and column as arguments, in case of + an error during lexing. + + on_lbrace_func, on_rbrace_func: + Called when an LBRACE or RBRACE is encountered + (likely to push/pop type_lookup_func's scope) + + type_lookup_func: + A type lookup function. Given a string, it must + return True IFF this string is a name of a type + that was defined with a typedef earlier. + """ + self.error_func = error_func + self.on_lbrace_func = on_lbrace_func + self.on_rbrace_func = on_rbrace_func + self.type_lookup_func = type_lookup_func + self.filename = '' + + # Keeps track of the last token returned from self.token() + self.last_token = None + + # Allow either "# line" or "# <num>" to support GCC's + # cpp output + # + self.line_pattern = re.compile('([ \t]*line\W)|([ \t]*\d+)') + self.pragma_pattern = re.compile('[ \t]*pragma\W') + + def build(self, **kwargs): + """ Builds the lexer from the specification. Must be + called after the lexer object is created. + + This method exists separately, because the PLY + manual warns against calling lex.lex inside + __init__ + """ + self.lexer = lex.lex(object=self, **kwargs) + + def reset_lineno(self): + """ Resets the internal line number counter of the lexer. + """ + self.lexer.lineno = 1 + + def input(self, text): + self.lexer.input(text) + + def token(self): + self.last_token = self.lexer.token() + return self.last_token + + def find_tok_column(self, token): + """ Find the column of the token in its line. + """ + last_cr = self.lexer.lexdata.rfind('\n', 0, token.lexpos) + return token.lexpos - last_cr + + ######################-- PRIVATE --###################### + + ## + ## Internal auxiliary methods + ## + def _error(self, msg, token): + location = self._make_tok_location(token) + self.error_func(msg, location[0], location[1]) + self.lexer.skip(1) + + def _make_tok_location(self, token): + return (token.lineno, self.find_tok_column(token)) + + ## + ## Reserved keywords + ## + keywords = ( + '_BOOL', '_COMPLEX', 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST', + 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN', + 'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG', + 'REGISTER', 'OFFSETOF', + 'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT', + 'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID', + 'VOLATILE', 'WHILE', + ) + + keyword_map = {} + for keyword in keywords: + if keyword == '_BOOL': + keyword_map['_Bool'] = keyword + elif keyword == '_COMPLEX': + keyword_map['_Complex'] = keyword + else: + keyword_map[keyword.lower()] = keyword + + ## + ## All the tokens recognized by the lexer + ## + tokens = keywords + ( + # Identifiers + 'ID', + + # Type identifiers (identifiers previously defined as + # types with typedef) + 'TYPEID', + + # constants + 'INT_CONST_DEC', 'INT_CONST_OCT', 'INT_CONST_HEX', 'INT_CONST_BIN', + 'FLOAT_CONST', 'HEX_FLOAT_CONST', + 'CHAR_CONST', + 'WCHAR_CONST', + + # String literals + 'STRING_LITERAL', + 'WSTRING_LITERAL', + + # Operators + 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD', + 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT', + 'LOR', 'LAND', 'LNOT', + 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE', + + # Assignment + 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', + 'PLUSEQUAL', 'MINUSEQUAL', + 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', + 'OREQUAL', + + # Increment/decrement + 'PLUSPLUS', 'MINUSMINUS', + + # Structure dereference (->) + 'ARROW', + + # Conditional operator (?) 
+ 'CONDOP', + + # Delimiters + 'LPAREN', 'RPAREN', # ( ) + 'LBRACKET', 'RBRACKET', # [ ] + 'LBRACE', 'RBRACE', # { } + 'COMMA', 'PERIOD', # , . + 'SEMI', 'COLON', # ; : + + # Ellipsis (...) + 'ELLIPSIS', + + # pre-processor + 'PPHASH', # '#' + ) + + ## + ## Regexes for use in tokens + ## + ## + + # valid C identifiers (K&R2: A.2.3), plus '$' (supported by some compilers) + identifier = r'[a-zA-Z_$][0-9a-zA-Z_$]*' + + hex_prefix = '0[xX]' + hex_digits = '[0-9a-fA-F]+' + bin_prefix = '0[bB]' + bin_digits = '[01]+' + + # integer constants (K&R2: A.2.5.1) + integer_suffix_opt = r'(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?' + decimal_constant = '(0'+integer_suffix_opt+')|([1-9][0-9]*'+integer_suffix_opt+')' + octal_constant = '0[0-7]*'+integer_suffix_opt + hex_constant = hex_prefix+hex_digits+integer_suffix_opt + bin_constant = bin_prefix+bin_digits+integer_suffix_opt + + bad_octal_constant = '0[0-7]*[89]' + + # character constants (K&R2: A.2.5.2) + # Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line + # directives with Windows paths as filenames (..\..\dir\file) + # For the same reason, decimal_escape allows all digit sequences. We want to + # parse all correct code, even if it means to sometimes parse incorrect + # code. + # + simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])""" + decimal_escape = r"""(\d+)""" + hex_escape = r"""(x[0-9a-fA-F]+)""" + bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])""" + + escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))' + cconst_char = r"""([^'\\\n]|"""+escape_sequence+')' + char_const = "'"+cconst_char+"'" + wchar_const = 'L'+char_const + unmatched_quote = "('"+cconst_char+"*\\n)|('"+cconst_char+"*$)" + bad_char_const = r"""('"""+cconst_char+"""[^'\n]+')|('')|('"""+bad_escape+r"""[^'\n]*')""" + + # string literals (K&R2: A.2.6) + string_char = r"""([^"\\\n]|"""+escape_sequence+')' + string_literal = '"'+string_char+'*"' + wstring_literal = 'L'+string_literal + bad_string_literal = '"'+string_char+'*'+bad_escape+string_char+'*"' + + # floating constants (K&R2: A.2.5.3) + exponent_part = r"""([eE][-+]?[0-9]+)""" + fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)""" + floating_constant = '(((('+fractional_constant+')'+exponent_part+'?)|([0-9]+'+exponent_part+'))[FfLl]?)' + binary_exponent_part = r'''([pP][+-]?[0-9]+)''' + hex_fractional_constant = '((('+hex_digits+r""")?\."""+hex_digits+')|('+hex_digits+r"""\.))""" + hex_floating_constant = '('+hex_prefix+'('+hex_digits+'|'+hex_fractional_constant+')'+binary_exponent_part+'[FfLl]?)' + + ## + ## Lexer states: used for preprocessor \n-terminated directives + ## + states = ( + # ppline: preprocessor line directives + # + ('ppline', 'exclusive'), + + # pppragma: pragma + # + ('pppragma', 'exclusive'), + ) + + def t_PPHASH(self, t): + r'[ \t]*\#' + if self.line_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos): + t.lexer.begin('ppline') + self.pp_line = self.pp_filename = None + elif self.pragma_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos): + t.lexer.begin('pppragma') + else: + t.type = 'PPHASH' + return t + + ## + ## Rules for the ppline state + ## + @TOKEN(string_literal) + def t_ppline_FILENAME(self, t): + if self.pp_line is None: + self._error('filename before line number in #line', t) + else: + self.pp_filename = t.value.lstrip('"').rstrip('"') + + @TOKEN(decimal_constant) + def t_ppline_LINE_NUMBER(self, t): + if self.pp_line is None: + self.pp_line = t.value + else: + # Ignore: GCC's cpp sometimes inserts a numeric
flag + # after the file name + pass + + def t_ppline_NEWLINE(self, t): + r'\n' + + if self.pp_line is None: + self._error('line number missing in #line', t) + else: + self.lexer.lineno = int(self.pp_line) + + if self.pp_filename is not None: + self.filename = self.pp_filename + + t.lexer.begin('INITIAL') + + def t_ppline_PPLINE(self, t): + r'line' + pass + + t_ppline_ignore = ' \t' + + def t_ppline_error(self, t): + self._error('invalid #line directive', t) + + ## + ## Rules for the pppragma state + ## + def t_pppragma_NEWLINE(self, t): + r'\n' + t.lexer.lineno += 1 + t.lexer.begin('INITIAL') + + def t_pppragma_PPPRAGMA(self, t): + r'pragma' + pass + + t_pppragma_ignore = ' \t<>.-{}();=+-*/$%@&^~!?:,0123456789' + + @TOKEN(string_literal) + def t_pppragma_STR(self, t): pass + + @TOKEN(identifier) + def t_pppragma_ID(self, t): pass + + def t_pppragma_error(self, t): + self._error('invalid #pragma directive', t) + + ## + ## Rules for the normal state + ## + t_ignore = ' \t' + + # Newlines + def t_NEWLINE(self, t): + r'\n+' + t.lexer.lineno += t.value.count("\n") + + # Operators + t_PLUS = r'\+' + t_MINUS = r'-' + t_TIMES = r'\*' + t_DIVIDE = r'/' + t_MOD = r'%' + t_OR = r'\|' + t_AND = r'&' + t_NOT = r'~' + t_XOR = r'\^' + t_LSHIFT = r'<<' + t_RSHIFT = r'>>' + t_LOR = r'\|\|' + t_LAND = r'&&' + t_LNOT = r'!' + t_LT = r'<' + t_GT = r'>' + t_LE = r'<=' + t_GE = r'>=' + t_EQ = r'==' + t_NE = r'!=' + + # Assignment operators + t_EQUALS = r'=' + t_TIMESEQUAL = r'\*=' + t_DIVEQUAL = r'/=' + t_MODEQUAL = r'%=' + t_PLUSEQUAL = r'\+=' + t_MINUSEQUAL = r'-=' + t_LSHIFTEQUAL = r'<<=' + t_RSHIFTEQUAL = r'>>=' + t_ANDEQUAL = r'&=' + t_OREQUAL = r'\|=' + t_XOREQUAL = r'\^=' + + # Increment/decrement + t_PLUSPLUS = r'\+\+' + t_MINUSMINUS = r'--' + + # -> + t_ARROW = r'->' + + # ? + t_CONDOP = r'\?' + + # Delimiters + t_LPAREN = r'\(' + t_RPAREN = r'\)' + t_LBRACKET = r'\[' + t_RBRACKET = r'\]' + t_COMMA = r',' + t_PERIOD = r'\.' + t_SEMI = r';' + t_COLON = r':' + t_ELLIPSIS = r'\.\.\.' + + # Scope delimiters + # To see why on_lbrace_func is needed, consider: + # typedef char TT; + # void foo(int TT) { TT = 10; } + # TT x = 5; + # Outside the function, TT is a typedef, but inside (starting and ending + # with the braces) it's a parameter. The trouble begins with yacc's + # lookahead token. If we open a new scope in brace_open, then TT has + # already been read and incorrectly interpreted as TYPEID. So, we need + # to open and close scopes from within the lexer. + # Similarly for the TT immediately outside the end of the function.
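+ # With the snippet above, the handlers below make 'TT = 10;' inside the + # braces lex TT as a plain ID (the parameter scope is pushed the instant + # '{' is recognized), while 'TT x = 5;' after the closing brace sees the + # typedef again and lexes TT as TYPEID.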
+ # + @TOKEN(r'\{') + def t_LBRACE(self, t): + self.on_lbrace_func() + return t + @TOKEN(r'\}') + def t_RBRACE(self, t): + self.on_rbrace_func() + return t + + t_STRING_LITERAL = string_literal + + # The following floating and integer constants are defined as + # functions to impose a strict order (otherwise, decimal + # is placed before the others because its regex is longer, + # and this is bad) + # + @TOKEN(floating_constant) + def t_FLOAT_CONST(self, t): + return t + + @TOKEN(hex_floating_constant) + def t_HEX_FLOAT_CONST(self, t): + return t + + @TOKEN(hex_constant) + def t_INT_CONST_HEX(self, t): + return t + + @TOKEN(bin_constant) + def t_INT_CONST_BIN(self, t): + return t + + @TOKEN(bad_octal_constant) + def t_BAD_CONST_OCT(self, t): + msg = "Invalid octal constant" + self._error(msg, t) + + @TOKEN(octal_constant) + def t_INT_CONST_OCT(self, t): + return t + + @TOKEN(decimal_constant) + def t_INT_CONST_DEC(self, t): + return t + + # Must come before bad_char_const, to prevent it from + # catching valid char constants as invalid + # + @TOKEN(char_const) + def t_CHAR_CONST(self, t): + return t + + @TOKEN(wchar_const) + def t_WCHAR_CONST(self, t): + return t + + @TOKEN(unmatched_quote) + def t_UNMATCHED_QUOTE(self, t): + msg = "Unmatched '" + self._error(msg, t) + + @TOKEN(bad_char_const) + def t_BAD_CHAR_CONST(self, t): + msg = "Invalid char constant %s" % t.value + self._error(msg, t) + + @TOKEN(wstring_literal) + def t_WSTRING_LITERAL(self, t): + return t + + # unmatched string literals are caught by the preprocessor + + @TOKEN(bad_string_literal) + def t_BAD_STRING_LITERAL(self, t): + msg = "String contains invalid escape code" + self._error(msg, t) + + @TOKEN(identifier) + def t_ID(self, t): + t.type = self.keyword_map.get(t.value, "ID") + if t.type == 'ID' and self.type_lookup_func(t.value): + t.type = "TYPEID" + return t + + def t_error(self, t): + msg = 'Illegal character %s' % repr(t.value[0]) + self._error(msg, t) + diff --git a/lib/python3.4/site-packages/pycparser/c_parser.py b/lib/python3.4/site-packages/pycparser/c_parser.py new file mode 100644 index 0000000..f4f7453 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser/c_parser.py @@ -0,0 +1,1701 @@ +#------------------------------------------------------------------------------ +# pycparser: c_parser.py +# +# CParser class: Parser and AST builder for the C language +# +# Copyright (C) 2008-2015, Eli Bendersky +# License: BSD +#------------------------------------------------------------------------------ +import re + +from .ply import yacc + +from . import c_ast +from .c_lexer import CLexer +from .plyparser import PLYParser, Coord, ParseError +from .ast_transforms import fix_switch_cases + + +class CParser(PLYParser): + def __init__( + self, + lex_optimize=True, + lextab='pycparser.lextab', + yacc_optimize=True, + yacctab='pycparser.yacctab', + yacc_debug=False, + taboutputdir=''): + """ Create a new CParser. + + Some arguments for controlling the debug/optimization + level of the parser are provided. The defaults are + tuned for release/performance mode. + The simple rules for using them are: + *) When tweaking CParser/CLexer, set these to False + *) When releasing a stable parser, set to True + + lex_optimize: + Set to False when you're modifying the lexer. + Otherwise, changes in the lexer won't be used, if + some lextab.py file exists. + When releasing with a stable lexer, set to True + to save the re-generation of the lexer table on + each run. 
+ + lextab: + Points to the lex table that's used for optimized + mode. Only if you're modifying the lexer and want + some tests to avoid re-generating the table, make + this point to a local lex table file (that's been + earlier generated with lex_optimize=True) + + yacc_optimize: + Set to False when you're modifying the parser. + Otherwise, changes in the parser won't be used, if + some parsetab.py file exists. + When releasing with a stable parser, set to True + to save the re-generation of the parser table on + each run. + + yacctab: + Points to the yacc table that's used for optimized + mode. Only if you're modifying the parser, make + this point to a local yacc table file + + yacc_debug: + Generate a parser.out file that explains how yacc + built the parsing table from the grammar. + + taboutputdir: + Set this parameter to control the location of generated + lextab and yacctab files. + """ + self.clex = CLexer( + error_func=self._lex_error_func, + on_lbrace_func=self._lex_on_lbrace_func, + on_rbrace_func=self._lex_on_rbrace_func, + type_lookup_func=self._lex_type_lookup_func) + + self.clex.build( + optimize=lex_optimize, + lextab=lextab, + outputdir=taboutputdir) + self.tokens = self.clex.tokens + + rules_with_opt = [ + 'abstract_declarator', + 'assignment_expression', + 'declaration_list', + 'declaration_specifiers', + 'designation', + 'expression', + 'identifier_list', + 'init_declarator_list', + 'initializer_list', + 'parameter_type_list', + 'specifier_qualifier_list', + 'block_item_list', + 'type_qualifier_list', + 'struct_declarator_list' + ] + + for rule in rules_with_opt: + self._create_opt_rule(rule) + + self.cparser = yacc.yacc( + module=self, + start='translation_unit_or_empty', + debug=yacc_debug, + optimize=yacc_optimize, + tabmodule=yacctab, + outputdir=taboutputdir) + + # Stack of scopes for keeping track of symbols. _scope_stack[-1] is + # the current (topmost) scope. Each scope is a dictionary that + # specifies whether a name is a type. If _scope_stack[n][name] is + # True, 'name' is currently a type in the scope. If it's False, + # 'name' is used in the scope but not as a type (for instance, if we + # saw: int name; + # If 'name' is not a key in _scope_stack[n] then 'name' was not defined + # in this scope at all. + self._scope_stack = [dict()] + + # Keeps track of the last token given to yacc (the lookahead token) + self._last_yielded_token = None + + def parse(self, text, filename='', debuglevel=0): + """ Parses C code and returns an AST. 
+ + text: + A string containing the C source code + + filename: + Name of the file being parsed (for meaningful + error messages) + + debuglevel: + Debug level to yacc + """ + self.clex.filename = filename + self.clex.reset_lineno() + self._scope_stack = [dict()] + self._last_yielded_token = None + return self.cparser.parse( + input=text, + lexer=self.clex, + debug=debuglevel) + + ######################-- PRIVATE --###################### + + def _push_scope(self): + self._scope_stack.append(dict()) + + def _pop_scope(self): + assert len(self._scope_stack) > 1 + self._scope_stack.pop() + + def _add_typedef_name(self, name, coord): + """ Add a new typedef name (ie a TYPEID) to the current scope + """ + if not self._scope_stack[-1].get(name, True): + self._parse_error( + "Typedef %r previously declared as non-typedef " + "in this scope" % name, coord) + self._scope_stack[-1][name] = True + + def _add_identifier(self, name, coord): + """ Add a new object, function, or enum member name (ie an ID) to the + current scope + """ + if self._scope_stack[-1].get(name, False): + self._parse_error( + "Non-typedef %r previously declared as typedef " + "in this scope" % name, coord) + self._scope_stack[-1][name] = False + + def _is_type_in_scope(self, name): + """ Is *name* a typedef-name in the current scope? + """ + for scope in reversed(self._scope_stack): + # If name is an identifier in this scope it shadows typedefs in + # higher scopes. + in_scope = scope.get(name) + if in_scope is not None: return in_scope + return False + + def _lex_error_func(self, msg, line, column): + self._parse_error(msg, self._coord(line, column)) + + def _lex_on_lbrace_func(self): + self._push_scope() + + def _lex_on_rbrace_func(self): + self._pop_scope() + + def _lex_type_lookup_func(self, name): + """ Looks up types that were previously defined with + typedef. + Passed to the lexer for recognizing identifiers that + are types. + """ + is_type = self._is_type_in_scope(name) + return is_type + + def _get_yacc_lookahead_token(self): + """ We need access to yacc's lookahead token in certain cases. + This is the last token yacc requested from the lexer, so we + ask the lexer. + """ + return self.clex.last_token + + # To understand what's going on here, read sections A.8.5 and + # A.8.6 of K&R2 very carefully. + # + # A C type consists of a basic type declaration, with a list + # of modifiers. For example: + # + # int *c[5]; + # + # The basic declaration here is 'int c', and the pointer and + # the array are the modifiers. + # + # Basic declarations are represented by TypeDecl (from module c_ast) and the + # modifiers are FuncDecl, PtrDecl and ArrayDecl. + # + # The standard states that whenever a new modifier is parsed, it should be + # added to the end of the list of modifiers. For example: + # + # K&R2 A.8.6.2: Array Declarators + # + # In a declaration T D where D has the form + # D1 [constant-expression-opt] + # and the type of the identifier in the declaration T D1 is + # "type-modifier T", the type of the + # identifier of D is "type-modifier array of T" + # + # This is what this method does. The declarator it receives + # can be a list of declarators ending with TypeDecl. It + # tacks the modifier to the end of this list, just before + # the TypeDecl. + # + # Additionally, the modifier may be a list itself. This is + # useful for pointers, that can come as a chain from the rule + # p_pointer. In this case, the whole modifier list is spliced + # into the new location. 
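+ # + # For instance, in 'int *c[5]' the direct declarator arrives here as + # ArrayDecl(type=TypeDecl('c')) and the PtrDecl modifier is tacked on just + # before the TypeDecl, yielding ArrayDecl(PtrDecl(TypeDecl('c'))): + # "c is an array of 5 pointers to int".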
+ def _type_modify_decl(self, decl, modifier): + """ Tacks a type modifier on a declarator, and returns + the modified declarator. + + Note: the declarator and modifier may be modified + """ + #~ print '****' + #~ decl.show(offset=3) + #~ modifier.show(offset=3) + #~ print '****' + + modifier_head = modifier + modifier_tail = modifier + + # The modifier may be a nested list. Reach its tail. + # + while modifier_tail.type: + modifier_tail = modifier_tail.type + + # If the decl is a basic type, just tack the modifier onto + # it + # + if isinstance(decl, c_ast.TypeDecl): + modifier_tail.type = decl + return modifier + else: + # Otherwise, the decl is a list of modifiers. Reach + # its tail and splice the modifier onto the tail, + # pointing to the underlying basic type. + # + decl_tail = decl + + while not isinstance(decl_tail.type, c_ast.TypeDecl): + decl_tail = decl_tail.type + + modifier_tail.type = decl_tail.type + decl_tail.type = modifier_head + return decl + + # Due to the order in which declarators are constructed, + # they have to be fixed in order to look like a normal AST. + # + # When a declaration arrives from syntax construction, it has + # these problems: + # * The innermost TypeDecl has no type (because the basic + # type is only known at the uppermost declaration level) + # * The declaration has no variable name, since that is saved + # in the innermost TypeDecl + # * The typename of the declaration is a list of type + # specifiers, and not a node. Here, basic identifier types + # should be separated from more complex types like enums + # and structs. + # + # This method fixes these problems. + # + def _fix_decl_name_type(self, decl, typename): + """ Fixes a declaration. Modifies decl. + """ + # Reach the underlying basic type + # + type = decl + while not isinstance(type, c_ast.TypeDecl): + type = type.type + + decl.name = type.declname + type.quals = decl.quals + + # The typename is a list of types. If any type in this + # list isn't an IdentifierType, it must be the only + # type in the list (it's illegal to declare "int enum ..") + # If all the types are basic, they're collected in the + # IdentifierType holder. + # + for tn in typename: + if not isinstance(tn, c_ast.IdentifierType): + if len(typename) > 1: + self._parse_error( + "Invalid multiple types specified", tn.coord) + else: + type.type = tn + return decl + + if not typename: + # Functions default to returning int + # + if not isinstance(decl.type, c_ast.FuncDecl): + self._parse_error( + "Missing type in declaration", decl.coord) + type.type = c_ast.IdentifierType( + ['int'], + coord=decl.coord) + else: + # At this point, we know that typename is a list of IdentifierType + # nodes. Concatenate all the names into a single list. + # + type.type = c_ast.IdentifierType( + [name for id in typename for name in id.names], + coord=typename[0].coord) + return decl + + def _add_declaration_specifier(self, declspec, newspec, kind): + """ Declaration specifiers are represented by a dictionary + with the entries: + * qual: a list of type qualifiers + * storage: a list of storage type qualifiers + * type: a list of type specifiers + * function: a list of function specifiers + + This method is given a declaration specifier, and a + new specifier of a given kind. + Returns the declaration specifier, with the new + specifier incorporated. 
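+ + For example, adding 'const' with kind='qual' to an empty specifier + yields dict(qual=['const'], storage=[], type=[], function=[]).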
+ """ + spec = declspec or dict(qual=[], storage=[], type=[], function=[]) + spec[kind].insert(0, newspec) + return spec + + def _build_declarations(self, spec, decls, typedef_namespace=False): + """ Builds a list of declarations all sharing the given specifiers. + If typedef_namespace is true, each declared name is added + to the "typedef namespace", which also includes objects, + functions, and enum constants. + """ + is_typedef = 'typedef' in spec['storage'] + declarations = [] + + # Bit-fields are allowed to be unnamed. + # + if decls[0].get('bitsize') is not None: + pass + + # When redeclaring typedef names as identifiers in inner scopes, a + # problem can occur where the identifier gets grouped into + # spec['type'], leaving decl as None. This can only occur for the + # first declarator. + # + elif decls[0]['decl'] is None: + if len(spec['type']) < 2 or len(spec['type'][-1].names) != 1 or \ + not self._is_type_in_scope(spec['type'][-1].names[0]): + coord = '?' + for t in spec['type']: + if hasattr(t, 'coord'): + coord = t.coord + break + self._parse_error('Invalid declaration', coord) + + # Make this look as if it came from "direct_declarator:ID" + decls[0]['decl'] = c_ast.TypeDecl( + declname=spec['type'][-1].names[0], + type=None, + quals=None, + coord=spec['type'][-1].coord) + # Remove the "new" type's name from the end of spec['type'] + del spec['type'][-1] + + # A similar problem can occur where the declaration ends up looking + # like an abstract declarator. Give it a name if this is the case. + # + elif not isinstance(decls[0]['decl'], + (c_ast.Struct, c_ast.Union, c_ast.IdentifierType)): + decls_0_tail = decls[0]['decl'] + while not isinstance(decls_0_tail, c_ast.TypeDecl): + decls_0_tail = decls_0_tail.type + if decls_0_tail.declname is None: + decls_0_tail.declname = spec['type'][-1].names[0] + del spec['type'][-1] + + for decl in decls: + assert decl['decl'] is not None + if is_typedef: + declaration = c_ast.Typedef( + name=None, + quals=spec['qual'], + storage=spec['storage'], + type=decl['decl'], + coord=decl['decl'].coord) + else: + declaration = c_ast.Decl( + name=None, + quals=spec['qual'], + storage=spec['storage'], + funcspec=spec['function'], + type=decl['decl'], + init=decl.get('init'), + bitsize=decl.get('bitsize'), + coord=decl['decl'].coord) + + if isinstance(declaration.type, + (c_ast.Struct, c_ast.Union, c_ast.IdentifierType)): + fixed_decl = declaration + else: + fixed_decl = self._fix_decl_name_type(declaration, spec['type']) + + # Add the type name defined by typedef to a + # symbol table (for usage in the lexer) + # + if typedef_namespace: + if is_typedef: + self._add_typedef_name(fixed_decl.name, fixed_decl.coord) + else: + self._add_identifier(fixed_decl.name, fixed_decl.coord) + + declarations.append(fixed_decl) + + return declarations + + def _build_function_definition(self, spec, decl, param_decls, body): + """ Builds a function definition. + """ + assert 'typedef' not in spec['storage'] + + declaration = self._build_declarations( + spec=spec, + decls=[dict(decl=decl, init=None)], + typedef_namespace=True)[0] + + return c_ast.FuncDef( + decl=declaration, + param_decls=param_decls, + body=body, + coord=decl.coord) + + def _select_struct_union_class(self, token): + """ Given a token (either STRUCT or UNION), selects the + appropriate AST class. 
+ """ + if token == 'struct': + return c_ast.Struct + else: + return c_ast.Union + + ## + ## Precedence and associativity of operators + ## + precedence = ( + ('left', 'LOR'), + ('left', 'LAND'), + ('left', 'OR'), + ('left', 'XOR'), + ('left', 'AND'), + ('left', 'EQ', 'NE'), + ('left', 'GT', 'GE', 'LT', 'LE'), + ('left', 'RSHIFT', 'LSHIFT'), + ('left', 'PLUS', 'MINUS'), + ('left', 'TIMES', 'DIVIDE', 'MOD') + ) + + ## + ## Grammar productions + ## Implementation of the BNF defined in K&R2 A.13 + ## + + # Wrapper around a translation unit, to allow for empty input. + # Not strictly part of the C99 Grammar, but useful in practice. + # + def p_translation_unit_or_empty(self, p): + """ translation_unit_or_empty : translation_unit + | empty + """ + if p[1] is None: + p[0] = c_ast.FileAST([]) + else: + p[0] = c_ast.FileAST(p[1]) + + def p_translation_unit_1(self, p): + """ translation_unit : external_declaration + """ + # Note: external_declaration is already a list + # + p[0] = p[1] + + def p_translation_unit_2(self, p): + """ translation_unit : translation_unit external_declaration + """ + if p[2] is not None: + p[1].extend(p[2]) + p[0] = p[1] + + # Declarations always come as lists (because they can be + # several in one line), so we wrap the function definition + # into a list as well, to make the return value of + # external_declaration homogenous. + # + def p_external_declaration_1(self, p): + """ external_declaration : function_definition + """ + p[0] = [p[1]] + + def p_external_declaration_2(self, p): + """ external_declaration : declaration + """ + p[0] = p[1] + + def p_external_declaration_3(self, p): + """ external_declaration : pp_directive + """ + p[0] = p[1] + + def p_external_declaration_4(self, p): + """ external_declaration : SEMI + """ + p[0] = None + + def p_pp_directive(self, p): + """ pp_directive : PPHASH + """ + self._parse_error('Directives not supported yet', + self._coord(p.lineno(1))) + + # In function definitions, the declarator can be followed by + # a declaration list, for old "K&R style" function definitios. + # + def p_function_definition_1(self, p): + """ function_definition : declarator declaration_list_opt compound_statement + """ + # no declaration specifiers - 'int' becomes the default type + spec = dict( + qual=[], + storage=[], + type=[c_ast.IdentifierType(['int'], + coord=self._coord(p.lineno(1)))], + function=[]) + + p[0] = self._build_function_definition( + spec=spec, + decl=p[1], + param_decls=p[2], + body=p[3]) + + def p_function_definition_2(self, p): + """ function_definition : declaration_specifiers declarator declaration_list_opt compound_statement + """ + spec = p[1] + + p[0] = self._build_function_definition( + spec=spec, + decl=p[2], + param_decls=p[3], + body=p[4]) + + def p_statement(self, p): + """ statement : labeled_statement + | expression_statement + | compound_statement + | selection_statement + | iteration_statement + | jump_statement + """ + p[0] = p[1] + + # In C, declarations can come several in a line: + # int x, *px, romulo = 5; + # + # However, for the AST, we will split them to separate Decl + # nodes. + # + # This rule splits its declarations and always returns a list + # of Decl nodes, even if it's one element long. 
+ # + def p_decl_body(self, p): + """ decl_body : declaration_specifiers init_declarator_list_opt + """ + spec = p[1] + + # p[2] (init_declarator_list_opt) is either a list or None + # + if p[2] is None: + # By the standard, you must have at least one declarator unless + # declaring a structure tag, a union tag, or the members of an + # enumeration. + # + ty = spec['type'] + s_u_or_e = (c_ast.Struct, c_ast.Union, c_ast.Enum) + if len(ty) == 1 and isinstance(ty[0], s_u_or_e): + decls = [c_ast.Decl( + name=None, + quals=spec['qual'], + storage=spec['storage'], + funcspec=spec['function'], + type=ty[0], + init=None, + bitsize=None, + coord=ty[0].coord)] + + # However, this case can also occur on redeclared identifiers in + # an inner scope. The trouble is that the redeclared type's name + # gets grouped into declaration_specifiers; _build_declarations + # compensates for this. + # + else: + decls = self._build_declarations( + spec=spec, + decls=[dict(decl=None, init=None)], + typedef_namespace=True) + + else: + decls = self._build_declarations( + spec=spec, + decls=p[2], + typedef_namespace=True) + + p[0] = decls + + # The declaration has been split to a decl_body sub-rule and + # SEMI, because having them in a single rule created a problem + # for defining typedefs. + # + # If a typedef line was directly followed by a line using the + # type defined with the typedef, the type would not be + # recognized. This is because to reduce the declaration rule, + # the parser's lookahead asked for the token after SEMI, which + # was the type from the next line, and the lexer had no chance + # to see the updated type symbol table. + # + # Splitting solves this problem, because after seeing SEMI, + # the parser reduces decl_body, which actually adds the new + # type into the table to be seen by the lexer before the next + # line is reached. 
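+ # + # Concretely, given: + # typedef int T; + # T x; + # decl_body is reduced while SEMI is still the lookahead, entering 'T' into + # the table, so the 'T' on the next line is lexed as TYPEID rather than ID.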
+ def p_declaration(self, p): + """ declaration : decl_body SEMI + """ + p[0] = p[1] + + # Since each declaration is a list of declarations, this + # rule will combine all the declarations and return a single + # list + # + def p_declaration_list(self, p): + """ declaration_list : declaration + | declaration_list declaration + """ + p[0] = p[1] if len(p) == 2 else p[1] + p[2] + + def p_declaration_specifiers_1(self, p): + """ declaration_specifiers : type_qualifier declaration_specifiers_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'qual') + + def p_declaration_specifiers_2(self, p): + """ declaration_specifiers : type_specifier declaration_specifiers_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'type') + + def p_declaration_specifiers_3(self, p): + """ declaration_specifiers : storage_class_specifier declaration_specifiers_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'storage') + + def p_declaration_specifiers_4(self, p): + """ declaration_specifiers : function_specifier declaration_specifiers_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'function') + + def p_storage_class_specifier(self, p): + """ storage_class_specifier : AUTO + | REGISTER + | STATIC + | EXTERN + | TYPEDEF + """ + p[0] = p[1] + + def p_function_specifier(self, p): + """ function_specifier : INLINE + """ + p[0] = p[1] + + def p_type_specifier_1(self, p): + """ type_specifier : VOID + | _BOOL + | CHAR + | SHORT + | INT + | LONG + | FLOAT + | DOUBLE + | _COMPLEX + | SIGNED + | UNSIGNED + """ + p[0] = c_ast.IdentifierType([p[1]], coord=self._coord(p.lineno(1))) + + def p_type_specifier_2(self, p): + """ type_specifier : typedef_name + | enum_specifier + | struct_or_union_specifier + """ + p[0] = p[1] + + def p_type_qualifier(self, p): + """ type_qualifier : CONST + | RESTRICT + | VOLATILE + """ + p[0] = p[1] + + def p_init_declarator_list_1(self, p): + """ init_declarator_list : init_declarator + | init_declarator_list COMMA init_declarator + """ + p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]] + + # If the code is declaring a variable that was declared a typedef in an + # outer scope, yacc will think the name is part of declaration_specifiers, + # not init_declarator, and will then get confused by EQUALS. Pass None + # up in place of declarator, and handle this at a higher level. + # + def p_init_declarator_list_2(self, p): + """ init_declarator_list : EQUALS initializer + """ + p[0] = [dict(decl=None, init=p[2])] + + # Similarly, if the code contains duplicate typedefs of, for example, + # array types, the array portion will appear as an abstract declarator. 
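+ # For example, a repeated 'typedef int A[10];' groups the known TYPEID 'A' + # into the specifiers, leaving the trailing '[10]' to arrive here as an + # abstract declarator.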
+ # + def p_init_declarator_list_3(self, p): + """ init_declarator_list : abstract_declarator + """ + p[0] = [dict(decl=p[1], init=None)] + + # Returns a {decl=<declarator> : init=<initializer>} dictionary + # If there's no initializer, uses None + # + def p_init_declarator(self, p): + """ init_declarator : declarator + | declarator EQUALS initializer + """ + p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None)) + + def p_specifier_qualifier_list_1(self, p): + """ specifier_qualifier_list : type_qualifier specifier_qualifier_list_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'qual') + + def p_specifier_qualifier_list_2(self, p): + """ specifier_qualifier_list : type_specifier specifier_qualifier_list_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'type') + + # TYPEID is allowed here (and in other struct/enum related tag names), because + # struct/enum tags reside in their own namespace and can be named the same as types + # + def p_struct_or_union_specifier_1(self, p): + """ struct_or_union_specifier : struct_or_union ID + | struct_or_union TYPEID + """ + klass = self._select_struct_union_class(p[1]) + p[0] = klass( + name=p[2], + decls=None, + coord=self._coord(p.lineno(2))) + + def p_struct_or_union_specifier_2(self, p): + """ struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close + """ + klass = self._select_struct_union_class(p[1]) + p[0] = klass( + name=None, + decls=p[3], + coord=self._coord(p.lineno(2))) + + def p_struct_or_union_specifier_3(self, p): + """ struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close + | struct_or_union TYPEID brace_open struct_declaration_list brace_close + """ + klass = self._select_struct_union_class(p[1]) + p[0] = klass( + name=p[2], + decls=p[4], + coord=self._coord(p.lineno(2))) + + def p_struct_or_union(self, p): + """ struct_or_union : STRUCT + | UNION + """ + p[0] = p[1] + + # Combine all declarations into a single list + # + def p_struct_declaration_list(self, p): + """ struct_declaration_list : struct_declaration + | struct_declaration_list struct_declaration + """ + p[0] = p[1] if len(p) == 2 else p[1] + p[2] + + def p_struct_declaration_1(self, p): + """ struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI + """ + spec = p[1] + assert 'typedef' not in spec['storage'] + + if p[2] is not None: + decls = self._build_declarations( + spec=spec, + decls=p[2]) + + elif len(spec['type']) == 1: + # Anonymous struct/union, gcc extension, C1x feature. + # Although the standard only allows structs/unions here, I see no + # reason to disallow other types since some compilers have typedefs + # here, and pycparser isn't about rejecting all invalid code. + # + node = spec['type'][0] + if isinstance(node, c_ast.Node): + decl_type = node + else: + decl_type = c_ast.IdentifierType(node) + + decls = self._build_declarations( + spec=spec, + decls=[dict(decl=decl_type)]) + + else: + # Structure/union members can have the same names as typedefs. + # The trouble is that the member's name gets grouped into + # specifier_qualifier_list; _build_declarations compensates. + # + decls = self._build_declarations( + spec=spec, + decls=[dict(decl=None, init=None)]) + + p[0] = decls + + def p_struct_declaration_2(self, p): + """ struct_declaration : specifier_qualifier_list abstract_declarator SEMI + """ + # "Abstract declarator?!", you ask? Structure members can have the + # same names as typedefs. 
The trouble is that the member's name gets + # grouped into specifier_qualifier_list, leaving any remainder to + # appear as an abstract declarator, as in: + # typedef int Foo; + # struct { Foo Foo[3]; }; + # + p[0] = self._build_declarations( + spec=p[1], + decls=[dict(decl=p[2], init=None)]) + + def p_struct_declarator_list(self, p): + """ struct_declarator_list : struct_declarator + | struct_declarator_list COMMA struct_declarator + """ + p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]] + + # struct_declarator passes up a dict with the keys: decl (for + # the underlying declarator) and bitsize (for the bitsize) + # + def p_struct_declarator_1(self, p): + """ struct_declarator : declarator + """ + p[0] = {'decl': p[1], 'bitsize': None} + + def p_struct_declarator_2(self, p): + """ struct_declarator : declarator COLON constant_expression + | COLON constant_expression + """ + if len(p) > 3: + p[0] = {'decl': p[1], 'bitsize': p[3]} + else: + p[0] = {'decl': c_ast.TypeDecl(None, None, None), 'bitsize': p[2]} + + def p_enum_specifier_1(self, p): + """ enum_specifier : ENUM ID + | ENUM TYPEID + """ + p[0] = c_ast.Enum(p[2], None, self._coord(p.lineno(1))) + + def p_enum_specifier_2(self, p): + """ enum_specifier : ENUM brace_open enumerator_list brace_close + """ + p[0] = c_ast.Enum(None, p[3], self._coord(p.lineno(1))) + + def p_enum_specifier_3(self, p): + """ enum_specifier : ENUM ID brace_open enumerator_list brace_close + | ENUM TYPEID brace_open enumerator_list brace_close + """ + p[0] = c_ast.Enum(p[2], p[4], self._coord(p.lineno(1))) + + def p_enumerator_list(self, p): + """ enumerator_list : enumerator + | enumerator_list COMMA + | enumerator_list COMMA enumerator + """ + if len(p) == 2: + p[0] = c_ast.EnumeratorList([p[1]], p[1].coord) + elif len(p) == 3: + p[0] = p[1] + else: + p[1].enumerators.append(p[3]) + p[0] = p[1] + + def p_enumerator(self, p): + """ enumerator : ID + | ID EQUALS constant_expression + """ + if len(p) == 2: + enumerator = c_ast.Enumerator( + p[1], None, + self._coord(p.lineno(1))) + else: + enumerator = c_ast.Enumerator( + p[1], p[3], + self._coord(p.lineno(1))) + self._add_identifier(enumerator.name, enumerator.coord) + + p[0] = enumerator + + def p_declarator_1(self, p): + """ declarator : direct_declarator + """ + p[0] = p[1] + + def p_declarator_2(self, p): + """ declarator : pointer direct_declarator + """ + p[0] = self._type_modify_decl(p[2], p[1]) + + # Since it's impossible for a type to be specified after a pointer, assume + # it's intended to be the name for this declaration. _add_identifier will + # raise an error if this TYPEID can't be redeclared. 
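+    # A hypothetical example: after "typedef char TT;", the declaration
+    #
+    #     unsigned int *TT;   /* TT lexes as TYPEID but names the new object */
+    #
+    # declares a pointer variable called TT rather than a pointer to a type.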
+ # + def p_declarator_3(self, p): + """ declarator : pointer TYPEID + """ + decl = c_ast.TypeDecl( + declname=p[2], + type=None, + quals=None, + coord=self._coord(p.lineno(2))) + + p[0] = self._type_modify_decl(decl, p[1]) + + def p_direct_declarator_1(self, p): + """ direct_declarator : ID + """ + p[0] = c_ast.TypeDecl( + declname=p[1], + type=None, + quals=None, + coord=self._coord(p.lineno(1))) + + def p_direct_declarator_2(self, p): + """ direct_declarator : LPAREN declarator RPAREN + """ + p[0] = p[2] + + def p_direct_declarator_3(self, p): + """ direct_declarator : direct_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET + """ + quals = (p[3] if len(p) > 5 else []) or [] + # Accept dimension qualifiers + # Per C99 6.7.5.3 p7 + arr = c_ast.ArrayDecl( + type=None, + dim=p[4] if len(p) > 5 else p[3], + dim_quals=quals, + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + def p_direct_declarator_4(self, p): + """ direct_declarator : direct_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET + | direct_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET + """ + # Using slice notation for PLY objects doesn't work in Python 3 for the + # version of PLY embedded with pycparser; see PLY Google Code issue 30. + # Work around that here by listing the two elements separately. + listed_quals = [item if isinstance(item, list) else [item] + for item in [p[3],p[4]]] + dim_quals = [qual for sublist in listed_quals for qual in sublist + if qual is not None] + arr = c_ast.ArrayDecl( + type=None, + dim=p[5], + dim_quals=dim_quals, + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + # Special for VLAs + # + def p_direct_declarator_5(self, p): + """ direct_declarator : direct_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET + """ + arr = c_ast.ArrayDecl( + type=None, + dim=c_ast.ID(p[4], self._coord(p.lineno(4))), + dim_quals=p[3] if p[3] != None else [], + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + def p_direct_declarator_6(self, p): + """ direct_declarator : direct_declarator LPAREN parameter_type_list RPAREN + | direct_declarator LPAREN identifier_list_opt RPAREN + """ + func = c_ast.FuncDecl( + args=p[3], + type=None, + coord=p[1].coord) + + # To see why _get_yacc_lookahead_token is needed, consider: + # typedef char TT; + # void foo(int TT) { TT = 10; } + # Outside the function, TT is a typedef, but inside (starting and + # ending with the braces) it's a parameter. The trouble begins with + # yacc's lookahead token. We don't know if we're declaring or + # defining a function until we see LBRACE, but if we wait for yacc to + # trigger a rule on that token, then TT will have already been read + # and incorrectly interpreted as TYPEID. We need to add the + # parameters to the scope the moment the lexer sees LBRACE. + # + if self._get_yacc_lookahead_token().type == "LBRACE": + if func.args is not None: + for param in func.args.params: + if isinstance(param, c_ast.EllipsisParam): break + self._add_identifier(param.name, param.coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=func) + + def p_pointer(self, p): + """ pointer : TIMES type_qualifier_list_opt + | TIMES type_qualifier_list_opt pointer + """ + coord = self._coord(p.lineno(1)) + # Pointer decls nest from inside out. This is important when different + # levels have different qualifiers. 
For example: + # + # char * const * p; + # + # Means "pointer to const pointer to char" + # + # While: + # + # char ** const p; + # + # Means "const pointer to pointer to char" + # + # So when we construct PtrDecl nestings, the leftmost pointer goes in + # as the most nested type. + nested_type = c_ast.PtrDecl(quals=p[2] or [], type=None, coord=coord) + if len(p) > 3: + tail_type = p[3] + while tail_type.type is not None: + tail_type = tail_type.type + tail_type.type = nested_type + p[0] = p[3] + else: + p[0] = nested_type + + def p_type_qualifier_list(self, p): + """ type_qualifier_list : type_qualifier + | type_qualifier_list type_qualifier + """ + p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]] + + def p_parameter_type_list(self, p): + """ parameter_type_list : parameter_list + | parameter_list COMMA ELLIPSIS + """ + if len(p) > 2: + p[1].params.append(c_ast.EllipsisParam(self._coord(p.lineno(3)))) + + p[0] = p[1] + + def p_parameter_list(self, p): + """ parameter_list : parameter_declaration + | parameter_list COMMA parameter_declaration + """ + if len(p) == 2: # single parameter + p[0] = c_ast.ParamList([p[1]], p[1].coord) + else: + p[1].params.append(p[3]) + p[0] = p[1] + + def p_parameter_declaration_1(self, p): + """ parameter_declaration : declaration_specifiers declarator + """ + spec = p[1] + if not spec['type']: + spec['type'] = [c_ast.IdentifierType(['int'], + coord=self._coord(p.lineno(1)))] + p[0] = self._build_declarations( + spec=spec, + decls=[dict(decl=p[2])])[0] + + def p_parameter_declaration_2(self, p): + """ parameter_declaration : declaration_specifiers abstract_declarator_opt + """ + spec = p[1] + if not spec['type']: + spec['type'] = [c_ast.IdentifierType(['int'], + coord=self._coord(p.lineno(1)))] + + # Parameters can have the same names as typedefs. The trouble is that + # the parameter's name gets grouped into declaration_specifiers, making + # it look like an old-style declaration; compensate. 
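+    # A hypothetical example: after "typedef char TT;", the prototype
+    #
+    #     int f(int TT);    /* "int TT" groups into declaration_specifiers */
+    #
+    # must still yield a parameter named TT of type int; the check below
+    # recognizes this via _is_type_in_scope.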
+ # + if len(spec['type']) > 1 and len(spec['type'][-1].names) == 1 and \ + self._is_type_in_scope(spec['type'][-1].names[0]): + decl = self._build_declarations( + spec=spec, + decls=[dict(decl=p[2], init=None)])[0] + + # This truly is an old-style parameter declaration + # + else: + decl = c_ast.Typename( + name='', + quals=spec['qual'], + type=p[2] or c_ast.TypeDecl(None, None, None), + coord=self._coord(p.lineno(2))) + typename = spec['type'] + decl = self._fix_decl_name_type(decl, typename) + + p[0] = decl + + def p_identifier_list(self, p): + """ identifier_list : identifier + | identifier_list COMMA identifier + """ + if len(p) == 2: # single parameter + p[0] = c_ast.ParamList([p[1]], p[1].coord) + else: + p[1].params.append(p[3]) + p[0] = p[1] + + def p_initializer_1(self, p): + """ initializer : assignment_expression + """ + p[0] = p[1] + + def p_initializer_2(self, p): + """ initializer : brace_open initializer_list_opt brace_close + | brace_open initializer_list COMMA brace_close + """ + if p[2] is None: + p[0] = c_ast.InitList([], self._coord(p.lineno(1))) + else: + p[0] = p[2] + + def p_initializer_list(self, p): + """ initializer_list : designation_opt initializer + | initializer_list COMMA designation_opt initializer + """ + if len(p) == 3: # single initializer + init = p[2] if p[1] is None else c_ast.NamedInitializer(p[1], p[2]) + p[0] = c_ast.InitList([init], p[2].coord) + else: + init = p[4] if p[3] is None else c_ast.NamedInitializer(p[3], p[4]) + p[1].exprs.append(init) + p[0] = p[1] + + def p_designation(self, p): + """ designation : designator_list EQUALS + """ + p[0] = p[1] + + # Designators are represented as a list of nodes, in the order in which + # they're written in the code. + # + def p_designator_list(self, p): + """ designator_list : designator + | designator_list designator + """ + p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]] + + def p_designator(self, p): + """ designator : LBRACKET constant_expression RBRACKET + | PERIOD identifier + """ + p[0] = p[2] + + def p_type_name(self, p): + """ type_name : specifier_qualifier_list abstract_declarator_opt + """ + #~ print '==========' + #~ print p[1] + #~ print p[2] + #~ print p[2].children() + #~ print '==========' + + typename = c_ast.Typename( + name='', + quals=p[1]['qual'], + type=p[2] or c_ast.TypeDecl(None, None, None), + coord=self._coord(p.lineno(2))) + + p[0] = self._fix_decl_name_type(typename, p[1]['type']) + + def p_abstract_declarator_1(self, p): + """ abstract_declarator : pointer + """ + dummytype = c_ast.TypeDecl(None, None, None) + p[0] = self._type_modify_decl( + decl=dummytype, + modifier=p[1]) + + def p_abstract_declarator_2(self, p): + """ abstract_declarator : pointer direct_abstract_declarator + """ + p[0] = self._type_modify_decl(p[2], p[1]) + + def p_abstract_declarator_3(self, p): + """ abstract_declarator : direct_abstract_declarator + """ + p[0] = p[1] + + # Creating and using direct_abstract_declarator_opt here + # instead of listing both direct_abstract_declarator and the + # lack of it in the beginning of _1 and _2 caused two + # shift/reduce errors. 
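+    # For instance (a hypothetical input): the type name in
+    # "sizeof(int (*)[4])" carries the abstract declarator "(*)[4]", a
+    # parenthesized pointer followed by an array suffix, assembled entirely
+    # by the rules below without naming an identifier.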
+ # + def p_direct_abstract_declarator_1(self, p): + """ direct_abstract_declarator : LPAREN abstract_declarator RPAREN """ + p[0] = p[2] + + def p_direct_abstract_declarator_2(self, p): + """ direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET + """ + arr = c_ast.ArrayDecl( + type=None, + dim=p[3], + dim_quals=[], + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + def p_direct_abstract_declarator_3(self, p): + """ direct_abstract_declarator : LBRACKET assignment_expression_opt RBRACKET + """ + p[0] = c_ast.ArrayDecl( + type=c_ast.TypeDecl(None, None, None), + dim=p[2], + dim_quals=[], + coord=self._coord(p.lineno(1))) + + def p_direct_abstract_declarator_4(self, p): + """ direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET + """ + arr = c_ast.ArrayDecl( + type=None, + dim=c_ast.ID(p[3], self._coord(p.lineno(3))), + dim_quals=[], + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + def p_direct_abstract_declarator_5(self, p): + """ direct_abstract_declarator : LBRACKET TIMES RBRACKET + """ + p[0] = c_ast.ArrayDecl( + type=c_ast.TypeDecl(None, None, None), + dim=c_ast.ID(p[3], self._coord(p.lineno(3))), + dim_quals=[], + coord=self._coord(p.lineno(1))) + + def p_direct_abstract_declarator_6(self, p): + """ direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN + """ + func = c_ast.FuncDecl( + args=p[3], + type=None, + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=func) + + def p_direct_abstract_declarator_7(self, p): + """ direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN + """ + p[0] = c_ast.FuncDecl( + args=p[2], + type=c_ast.TypeDecl(None, None, None), + coord=self._coord(p.lineno(1))) + + # declaration is a list, statement isn't. 
To make it consistent, block_item + # will always be a list + # + def p_block_item(self, p): + """ block_item : declaration + | statement + """ + p[0] = p[1] if isinstance(p[1], list) else [p[1]] + + # Since we made block_item a list, this just combines lists + # + def p_block_item_list(self, p): + """ block_item_list : block_item + | block_item_list block_item + """ + # Empty block items (plain ';') produce [None], so ignore them + p[0] = p[1] if (len(p) == 2 or p[2] == [None]) else p[1] + p[2] + + def p_compound_statement_1(self, p): + """ compound_statement : brace_open block_item_list_opt brace_close """ + p[0] = c_ast.Compound( + block_items=p[2], + coord=self._coord(p.lineno(1))) + + def p_labeled_statement_1(self, p): + """ labeled_statement : ID COLON statement """ + p[0] = c_ast.Label(p[1], p[3], self._coord(p.lineno(1))) + + def p_labeled_statement_2(self, p): + """ labeled_statement : CASE constant_expression COLON statement """ + p[0] = c_ast.Case(p[2], [p[4]], self._coord(p.lineno(1))) + + def p_labeled_statement_3(self, p): + """ labeled_statement : DEFAULT COLON statement """ + p[0] = c_ast.Default([p[3]], self._coord(p.lineno(1))) + + def p_selection_statement_1(self, p): + """ selection_statement : IF LPAREN expression RPAREN statement """ + p[0] = c_ast.If(p[3], p[5], None, self._coord(p.lineno(1))) + + def p_selection_statement_2(self, p): + """ selection_statement : IF LPAREN expression RPAREN statement ELSE statement """ + p[0] = c_ast.If(p[3], p[5], p[7], self._coord(p.lineno(1))) + + def p_selection_statement_3(self, p): + """ selection_statement : SWITCH LPAREN expression RPAREN statement """ + p[0] = fix_switch_cases( + c_ast.Switch(p[3], p[5], self._coord(p.lineno(1)))) + + def p_iteration_statement_1(self, p): + """ iteration_statement : WHILE LPAREN expression RPAREN statement """ + p[0] = c_ast.While(p[3], p[5], self._coord(p.lineno(1))) + + def p_iteration_statement_2(self, p): + """ iteration_statement : DO statement WHILE LPAREN expression RPAREN SEMI """ + p[0] = c_ast.DoWhile(p[5], p[2], self._coord(p.lineno(1))) + + def p_iteration_statement_3(self, p): + """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement """ + p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._coord(p.lineno(1))) + + def p_iteration_statement_4(self, p): + """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement """ + p[0] = c_ast.For(c_ast.DeclList(p[3], self._coord(p.lineno(1))), + p[4], p[6], p[8], self._coord(p.lineno(1))) + + def p_jump_statement_1(self, p): + """ jump_statement : GOTO ID SEMI """ + p[0] = c_ast.Goto(p[2], self._coord(p.lineno(1))) + + def p_jump_statement_2(self, p): + """ jump_statement : BREAK SEMI """ + p[0] = c_ast.Break(self._coord(p.lineno(1))) + + def p_jump_statement_3(self, p): + """ jump_statement : CONTINUE SEMI """ + p[0] = c_ast.Continue(self._coord(p.lineno(1))) + + def p_jump_statement_4(self, p): + """ jump_statement : RETURN expression SEMI + | RETURN SEMI + """ + p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._coord(p.lineno(1))) + + def p_expression_statement(self, p): + """ expression_statement : expression_opt SEMI """ + if p[1] is None: + p[0] = c_ast.EmptyStatement(self._coord(p.lineno(1))) + else: + p[0] = p[1] + + def p_expression(self, p): + """ expression : assignment_expression + | expression COMMA assignment_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + if not isinstance(p[1], c_ast.ExprList): + p[1] = 
c_ast.ExprList([p[1]], p[1].coord) + + p[1].exprs.append(p[3]) + p[0] = p[1] + + def p_typedef_name(self, p): + """ typedef_name : TYPEID """ + p[0] = c_ast.IdentifierType([p[1]], coord=self._coord(p.lineno(1))) + + def p_assignment_expression(self, p): + """ assignment_expression : conditional_expression + | unary_expression assignment_operator assignment_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + p[0] = c_ast.Assignment(p[2], p[1], p[3], p[1].coord) + + # K&R2 defines these as many separate rules, to encode + # precedence and associativity. Why work hard ? I'll just use + # the built in precedence/associativity specification feature + # of PLY. (see precedence declaration above) + # + def p_assignment_operator(self, p): + """ assignment_operator : EQUALS + | XOREQUAL + | TIMESEQUAL + | DIVEQUAL + | MODEQUAL + | PLUSEQUAL + | MINUSEQUAL + | LSHIFTEQUAL + | RSHIFTEQUAL + | ANDEQUAL + | OREQUAL + """ + p[0] = p[1] + + def p_constant_expression(self, p): + """ constant_expression : conditional_expression """ + p[0] = p[1] + + def p_conditional_expression(self, p): + """ conditional_expression : binary_expression + | binary_expression CONDOP expression COLON conditional_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + p[0] = c_ast.TernaryOp(p[1], p[3], p[5], p[1].coord) + + def p_binary_expression(self, p): + """ binary_expression : cast_expression + | binary_expression TIMES binary_expression + | binary_expression DIVIDE binary_expression + | binary_expression MOD binary_expression + | binary_expression PLUS binary_expression + | binary_expression MINUS binary_expression + | binary_expression RSHIFT binary_expression + | binary_expression LSHIFT binary_expression + | binary_expression LT binary_expression + | binary_expression LE binary_expression + | binary_expression GE binary_expression + | binary_expression GT binary_expression + | binary_expression EQ binary_expression + | binary_expression NE binary_expression + | binary_expression AND binary_expression + | binary_expression OR binary_expression + | binary_expression XOR binary_expression + | binary_expression LAND binary_expression + | binary_expression LOR binary_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + p[0] = c_ast.BinaryOp(p[2], p[1], p[3], p[1].coord) + + def p_cast_expression_1(self, p): + """ cast_expression : unary_expression """ + p[0] = p[1] + + def p_cast_expression_2(self, p): + """ cast_expression : LPAREN type_name RPAREN cast_expression """ + p[0] = c_ast.Cast(p[2], p[4], self._coord(p.lineno(1))) + + def p_unary_expression_1(self, p): + """ unary_expression : postfix_expression """ + p[0] = p[1] + + def p_unary_expression_2(self, p): + """ unary_expression : PLUSPLUS unary_expression + | MINUSMINUS unary_expression + | unary_operator cast_expression + """ + p[0] = c_ast.UnaryOp(p[1], p[2], p[2].coord) + + def p_unary_expression_3(self, p): + """ unary_expression : SIZEOF unary_expression + | SIZEOF LPAREN type_name RPAREN + """ + p[0] = c_ast.UnaryOp( + p[1], + p[2] if len(p) == 3 else p[3], + self._coord(p.lineno(1))) + + def p_unary_operator(self, p): + """ unary_operator : AND + | TIMES + | PLUS + | MINUS + | NOT + | LNOT + """ + p[0] = p[1] + + def p_postfix_expression_1(self, p): + """ postfix_expression : primary_expression """ + p[0] = p[1] + + def p_postfix_expression_2(self, p): + """ postfix_expression : postfix_expression LBRACKET expression RBRACKET """ + p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord) + + def p_postfix_expression_3(self, p): + """ postfix_expression 
: postfix_expression LPAREN argument_expression_list RPAREN + | postfix_expression LPAREN RPAREN + """ + p[0] = c_ast.FuncCall(p[1], p[3] if len(p) == 5 else None, p[1].coord) + + def p_postfix_expression_4(self, p): + """ postfix_expression : postfix_expression PERIOD ID + | postfix_expression PERIOD TYPEID + | postfix_expression ARROW ID + | postfix_expression ARROW TYPEID + """ + field = c_ast.ID(p[3], self._coord(p.lineno(3))) + p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord) + + def p_postfix_expression_5(self, p): + """ postfix_expression : postfix_expression PLUSPLUS + | postfix_expression MINUSMINUS + """ + p[0] = c_ast.UnaryOp('p' + p[2], p[1], p[1].coord) + + def p_postfix_expression_6(self, p): + """ postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close + | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close + """ + p[0] = c_ast.CompoundLiteral(p[2], p[5]) + + def p_primary_expression_1(self, p): + """ primary_expression : identifier """ + p[0] = p[1] + + def p_primary_expression_2(self, p): + """ primary_expression : constant """ + p[0] = p[1] + + def p_primary_expression_3(self, p): + """ primary_expression : unified_string_literal + | unified_wstring_literal + """ + p[0] = p[1] + + def p_primary_expression_4(self, p): + """ primary_expression : LPAREN expression RPAREN """ + p[0] = p[2] + + def p_primary_expression_5(self, p): + """ primary_expression : OFFSETOF LPAREN type_name COMMA identifier RPAREN + """ + coord = self._coord(p.lineno(1)) + p[0] = c_ast.FuncCall(c_ast.ID(p[1], coord), + c_ast.ExprList([p[3], p[5]], coord), + coord) + + def p_argument_expression_list(self, p): + """ argument_expression_list : assignment_expression + | argument_expression_list COMMA assignment_expression + """ + if len(p) == 2: # single expr + p[0] = c_ast.ExprList([p[1]], p[1].coord) + else: + p[1].exprs.append(p[3]) + p[0] = p[1] + + def p_identifier(self, p): + """ identifier : ID """ + p[0] = c_ast.ID(p[1], self._coord(p.lineno(1))) + + def p_constant_1(self, p): + """ constant : INT_CONST_DEC + | INT_CONST_OCT + | INT_CONST_HEX + | INT_CONST_BIN + """ + p[0] = c_ast.Constant( + 'int', p[1], self._coord(p.lineno(1))) + + def p_constant_2(self, p): + """ constant : FLOAT_CONST + | HEX_FLOAT_CONST + """ + p[0] = c_ast.Constant( + 'float', p[1], self._coord(p.lineno(1))) + + def p_constant_3(self, p): + """ constant : CHAR_CONST + | WCHAR_CONST + """ + p[0] = c_ast.Constant( + 'char', p[1], self._coord(p.lineno(1))) + + # The "unified" string and wstring literal rules are for supporting + # concatenation of adjacent string literals. + # I.e. 
"hello " "world" is seen by the C compiler as a single string literal + # with the value "hello world" + # + def p_unified_string_literal(self, p): + """ unified_string_literal : STRING_LITERAL + | unified_string_literal STRING_LITERAL + """ + if len(p) == 2: # single literal + p[0] = c_ast.Constant( + 'string', p[1], self._coord(p.lineno(1))) + else: + p[1].value = p[1].value[:-1] + p[2][1:] + p[0] = p[1] + + def p_unified_wstring_literal(self, p): + """ unified_wstring_literal : WSTRING_LITERAL + | unified_wstring_literal WSTRING_LITERAL + """ + if len(p) == 2: # single literal + p[0] = c_ast.Constant( + 'string', p[1], self._coord(p.lineno(1))) + else: + p[1].value = p[1].value.rstrip()[:-1] + p[2][2:] + p[0] = p[1] + + def p_brace_open(self, p): + """ brace_open : LBRACE + """ + p[0] = p[1] + + def p_brace_close(self, p): + """ brace_close : RBRACE + """ + p[0] = p[1] + + def p_empty(self, p): + 'empty : ' + p[0] = None + + def p_error(self, p): + # If error recovery is added here in the future, make sure + # _get_yacc_lookahead_token still works! + # + if p: + self._parse_error( + 'before: %s' % p.value, + self._coord(lineno=p.lineno, + column=self.clex.find_tok_column(p))) + else: + self._parse_error('At end of input', '') + + +#------------------------------------------------------------------------------ +if __name__ == "__main__": + import pprint + import time, sys + + #t1 = time.time() + #parser = CParser(lex_optimize=True, yacc_debug=True, yacc_optimize=False) + #sys.write(time.time() - t1) + + #buf = ''' + #int (*k)(int); + #''' + + ## set debuglevel to 2 for debugging + #t = parser.parse(buf, 'x.c', debuglevel=0) + #t.show(showcoord=True) + diff --git a/lib/python3.4/site-packages/pycparser/lextab.py b/lib/python3.4/site-packages/pycparser/lextab.py new file mode 100644 index 0000000..87870db --- /dev/null +++ b/lib/python3.4/site-packages/pycparser/lextab.py @@ -0,0 +1,9 @@ +# pycparser.lextab.py. This file automatically created by PLY (version 3.4). Don't edit! 
+_tabversion = '3.4' +_lextokens = {'VOID': 1, 'LBRACKET': 1, 'WCHAR_CONST': 1, 'FLOAT_CONST': 1, 'MINUS': 1, 'RPAREN': 1, 'LONG': 1, 'PLUS': 1, 'ELLIPSIS': 1, 'GT': 1, 'GOTO': 1, 'ENUM': 1, 'PERIOD': 1, 'GE': 1, 'INT_CONST_DEC': 1, 'ARROW': 1, 'HEX_FLOAT_CONST': 1, 'DOUBLE': 1, 'MINUSEQUAL': 1, 'INT_CONST_OCT': 1, 'TIMESEQUAL': 1, 'OR': 1, 'SHORT': 1, 'RETURN': 1, 'RSHIFTEQUAL': 1, 'RESTRICT': 1, 'STATIC': 1, 'SIZEOF': 1, 'UNSIGNED': 1, 'UNION': 1, 'COLON': 1, 'WSTRING_LITERAL': 1, 'DIVIDE': 1, 'FOR': 1, 'PLUSPLUS': 1, 'EQUALS': 1, 'ELSE': 1, 'INLINE': 1, 'EQ': 1, 'AND': 1, 'TYPEID': 1, 'LBRACE': 1, 'PPHASH': 1, 'INT': 1, 'SIGNED': 1, 'CONTINUE': 1, 'NOT': 1, 'OREQUAL': 1, 'MOD': 1, 'RSHIFT': 1, 'DEFAULT': 1, 'CHAR': 1, 'WHILE': 1, 'DIVEQUAL': 1, 'EXTERN': 1, 'CASE': 1, 'LAND': 1, 'REGISTER': 1, 'MODEQUAL': 1, 'NE': 1, 'SWITCH': 1, 'INT_CONST_HEX': 1, '_COMPLEX': 1, 'PLUSEQUAL': 1, 'STRUCT': 1, 'CONDOP': 1, 'BREAK': 1, 'VOLATILE': 1, 'ANDEQUAL': 1, 'INT_CONST_BIN': 1, 'DO': 1, 'LNOT': 1, 'CONST': 1, 'LOR': 1, 'CHAR_CONST': 1, 'LSHIFT': 1, 'RBRACE': 1, '_BOOL': 1, 'LE': 1, 'SEMI': 1, 'LT': 1, 'COMMA': 1, 'OFFSETOF': 1, 'TYPEDEF': 1, 'XOR': 1, 'AUTO': 1, 'TIMES': 1, 'LPAREN': 1, 'MINUSMINUS': 1, 'ID': 1, 'IF': 1, 'STRING_LITERAL': 1, 'FLOAT': 1, 'XOREQUAL': 1, 'LSHIFTEQUAL': 1, 'RBRACKET': 1} +_lexreflags = 0 +_lexliterals = '' +_lexstateinfo = {'ppline': 'exclusive', 'pppragma': 'exclusive', 'INITIAL': 'inclusive'} +_lexstatere = {'ppline': [('(?P<t_ppline_FILENAME>"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P<t_ppline_LINE_NUMBER>(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P<t_ppline_NEWLINE>\\n)|(?P<t_ppline_PPLINE>line)', [None, ('t_ppline_FILENAME', 'FILENAME'), None, None, None, None, None, None, ('t_ppline_LINE_NUMBER', 'LINE_NUMBER'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ppline_NEWLINE', 'NEWLINE'), ('t_ppline_PPLINE', 'PPLINE')])], 'pppragma': [('(?P<t_pppragma_NEWLINE>\\n)|(?P<t_pppragma_PPPRAGMA>pragma)|(?P<t_pppragma_STR>"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P<t_pppragma_ID>[a-zA-Z_$][0-9a-zA-Z_$]*)', [None, ('t_pppragma_NEWLINE', 'NEWLINE'), ('t_pppragma_PPPRAGMA', 'PPPRAGMA'), ('t_pppragma_STR', 'STR'), None, None, None, None, None, None, ('t_pppragma_ID', 'ID')])], 'INITIAL': [('(?P<t_PPHASH>[ \\t]*\\#)|(?P<t_NEWLINE>\\n+)|(?P<t_LBRACE>\\{)|(?P<t_RBRACE>\\})|(?P<t_FLOAT_CONST>((((([0-9]*\\.[0-9]+)|([0-9]+\\.))([eE][-+]?[0-9]+)?)|([0-9]+([eE][-+]?[0-9]+)))[FfLl]?))|(?P<t_HEX_FLOAT_CONST>(0[xX]([0-9a-fA-F]+|((([0-9a-fA-F]+)?\\.[0-9a-fA-F]+)|([0-9a-fA-F]+\\.)))([pP][+-]?[0-9]+)[FfLl]?))|(?P<t_INT_CONST_HEX>0[xX][0-9a-fA-F]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)', [None, ('t_PPHASH', 'PPHASH'), ('t_NEWLINE', 'NEWLINE'), ('t_LBRACE', 'LBRACE'), ('t_RBRACE', 'RBRACE'), ('t_FLOAT_CONST', 'FLOAT_CONST'), None, None, None, None, None, None, None, None, None, ('t_HEX_FLOAT_CONST', 'HEX_FLOAT_CONST'), None, None, None, None, None, None, None, ('t_INT_CONST_HEX', 'INT_CONST_HEX')]), 
('(?P<t_INT_CONST_BIN>0[bB][01]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<t_BAD_CONST_OCT>0[0-7]*[89])|(?P<t_INT_CONST_OCT>0[0-7]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<t_INT_CONST_DEC>(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P<t_CHAR_CONST>\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))\')|(?P<t_WCHAR_CONST>L\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))\')|(?P<t_UNMATCHED_QUOTE>(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*\\n)|(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*$))|(?P<t_BAD_CHAR_CONST>(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))[^\'\n]+\')|(\'\')|(\'([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-7])[^\'\\n]*\'))', [None, ('t_INT_CONST_BIN', 'INT_CONST_BIN'), None, None, None, None, None, None, None, ('t_BAD_CONST_OCT', 'BAD_CONST_OCT'), ('t_INT_CONST_OCT', 'INT_CONST_OCT'), None, None, None, None, None, None, None, ('t_INT_CONST_DEC', 'INT_CONST_DEC'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_CHAR_CONST', 'CHAR_CONST'), None, None, None, None, None, None, ('t_WCHAR_CONST', 'WCHAR_CONST'), None, None, None, None, None, None, ('t_UNMATCHED_QUOTE', 'UNMATCHED_QUOTE'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_BAD_CHAR_CONST', 'BAD_CHAR_CONST')]), ('(?P<t_WSTRING_LITERAL>L"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P<t_BAD_STRING_LITERAL>"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-7])([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P<t_ID>[a-zA-Z_$][0-9a-zA-Z_$]*)|(?P<t_STRING_LITERAL>"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P<t_ELLIPSIS>\\.\\.\\.)|(?P<t_PLUSPLUS>\\+\\+)|(?P<t_LOR>\\|\\|)|(?P<t_XOREQUAL>\\^=)|(?P<t_OREQUAL>\\|=)|(?P<t_LSHIFTEQUAL><<=)|(?P<t_RSHIFTEQUAL>>>=)|(?P<t_PLUSEQUAL>\\+=)|(?P<t_TIMESEQUAL>\\*=)|(?P<t_PLUS>\\+)|(?P<t_MODEQUAL>%=)|(?P<t_DIVEQUAL>/=)', [None, ('t_WSTRING_LITERAL', 'WSTRING_LITERAL'), None, None, None, None, None, None, ('t_BAD_STRING_LITERAL', 'BAD_STRING_LITERAL'), None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ID', 'ID'), (None, 'STRING_LITERAL'), None, None, None, None, None, None, (None, 'ELLIPSIS'), (None, 'PLUSPLUS'), (None, 'LOR'), (None, 'XOREQUAL'), (None, 'OREQUAL'), (None, 'LSHIFTEQUAL'), (None, 'RSHIFTEQUAL'), (None, 'PLUSEQUAL'), (None, 'TIMESEQUAL'), (None, 'PLUS'), (None, 'MODEQUAL'), (None, 'DIVEQUAL')]), ('(?P<t_RBRACKET>\\])|(?P<t_CONDOP>\\?)|(?P<t_XOR>\\^)|(?P<t_LSHIFT><<)|(?P<t_LE><=)|(?P<t_LPAREN>\\()|(?P<t_ARROW>->)|(?P<t_EQ>==)|(?P<t_NE>!=)|(?P<t_MINUSMINUS>--)|(?P<t_OR>\\|)|(?P<t_TIMES>\\*)|(?P<t_LBRACKET>\\[)|(?P<t_GE>>=)|(?P<t_RPAREN>\\))|(?P<t_LAND>&&)|(?P<t_RSHIFT>>>)|(?P<t_MINUSEQUAL>-=)|(?P<t_PERIOD>\\.)|(?P<t_ANDEQUAL>&=)|(?P<t_EQUALS>=)|(?P<t_LT><)|(?P<t_COMMA>,)|(?P<t_DIVIDE>/)|(?P<t_AND>&)|(?P<t_MOD>%)|(?P<t_SEMI>;)|(?P<t_MINUS>-)|(?P<t_GT>>)|(?P<t_COLON>:)|(?P<t_NOT>~)|(?P<t_LNOT>!)', [None, (None, 'RBRACKET'), (None, 'CONDOP'), (None, 'XOR'), (None, 'LSHIFT'), (None, 'LE'), (None, 'LPAREN'), (None, 'ARROW'), (None, 'EQ'), (None, 'NE'), (None, 
'MINUSMINUS'), (None, 'OR'), (None, 'TIMES'), (None, 'LBRACKET'), (None, 'GE'), (None, 'RPAREN'), (None, 'LAND'), (None, 'RSHIFT'), (None, 'MINUSEQUAL'), (None, 'PERIOD'), (None, 'ANDEQUAL'), (None, 'EQUALS'), (None, 'LT'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'AND'), (None, 'MOD'), (None, 'SEMI'), (None, 'MINUS'), (None, 'GT'), (None, 'COLON'), (None, 'NOT'), (None, 'LNOT')])]} +_lexstateignore = {'ppline': ' \t', 'pppragma': ' \t<>.-{}();=+-*/$%@&^~!?:,0123456789', 'INITIAL': ' \t'} +_lexstateerrorf = {'ppline': 't_ppline_error', 'pppragma': 't_pppragma_error', 'INITIAL': 't_error'} diff --git a/lib/python3.4/site-packages/pycparser/ply/__init__.py b/lib/python3.4/site-packages/pycparser/ply/__init__.py new file mode 100644 index 0000000..853a985 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser/ply/__init__.py @@ -0,0 +1,4 @@ +# PLY package +# Author: David Beazley (dave@dabeaz.com) + +__all__ = ['lex','yacc'] diff --git a/lib/python3.4/site-packages/pycparser/ply/cpp.py b/lib/python3.4/site-packages/pycparser/ply/cpp.py new file mode 100644 index 0000000..5cad682 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser/ply/cpp.py @@ -0,0 +1,898 @@ +# ----------------------------------------------------------------------------- +# cpp.py +# +# Author: David Beazley (http://www.dabeaz.com) +# Copyright (C) 2007 +# All rights reserved +# +# This module implements an ANSI-C style lexical preprocessor for PLY. +# ----------------------------------------------------------------------------- +from __future__ import generators + +# ----------------------------------------------------------------------------- +# Default preprocessor lexer definitions. These tokens are enough to get +# a basic preprocessor working. Other modules may import these if they want +# ----------------------------------------------------------------------------- + +tokens = ( + 'CPP_ID','CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT', 'CPP_POUND','CPP_DPOUND' +) + +literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\"" + +# Whitespace +def t_CPP_WS(t): + r'\s+' + t.lexer.lineno += t.value.count("\n") + return t + +t_CPP_POUND = r'\#' +t_CPP_DPOUND = r'\#\#' + +# Identifier +t_CPP_ID = r'[A-Za-z_][\w_]*' + +# Integer literal +def CPP_INTEGER(t): + r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU]|[lL]|[uU][lL]|[lL][uU])?)' + return t + +t_CPP_INTEGER = CPP_INTEGER + +# Floating literal +t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?' + +# String literal +def t_CPP_STRING(t): + r'\"([^\\\n]|(\\(.|\n)))*?\"' + t.lexer.lineno += t.value.count("\n") + return t + +# Character constant 'c' or L'c' +def t_CPP_CHAR(t): + r'(L)?\'([^\\\n]|(\\(.|\n)))*?\'' + t.lexer.lineno += t.value.count("\n") + return t + +# Comment +def t_CPP_COMMENT(t): + r'(/\*(.|\n)*?\*/)|(//.*?\n)' + t.lexer.lineno += t.value.count("\n") + return t + +def t_error(t): + t.type = t.value[0] + t.value = t.value[0] + t.lexer.skip(1) + return t + +import re +import copy +import time +import os.path + +# ----------------------------------------------------------------------------- +# trigraph() +# +# Given an input string, this function replaces all trigraph sequences. +# The following mapping is used: +# +# ??= # +# ??/ \ +# ??' ^ +# ??( [ +# ??) ] +# ??! 
| +# ??< { +# ??> } +# ??- ~ +# ----------------------------------------------------------------------------- + +_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''') +_trigraph_rep = { + '=':'#', + '/':'\\', + "'":'^', + '(':'[', + ')':']', + '!':'|', + '<':'{', + '>':'}', + '-':'~' +} + +def trigraph(input): + return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]],input) + +# ------------------------------------------------------------------ +# Macro object +# +# This object holds information about preprocessor macros +# +# .name - Macro name (string) +# .value - Macro value (a list of tokens) +# .arglist - List of argument names +# .variadic - Boolean indicating whether or not variadic macro +# .vararg - Name of the variadic parameter +# +# When a macro is created, the macro replacement token sequence is +# pre-scanned and used to create patch lists that are later used +# during macro expansion +# ------------------------------------------------------------------ + +class Macro(object): + def __init__(self,name,value,arglist=None,variadic=False): + self.name = name + self.value = value + self.arglist = arglist + self.variadic = variadic + if variadic: + self.vararg = arglist[-1] + self.source = None + +# ------------------------------------------------------------------ +# Preprocessor object +# +# Object representing a preprocessor. Contains macro definitions, +# include directories, and other information +# ------------------------------------------------------------------ + +class Preprocessor(object): + def __init__(self,lexer=None): + if lexer is None: + lexer = lex.lexer + self.lexer = lexer + self.macros = { } + self.path = [] + self.temp_path = [] + + # Probe the lexer for selected tokens + self.lexprobe() + + tm = time.localtime() + self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm)) + self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm)) + self.parser = None + + # ----------------------------------------------------------------------------- + # tokenize() + # + # Utility function. Given a string of text, tokenize into a list of tokens + # ----------------------------------------------------------------------------- + + def tokenize(self,text): + tokens = [] + self.lexer.input(text) + while True: + tok = self.lexer.token() + if not tok: break + tokens.append(tok) + return tokens + + # --------------------------------------------------------------------- + # error() + # + # Report a preprocessor error/warning of some kind + # ---------------------------------------------------------------------- + + def error(self,file,line,msg): + print("%s:%d %s" % (file,line,msg)) + + # ---------------------------------------------------------------------- + # lexprobe() + # + # This method probes the preprocessor lexer object to discover + # the token types of symbols that are important to the preprocessor. + # If this works right, the preprocessor will simply "work" + # with any suitable lexer regardless of how tokens have been named. 
+ # ---------------------------------------------------------------------- + + def lexprobe(self): + + # Determine the token type for identifiers + self.lexer.input("identifier") + tok = self.lexer.token() + if not tok or tok.value != "identifier": + print("Couldn't determine identifier type") + else: + self.t_ID = tok.type + + # Determine the token type for integers + self.lexer.input("12345") + tok = self.lexer.token() + if not tok or int(tok.value) != 12345: + print("Couldn't determine integer type") + else: + self.t_INTEGER = tok.type + self.t_INTEGER_TYPE = type(tok.value) + + # Determine the token type for strings enclosed in double quotes + self.lexer.input("\"filename\"") + tok = self.lexer.token() + if not tok or tok.value != "\"filename\"": + print("Couldn't determine string type") + else: + self.t_STRING = tok.type + + # Determine the token type for whitespace--if any + self.lexer.input(" ") + tok = self.lexer.token() + if not tok or tok.value != " ": + self.t_SPACE = None + else: + self.t_SPACE = tok.type + + # Determine the token type for newlines + self.lexer.input("\n") + tok = self.lexer.token() + if not tok or tok.value != "\n": + self.t_NEWLINE = None + print("Couldn't determine token for newlines") + else: + self.t_NEWLINE = tok.type + + self.t_WS = (self.t_SPACE, self.t_NEWLINE) + + # Check for other characters used by the preprocessor + chars = [ '<','>','#','##','\\','(',')',',','.'] + for c in chars: + self.lexer.input(c) + tok = self.lexer.token() + if not tok or tok.value != c: + print("Unable to lex '%s' required for preprocessor" % c) + + # ---------------------------------------------------------------------- + # add_path() + # + # Adds a search path to the preprocessor. + # ---------------------------------------------------------------------- + + def add_path(self,path): + self.path.append(path) + + # ---------------------------------------------------------------------- + # group_lines() + # + # Given an input string, this function splits it into lines. Trailing whitespace + # is removed. Any line ending with \ is grouped with the next line. This + # function forms the lowest level of the preprocessor---grouping into text into + # a line-by-line format. + # ---------------------------------------------------------------------- + + def group_lines(self,input): + lex = self.lexer.clone() + lines = [x.rstrip() for x in input.splitlines()] + for i in xrange(len(lines)): + j = i+1 + while lines[i].endswith('\\') and (j < len(lines)): + lines[i] = lines[i][:-1]+lines[j] + lines[j] = "" + j += 1 + + input = "\n".join(lines) + lex.input(input) + lex.lineno = 1 + + current_line = [] + while True: + tok = lex.token() + if not tok: + break + current_line.append(tok) + if tok.type in self.t_WS and '\n' in tok.value: + yield current_line + current_line = [] + + if current_line: + yield current_line + + # ---------------------------------------------------------------------- + # tokenstrip() + # + # Remove leading/trailing whitespace tokens from a token list + # ---------------------------------------------------------------------- + + def tokenstrip(self,tokens): + i = 0 + while i < len(tokens) and tokens[i].type in self.t_WS: + i += 1 + del tokens[:i] + i = len(tokens)-1 + while i >= 0 and tokens[i].type in self.t_WS: + i -= 1 + del tokens[i+1:] + return tokens + + + # ---------------------------------------------------------------------- + # collect_args() + # + # Collects comma separated arguments from a list of tokens. 
The arguments + # must be enclosed in parenthesis. Returns a tuple (tokencount,args,positions) + # where tokencount is the number of tokens consumed, args is a list of arguments, + # and positions is a list of integers containing the starting index of each + # argument. Each argument is represented by a list of tokens. + # + # When collecting arguments, leading and trailing whitespace is removed + # from each argument. + # + # This function properly handles nested parenthesis and commas---these do not + # define new arguments. + # ---------------------------------------------------------------------- + + def collect_args(self,tokenlist): + args = [] + positions = [] + current_arg = [] + nesting = 1 + tokenlen = len(tokenlist) + + # Search for the opening '('. + i = 0 + while (i < tokenlen) and (tokenlist[i].type in self.t_WS): + i += 1 + + if (i < tokenlen) and (tokenlist[i].value == '('): + positions.append(i+1) + else: + self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments") + return 0, [], [] + + i += 1 + + while i < tokenlen: + t = tokenlist[i] + if t.value == '(': + current_arg.append(t) + nesting += 1 + elif t.value == ')': + nesting -= 1 + if nesting == 0: + if current_arg: + args.append(self.tokenstrip(current_arg)) + positions.append(i) + return i+1,args,positions + current_arg.append(t) + elif t.value == ',' and nesting == 1: + args.append(self.tokenstrip(current_arg)) + positions.append(i+1) + current_arg = [] + else: + current_arg.append(t) + i += 1 + + # Missing end argument + self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments") + return 0, [],[] + + # ---------------------------------------------------------------------- + # macro_prescan() + # + # Examine the macro value (token sequence) and identify patch points + # This is used to speed up macro expansion later on---we'll know + # right away where to apply patches to the value to form the expansion + # ---------------------------------------------------------------------- + + def macro_prescan(self,macro): + macro.patch = [] # Standard macro arguments + macro.str_patch = [] # String conversion expansion + macro.var_comma_patch = [] # Variadic macro comma patch + i = 0 + while i < len(macro.value): + if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist: + argnum = macro.arglist.index(macro.value[i].value) + # Conversion of argument to a string + if i > 0 and macro.value[i-1].value == '#': + macro.value[i] = copy.copy(macro.value[i]) + macro.value[i].type = self.t_STRING + del macro.value[i-1] + macro.str_patch.append((argnum,i-1)) + continue + # Concatenation + elif (i > 0 and macro.value[i-1].value == '##'): + macro.patch.append(('c',argnum,i-1)) + del macro.value[i-1] + continue + elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'): + macro.patch.append(('c',argnum,i)) + i += 1 + continue + # Standard expansion + else: + macro.patch.append(('e',argnum,i)) + elif macro.value[i].value == '##': + if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \ + ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \ + (macro.value[i+1].value == macro.vararg): + macro.var_comma_patch.append(i-1) + i += 1 + macro.patch.sort(key=lambda x: x[2],reverse=True) + + # ---------------------------------------------------------------------- + # macro_expand_args() + # + # Given a Macro and list of arguments (each a token list), this method + # returns an expanded version of a macro. 
The return value is a token sequence + # representing the replacement macro tokens + # ---------------------------------------------------------------------- + + def macro_expand_args(self,macro,args): + # Make a copy of the macro token sequence + rep = [copy.copy(_x) for _x in macro.value] + + # Make string expansion patches. These do not alter the length of the replacement sequence + + str_expansion = {} + for argnum, i in macro.str_patch: + if argnum not in str_expansion: + str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\") + rep[i] = copy.copy(rep[i]) + rep[i].value = str_expansion[argnum] + + # Make the variadic macro comma patch. If the variadic macro argument is empty, we get rid + comma_patch = False + if macro.variadic and not args[-1]: + for i in macro.var_comma_patch: + rep[i] = None + comma_patch = True + + # Make all other patches. The order of these matters. It is assumed that the patch list + # has been sorted in reverse order of patch location since replacements will cause the + # size of the replacement sequence to expand from the patch point. + + expanded = { } + for ptype, argnum, i in macro.patch: + # Concatenation. Argument is left unexpanded + if ptype == 'c': + rep[i:i+1] = args[argnum] + # Normal expansion. Argument is macro expanded first + elif ptype == 'e': + if argnum not in expanded: + expanded[argnum] = self.expand_macros(args[argnum]) + rep[i:i+1] = expanded[argnum] + + # Get rid of removed comma if necessary + if comma_patch: + rep = [_i for _i in rep if _i] + + return rep + + + # ---------------------------------------------------------------------- + # expand_macros() + # + # Given a list of tokens, this function performs macro expansion. + # The expanded argument is a dictionary that contains macros already + # expanded. This is used to prevent infinite recursion. 
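+    # For example (hypothetical): with "#define A B" and "#define B A",
+    # expanding A records 'A' in expanded before recursing, so when the
+    # replacement B expands back to A the name is skipped and expansion
+    # terminates instead of looping forever.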
+ # ---------------------------------------------------------------------- + + def expand_macros(self,tokens,expanded=None): + if expanded is None: + expanded = {} + i = 0 + while i < len(tokens): + t = tokens[i] + if t.type == self.t_ID: + if t.value in self.macros and t.value not in expanded: + # Yes, we found a macro match + expanded[t.value] = True + + m = self.macros[t.value] + if not m.arglist: + # A simple macro + ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded) + for e in ex: + e.lineno = t.lineno + tokens[i:i+1] = ex + i += len(ex) + else: + # A macro with arguments + j = i + 1 + while j < len(tokens) and tokens[j].type in self.t_WS: + j += 1 + if tokens[j].value == '(': + tokcount,args,positions = self.collect_args(tokens[j:]) + if not m.variadic and len(args) != len(m.arglist): + self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist))) + i = j + tokcount + elif m.variadic and len(args) < len(m.arglist)-1: + if len(m.arglist) > 2: + self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1)) + else: + self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1)) + i = j + tokcount + else: + if m.variadic: + if len(args) == len(m.arglist)-1: + args.append([]) + else: + args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1] + del args[len(m.arglist):] + + # Get macro replacement text + rep = self.macro_expand_args(m,args) + rep = self.expand_macros(rep,expanded) + for r in rep: + r.lineno = t.lineno + tokens[i:j+tokcount] = rep + i += len(rep) + del expanded[t.value] + continue + elif t.value == '__LINE__': + t.type = self.t_INTEGER + t.value = self.t_INTEGER_TYPE(t.lineno) + + i += 1 + return tokens + + # ---------------------------------------------------------------------- + # evalexpr() + # + # Evaluate an expression token sequence for the purposes of evaluating + # integral expressions. 
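+    # For example (hypothetical): in "#if defined(FOO) && BAR" with neither
+    # name defined, defined(FOO) folds to an integer 0, the leftover
+    # identifier BAR becomes 0L, "&&" is rewritten to " and ", and the
+    # resulting Python expression evaluates false.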
+ # ---------------------------------------------------------------------- + + def evalexpr(self,tokens): + # tokens = tokenize(line) + # Search for defined macros + i = 0 + while i < len(tokens): + if tokens[i].type == self.t_ID and tokens[i].value == 'defined': + j = i + 1 + needparen = False + result = "0L" + while j < len(tokens): + if tokens[j].type in self.t_WS: + j += 1 + continue + elif tokens[j].type == self.t_ID: + if tokens[j].value in self.macros: + result = "1L" + else: + result = "0L" + if not needparen: break + elif tokens[j].value == '(': + needparen = True + elif tokens[j].value == ')': + break + else: + self.error(self.source,tokens[i].lineno,"Malformed defined()") + j += 1 + tokens[i].type = self.t_INTEGER + tokens[i].value = self.t_INTEGER_TYPE(result) + del tokens[i+1:j+1] + i += 1 + tokens = self.expand_macros(tokens) + for i,t in enumerate(tokens): + if t.type == self.t_ID: + tokens[i] = copy.copy(t) + tokens[i].type = self.t_INTEGER + tokens[i].value = self.t_INTEGER_TYPE("0L") + elif t.type == self.t_INTEGER: + tokens[i] = copy.copy(t) + # Strip off any trailing suffixes + tokens[i].value = str(tokens[i].value) + while tokens[i].value[-1] not in "0123456789abcdefABCDEF": + tokens[i].value = tokens[i].value[:-1] + + expr = "".join([str(x.value) for x in tokens]) + expr = expr.replace("&&"," and ") + expr = expr.replace("||"," or ") + expr = expr.replace("!"," not ") + try: + result = eval(expr) + except StandardError: + self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression") + result = 0 + return result + + # ---------------------------------------------------------------------- + # parsegen() + # + # Parse an input string/ + # ---------------------------------------------------------------------- + def parsegen(self,input,source=None): + + # Replace trigraph sequences + t = trigraph(input) + lines = self.group_lines(t) + + if not source: + source = "" + + self.define("__FILE__ \"%s\"" % source) + + self.source = source + chunk = [] + enable = True + iftrigger = False + ifstack = [] + + for x in lines: + for i,tok in enumerate(x): + if tok.type not in self.t_WS: break + if tok.value == '#': + # Preprocessor directive + + for tok in x: + if tok in self.t_WS and '\n' in tok.value: + chunk.append(tok) + + dirtokens = self.tokenstrip(x[i+1:]) + if dirtokens: + name = dirtokens[0].value + args = self.tokenstrip(dirtokens[1:]) + else: + name = "" + args = [] + + if name == 'define': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + self.define(args) + elif name == 'include': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + oldfile = self.macros['__FILE__'] + for tok in self.include(args): + yield tok + self.macros['__FILE__'] = oldfile + self.source = source + elif name == 'undef': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + self.undef(args) + elif name == 'ifdef': + ifstack.append((enable,iftrigger)) + if enable: + if not args[0].value in self.macros: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'ifndef': + ifstack.append((enable,iftrigger)) + if enable: + if args[0].value in self.macros: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'if': + ifstack.append((enable,iftrigger)) + if enable: + result = self.evalexpr(args) + if not result: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'elif': + if ifstack: + if ifstack[-1][0]: # We only pay attention if outer "if" 
allows this
+                            if enable:               # If already true, we flip enable False
+                                enable = False
+                            elif not iftrigger:      # If False, but not triggered yet, we'll check expression
+                                result = self.evalexpr(args)
+                                if result:
+                                    enable  = True
+                                    iftrigger = True
+                    else:
+                        self.error(self.source,dirtokens[0].lineno,"Misplaced #elif")
+
+                elif name == 'else':
+                    if ifstack:
+                        if ifstack[-1][0]:
+                            if enable:
+                                enable = False
+                            elif not iftrigger:
+                                enable = True
+                                iftrigger = True
+                    else:
+                        self.error(self.source,dirtokens[0].lineno,"Misplaced #else")
+
+                elif name == 'endif':
+                    if ifstack:
+                        enable,iftrigger = ifstack.pop()
+                    else:
+                        self.error(self.source,dirtokens[0].lineno,"Misplaced #endif")
+                else:
+                    # Unknown preprocessor directive
+                    pass
+
+            else:
+                # Normal text
+                if enable:
+                    chunk.extend(x)
+
+        for tok in self.expand_macros(chunk):
+            yield tok
+        chunk = []
+
+    # ----------------------------------------------------------------------
+    # include()
+    #
+    # Implementation of file-inclusion
+    # ----------------------------------------------------------------------
+
+    def include(self,tokens):
+        # Try to extract the filename and then process an include file
+        if not tokens:
+            return
+        if tokens:
+            if tokens[0].value != '<' and tokens[0].type != self.t_STRING:
+                tokens = self.expand_macros(tokens)
+
+            if tokens[0].value == '<':
+                # Include <...>
+                i = 1
+                while i < len(tokens):
+                    if tokens[i].value == '>':
+                        break
+                    i += 1
+                else:
+                    print("Malformed #include <...>")
+                    return
+                filename = "".join([x.value for x in tokens[1:i]])
+                path = self.path + [""] + self.temp_path
+            elif tokens[0].type == self.t_STRING:
+                filename = tokens[0].value[1:-1]
+                path = self.temp_path + [""] + self.path
+            else:
+                print("Malformed #include statement")
+                return
+        for p in path:
+            iname = os.path.join(p,filename)
+            try:
+                data = open(iname,"r").read()
+                dname = os.path.dirname(iname)
+                if dname:
+                    self.temp_path.insert(0,dname)
+                for tok in self.parsegen(data,filename):
+                    yield tok
+                if dname:
+                    del self.temp_path[0]
+                break
+            except IOError:
+                pass
+        else:
+            print("Couldn't find '%s'" % filename)
+
+    # ----------------------------------------------------------------------
+    # define()
+    #
+    # Define a new macro
+    # ----------------------------------------------------------------------
+
+    def define(self,tokens):
+        if isinstance(tokens,str):
+            tokens = self.tokenize(tokens)
+
+        linetok = tokens
+        try:
+            name = linetok[0]
+            if len(linetok) > 1:
+                mtype = linetok[1]
+            else:
+                mtype = None
+            if not mtype:
+                m = Macro(name.value,[])
+                self.macros[name.value] = m
+            elif mtype.type in self.t_WS:
+                # A normal macro
+                m = Macro(name.value,self.tokenstrip(linetok[2:]))
+                self.macros[name.value] = m
+            elif mtype.value == '(':
+                # A macro with arguments
+                tokcount, args, positions = self.collect_args(linetok[1:])
+                variadic = False
+                for a in args:
+                    if variadic:
+                        print("No more arguments may follow a variadic argument")
+                        break
+                    astr = "".join([str(_i.value) for _i in a])
+                    if astr == "...":
+                        variadic = True
+                        a[0].type = self.t_ID
+                        a[0].value = '__VA_ARGS__'
+                        del a[1:]
+                        continue
+                    elif astr[-3:] == "..." and a[0].type == self.t_ID:
+                        variadic = True
+                        del a[1:]
+                        # If, for some reason, "." is part of the identifier, strip off the name for the purposes
+                        # of macro expansion
+                        if a[0].value[-3:] == '...':
+                            a[0].value = a[0].value[:-3]
+                        continue
+                    if len(a) > 1 or a[0].type != self.t_ID:
+                        print("Invalid macro argument")
+                        break
+                else:
+                    mvalue = self.tokenstrip(linetok[1+tokcount:])
+                    i = 0
+                    while i < len(mvalue):
+                        if i+1 < len(mvalue):
+                            if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##':
+                                del mvalue[i]
+                                continue
+                            elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS:
+                                del mvalue[i+1]
+                        i += 1
+                    m = Macro(name.value,mvalue,[x[0].value for x in args],variadic)
+                    self.macro_prescan(m)
+                    self.macros[name.value] = m
+            else:
+                print("Bad macro definition")
+        except LookupError:
+            print("Bad macro definition")
+
+    # ----------------------------------------------------------------------
+    # undef()
+    #
+    # Undefine a macro
+    # ----------------------------------------------------------------------
+
+    def undef(self,tokens):
+        id = tokens[0].value
+        try:
+            del self.macros[id]
+        except LookupError:
+            pass
+
+    # ----------------------------------------------------------------------
+    # parse()
+    #
+    # Parse input text.
+    # ----------------------------------------------------------------------
+    def parse(self,input,source=None,ignore={}):
+        self.ignore = ignore
+        self.parser = self.parsegen(input,source)
+
+    # ----------------------------------------------------------------------
+    # token()
+    #
+    # Method to return individual tokens
+    # ----------------------------------------------------------------------
+    def token(self):
+        try:
+            while True:
+                tok = next(self.parser)
+                if tok.type not in self.ignore: return tok
+        except StopIteration:
+            self.parser = None
+            return None
+
+if __name__ == '__main__':
+    import ply.lex as lex
+    lexer = lex.lex()
+
+    # Run a preprocessor
+    import sys
+    f = open(sys.argv[1])
+    input = f.read()
+
+    p = Preprocessor(lexer)
+    p.parse(input,sys.argv[1])
+    while True:
+        tok = p.token()
+        if not tok: break
+        print(p.source, tok)
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/python3.4/site-packages/pycparser/ply/ctokens.py b/lib/python3.4/site-packages/pycparser/ply/ctokens.py
new file mode 100644
index 0000000..dd5f102
--- /dev/null
+++ b/lib/python3.4/site-packages/pycparser/ply/ctokens.py
@@ -0,0 +1,133 @@
+# ----------------------------------------------------------------------
+# ctokens.py
+#
+# Token specifications for symbols in ANSI C and C++.  This file is
+# meant to be used as a library in other tokenizers.
+# ----------------------------------------------------------------------
+
+# Reserved words
+
+tokens = [
+    # Literals (identifier, integer constant, float constant, string constant, char const)
+    'ID', 'TYPEID', 'ICONST', 'FCONST', 'SCONST', 'CCONST',
+
+    # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
+    'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
+    'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
+    'LOR', 'LAND', 'LNOT',
+    'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
+
+    # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
+    'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
+    'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',
+
+    # Increment/decrement (++,--)
+    'PLUSPLUS', 'MINUSMINUS',
+
+    # Structure dereference (->)
+    'ARROW',
+
+    # Ternary operator (?)
+    'TERNARY',
+
+    # Delimiters ( ) [ ] { } , . ; :
+    'LPAREN', 'RPAREN',
+    'LBRACKET', 'RBRACKET',
+    'LBRACE', 'RBRACE',
+    'COMMA', 'PERIOD', 'SEMI', 'COLON',
+
+    # Ellipsis (...)
+    'ELLIPSIS',
+]
+
+# Operators
+t_PLUS             = r'\+'
+t_MINUS            = r'-'
+t_TIMES            = r'\*'
+t_DIVIDE           = r'/'
+t_MOD              = r'%'
+t_OR               = r'\|'
+t_AND              = r'&'
+t_NOT              = r'~'
+t_XOR              = r'\^'
+t_LSHIFT           = r'<<'
+t_RSHIFT           = r'>>'
+t_LOR              = r'\|\|'
+t_LAND             = r'&&'
+t_LNOT             = r'!'
+t_LT               = r'<'
+t_GT               = r'>'
+t_LE               = r'<='
+t_GE               = r'>='
+t_EQ               = r'=='
+t_NE               = r'!='
+
+# Assignment operators
+
+t_EQUALS           = r'='
+t_TIMESEQUAL       = r'\*='
+t_DIVEQUAL         = r'/='
+t_MODEQUAL         = r'%='
+t_PLUSEQUAL        = r'\+='
+t_MINUSEQUAL       = r'-='
+t_LSHIFTEQUAL      = r'<<='
+t_RSHIFTEQUAL      = r'>>='
+t_ANDEQUAL         = r'&='
+t_OREQUAL          = r'\|='
+t_XOREQUAL         = r'\^='
+
+# Increment/decrement
+t_PLUSPLUS         = r'\+\+'
+t_MINUSMINUS       = r'--'
+
+# ->
+t_ARROW            = r'->'
+
+# ?
+t_TERNARY          = r'\?'
+
+# Delimiters
+t_LPAREN           = r'\('
+t_RPAREN           = r'\)'
+t_LBRACKET         = r'\['
+t_RBRACKET         = r'\]'
+t_LBRACE           = r'\{'
+t_RBRACE           = r'\}'
+t_COMMA            = r','
+t_PERIOD           = r'\.'
+t_SEMI             = r';'
+t_COLON            = r':'
+t_ELLIPSIS         = r'\.\.\.'
+
+# Identifiers
+t_ID = r'[A-Za-z_][A-Za-z0-9_]*'
+
+# Integer literal
+t_ICONST = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'
+
+# Floating literal
+t_FCONST = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
+
+# String literal
+t_SCONST = r'\"([^\\\n]|(\\.))*?\"'
+
+# Character constant 'c' or L'c'
+t_CCONST = r'(L)?\'([^\\\n]|(\\.))*?\''
+
+# Comment (C-Style)
+def t_COMMENT(t):
+    r'/\*(.|\n)*?\*/'
+    t.lexer.lineno += t.value.count('\n')
+    return t
+
+# Comment (C++-Style)
+def t_CPPCOMMENT(t):
+    r'//.*\n'
+    t.lexer.lineno += 1
+    return t
+
+
+
+
+
diff --git a/lib/python3.4/site-packages/pycparser/ply/lex.py b/lib/python3.4/site-packages/pycparser/ply/lex.py
new file mode 100644
index 0000000..bd32da9
--- /dev/null
+++ b/lib/python3.4/site-packages/pycparser/ply/lex.py
@@ -0,0 +1,1058 @@
+# -----------------------------------------------------------------------------
+# ply: lex.py
+#
+# Copyright (C) 2001-2011,
+# David M. Beazley (Dabeaz LLC)
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+#   this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+# * Neither the name of the David Beazley or Dabeaz LLC may be used to
+#   endorse or promote products derived from this software without
+#   specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
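The lex.py module introduced by this hunk builds a working tokenizer out of exactly the kind of declarations ctokens.py above contains: a `tokens` list plus `t_`-prefixed rules handed to `lex.lex()`. A minimal usage sketch follows; the token names and input string here are illustrative only, not part of this commit.

import ply.lex as lex

tokens = ('ID', 'NUMBER', 'PLUS')        # hypothetical token names

t_PLUS   = r'\+'                         # simple tokens are plain regex strings
t_ID     = r'[A-Za-z_][A-Za-z0-9_]*'
t_ignore = ' \t'                         # characters skipped between tokens

def t_NUMBER(t):                         # function rules may transform the value
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):                          # invoked on illegal characters
    print("Illegal character %r" % t.value[0])
    t.lexer.skip(1)

lexer = lex.lex()                        # introspects this module, builds the master regex
lexer.input("x + 42")
for tok in lexer:
    print(tok.type, tok.value)           # ID x, PLUS +, NUMBER 42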
+# ----------------------------------------------------------------------------- + +__version__ = "3.4" +__tabversion__ = "3.2" # Version of table file used + +import re, sys, types, copy, os + +# This tuple contains known string types +try: + # Python 2.6 + StringTypes = (types.StringType, types.UnicodeType) +except AttributeError: + # Python 3.0 + StringTypes = (str, bytes) + +# Extract the code attribute of a function. Different implementations +# are for Python 2/3 compatibility. + +if sys.version_info[0] < 3: + def func_code(f): + return f.func_code +else: + def func_code(f): + return f.__code__ + +# This regular expression is used to match valid token names +_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$') + +# Exception thrown when invalid token encountered and no default error +# handler is defined. + +class LexError(Exception): + def __init__(self,message,s): + self.args = (message,) + self.text = s + +# Token class. This class is used to represent the tokens produced. +class LexToken(object): + def __str__(self): + return "LexToken(%s,%r,%d,%d)" % (self.type,self.value,self.lineno,self.lexpos) + def __repr__(self): + return str(self) + +# This object is a stand-in for a logging object created by the +# logging module. + +class PlyLogger(object): + def __init__(self,f): + self.f = f + def critical(self,msg,*args,**kwargs): + self.f.write((msg % args) + "\n") + + def warning(self,msg,*args,**kwargs): + self.f.write("WARNING: "+ (msg % args) + "\n") + + def error(self,msg,*args,**kwargs): + self.f.write("ERROR: " + (msg % args) + "\n") + + info = critical + debug = critical + +# Null logger is used when no output is generated. Does nothing. +class NullLogger(object): + def __getattribute__(self,name): + return self + def __call__(self,*args,**kwargs): + return self + +# ----------------------------------------------------------------------------- +# === Lexing Engine === +# +# The following Lexer class implements the lexer runtime. There are only +# a few public methods and attributes: +# +# input() - Store a new string in the lexer +# token() - Get the next token +# clone() - Clone the lexer +# +# lineno - Current line number +# lexpos - Current position in the input string +# ----------------------------------------------------------------------------- + +class Lexer: + def __init__(self): + self.lexre = None # Master regular expression. 
This is a list of + # tuples (re,findex) where re is a compiled + # regular expression and findex is a list + # mapping regex group numbers to rules + self.lexretext = None # Current regular expression strings + self.lexstatere = {} # Dictionary mapping lexer states to master regexs + self.lexstateretext = {} # Dictionary mapping lexer states to regex strings + self.lexstaterenames = {} # Dictionary mapping lexer states to symbol names + self.lexstate = "INITIAL" # Current lexer state + self.lexstatestack = [] # Stack of lexer states + self.lexstateinfo = None # State information + self.lexstateignore = {} # Dictionary of ignored characters for each state + self.lexstateerrorf = {} # Dictionary of error functions for each state + self.lexreflags = 0 # Optional re compile flags + self.lexdata = None # Actual input data (as a string) + self.lexpos = 0 # Current position in input text + self.lexlen = 0 # Length of the input text + self.lexerrorf = None # Error rule (if any) + self.lextokens = None # List of valid tokens + self.lexignore = "" # Ignored characters + self.lexliterals = "" # Literal characters that can be passed through + self.lexmodule = None # Module + self.lineno = 1 # Current line number + self.lexoptimize = 0 # Optimized mode + + def clone(self,object=None): + c = copy.copy(self) + + # If the object parameter has been supplied, it means we are attaching the + # lexer to a new object. In this case, we have to rebind all methods in + # the lexstatere and lexstateerrorf tables. + + if object: + newtab = { } + for key, ritem in self.lexstatere.items(): + newre = [] + for cre, findex in ritem: + newfindex = [] + for f in findex: + if not f or not f[0]: + newfindex.append(f) + continue + newfindex.append((getattr(object,f[0].__name__),f[1])) + newre.append((cre,newfindex)) + newtab[key] = newre + c.lexstatere = newtab + c.lexstateerrorf = { } + for key, ef in self.lexstateerrorf.items(): + c.lexstateerrorf[key] = getattr(object,ef.__name__) + c.lexmodule = object + return c + + # ------------------------------------------------------------ + # writetab() - Write lexer information to a table file + # ------------------------------------------------------------ + def writetab(self,tabfile,outputdir=""): + if isinstance(tabfile,types.ModuleType): + return + basetabfilename = tabfile.split(".")[-1] + filename = os.path.join(outputdir,basetabfilename)+".py" + tf = open(filename,"w") + tf.write("# %s.py. This file automatically created by PLY (version %s). 
Don't edit!\n" % (tabfile,__version__)) + tf.write("_tabversion = %s\n" % repr(__version__)) + tf.write("_lextokens = %s\n" % repr(self.lextokens)) + tf.write("_lexreflags = %s\n" % repr(self.lexreflags)) + tf.write("_lexliterals = %s\n" % repr(self.lexliterals)) + tf.write("_lexstateinfo = %s\n" % repr(self.lexstateinfo)) + + tabre = { } + # Collect all functions in the initial state + initial = self.lexstatere["INITIAL"] + initialfuncs = [] + for part in initial: + for f in part[1]: + if f and f[0]: + initialfuncs.append(f) + + for key, lre in self.lexstatere.items(): + titem = [] + for i in range(len(lre)): + titem.append((self.lexstateretext[key][i],_funcs_to_names(lre[i][1],self.lexstaterenames[key][i]))) + tabre[key] = titem + + tf.write("_lexstatere = %s\n" % repr(tabre)) + tf.write("_lexstateignore = %s\n" % repr(self.lexstateignore)) + + taberr = { } + for key, ef in self.lexstateerrorf.items(): + if ef: + taberr[key] = ef.__name__ + else: + taberr[key] = None + tf.write("_lexstateerrorf = %s\n" % repr(taberr)) + tf.close() + + # ------------------------------------------------------------ + # readtab() - Read lexer information from a tab file + # ------------------------------------------------------------ + def readtab(self,tabfile,fdict): + if isinstance(tabfile,types.ModuleType): + lextab = tabfile + else: + if sys.version_info[0] < 3: + exec("import %s as lextab" % tabfile) + else: + env = { } + exec("import %s as lextab" % tabfile, env,env) + lextab = env['lextab'] + + if getattr(lextab,"_tabversion","0.0") != __version__: + raise ImportError("Inconsistent PLY version") + + self.lextokens = lextab._lextokens + self.lexreflags = lextab._lexreflags + self.lexliterals = lextab._lexliterals + self.lexstateinfo = lextab._lexstateinfo + self.lexstateignore = lextab._lexstateignore + self.lexstatere = { } + self.lexstateretext = { } + for key,lre in lextab._lexstatere.items(): + titem = [] + txtitem = [] + for i in range(len(lre)): + titem.append((re.compile(lre[i][0],lextab._lexreflags | re.VERBOSE),_names_to_funcs(lre[i][1],fdict))) + txtitem.append(lre[i][0]) + self.lexstatere[key] = titem + self.lexstateretext[key] = txtitem + self.lexstateerrorf = { } + for key,ef in lextab._lexstateerrorf.items(): + self.lexstateerrorf[key] = fdict[ef] + self.begin('INITIAL') + + # ------------------------------------------------------------ + # input() - Push a new string into the lexer + # ------------------------------------------------------------ + def input(self,s): + # Pull off the first character to see if s looks like a string + c = s[:1] + if not isinstance(c,StringTypes): + raise ValueError("Expected a string") + self.lexdata = s + self.lexpos = 0 + self.lexlen = len(s) + + # ------------------------------------------------------------ + # begin() - Changes the lexing state + # ------------------------------------------------------------ + def begin(self,state): + if not state in self.lexstatere: + raise ValueError("Undefined state") + self.lexre = self.lexstatere[state] + self.lexretext = self.lexstateretext[state] + self.lexignore = self.lexstateignore.get(state,"") + self.lexerrorf = self.lexstateerrorf.get(state,None) + self.lexstate = state + + # ------------------------------------------------------------ + # push_state() - Changes the lexing state and saves old on stack + # ------------------------------------------------------------ + def push_state(self,state): + self.lexstatestack.append(self.lexstate) + self.begin(state) + + # 
------------------------------------------------------------ + # pop_state() - Restores the previous state + # ------------------------------------------------------------ + def pop_state(self): + self.begin(self.lexstatestack.pop()) + + # ------------------------------------------------------------ + # current_state() - Returns the current lexing state + # ------------------------------------------------------------ + def current_state(self): + return self.lexstate + + # ------------------------------------------------------------ + # skip() - Skip ahead n characters + # ------------------------------------------------------------ + def skip(self,n): + self.lexpos += n + + # ------------------------------------------------------------ + # opttoken() - Return the next token from the Lexer + # + # Note: This function has been carefully implemented to be as fast + # as possible. Don't make changes unless you really know what + # you are doing + # ------------------------------------------------------------ + def token(self): + # Make local copies of frequently referenced attributes + lexpos = self.lexpos + lexlen = self.lexlen + lexignore = self.lexignore + lexdata = self.lexdata + + while lexpos < lexlen: + # This code provides some short-circuit code for whitespace, tabs, and other ignored characters + if lexdata[lexpos] in lexignore: + lexpos += 1 + continue + + # Look for a regular expression match + for lexre,lexindexfunc in self.lexre: + m = lexre.match(lexdata,lexpos) + if not m: continue + + # Create a token for return + tok = LexToken() + tok.value = m.group() + tok.lineno = self.lineno + tok.lexpos = lexpos + + i = m.lastindex + func,tok.type = lexindexfunc[i] + + if not func: + # If no token type was set, it's an ignored token + if tok.type: + self.lexpos = m.end() + return tok + else: + lexpos = m.end() + break + + lexpos = m.end() + + # If token is processed by a function, call it + + tok.lexer = self # Set additional attributes useful in token rules + self.lexmatch = m + self.lexpos = lexpos + + newtok = func(tok) + + # Every function must return a token, if nothing, we just move to next token + if not newtok: + lexpos = self.lexpos # This is here in case user has updated lexpos. + lexignore = self.lexignore # This is here in case there was a state change + break + + # Verify type of the token. If not in the token map, raise an error + if not self.lexoptimize: + if not newtok.type in self.lextokens: + raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % ( + func_code(func).co_filename, func_code(func).co_firstlineno, + func.__name__, newtok.type),lexdata[lexpos:]) + + return newtok + else: + # No match, see if in literals + if lexdata[lexpos] in self.lexliterals: + tok = LexToken() + tok.value = lexdata[lexpos] + tok.lineno = self.lineno + tok.type = tok.value + tok.lexpos = lexpos + self.lexpos = lexpos + 1 + return tok + + # No match. Call t_error() if defined. + if self.lexerrorf: + tok = LexToken() + tok.value = self.lexdata[lexpos:] + tok.lineno = self.lineno + tok.type = "error" + tok.lexer = self + tok.lexpos = lexpos + self.lexpos = lexpos + newtok = self.lexerrorf(tok) + if lexpos == self.lexpos: + # Error method didn't change text position at all. This is an error. + raise LexError("Scanning error. 
Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:]) + lexpos = self.lexpos + if not newtok: continue + return newtok + + self.lexpos = lexpos + raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos],lexpos), lexdata[lexpos:]) + + self.lexpos = lexpos + 1 + if self.lexdata is None: + raise RuntimeError("No input string given with input()") + return None + + # Iterator interface + def __iter__(self): + return self + + def next(self): + t = self.token() + if t is None: + raise StopIteration + return t + + __next__ = next + +# ----------------------------------------------------------------------------- +# ==== Lex Builder === +# +# The functions and classes below are used to collect lexing information +# and build a Lexer object from it. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# get_caller_module_dict() +# +# This function returns a dictionary containing all of the symbols defined within +# a caller further down the call stack. This is used to get the environment +# associated with the yacc() call if none was provided. +# ----------------------------------------------------------------------------- + +def get_caller_module_dict(levels): + try: + raise RuntimeError + except RuntimeError: + e,b,t = sys.exc_info() + f = t.tb_frame + while levels > 0: + f = f.f_back + levels -= 1 + ldict = f.f_globals.copy() + if f.f_globals != f.f_locals: + ldict.update(f.f_locals) + + return ldict + +# ----------------------------------------------------------------------------- +# _funcs_to_names() +# +# Given a list of regular expression functions, this converts it to a list +# suitable for output to a table file +# ----------------------------------------------------------------------------- + +def _funcs_to_names(funclist,namelist): + result = [] + for f,name in zip(funclist,namelist): + if f and f[0]: + result.append((name, f[1])) + else: + result.append(f) + return result + +# ----------------------------------------------------------------------------- +# _names_to_funcs() +# +# Given a list of regular expression function names, this converts it back to +# functions. +# ----------------------------------------------------------------------------- + +def _names_to_funcs(namelist,fdict): + result = [] + for n in namelist: + if n and n[0]: + result.append((fdict[n[0]],n[1])) + else: + result.append(n) + return result + +# ----------------------------------------------------------------------------- +# _form_master_re() +# +# This function takes a list of all of the regex components and attempts to +# form the master regular expression. Given limitations in the Python re +# module, it may be necessary to break the master regex into separate expressions. 
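The master-regex idea behind `_form_master_re` can be illustrated with plain `re` (this is a standalone sketch, not code from the commit): every rule becomes a named group, the groups are OR-ed into one pattern, and the match's last group identifies the winning rule. The split-and-recurse fallback exists because older versions of the re module cap how many groups one pattern may contain (historically around 100 named groups).

import re

# Two made-up rules OR-ed into a single master pattern, as _form_master_re does.
master = re.compile(r"(?P<t_NUMBER>\d+)|(?P<t_ID>[A-Za-z_][A-Za-z0-9_]*)", re.VERBOSE)

m = master.match("foo123")
print(m.lastgroup)   # 't_ID'   -- which rule matched
print(m.lastindex)   # 2        -- group number, used to index the rule table
print(m.group())     # 'foo123'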
+# ----------------------------------------------------------------------------- + +def _form_master_re(relist,reflags,ldict,toknames): + if not relist: return [] + regex = "|".join(relist) + try: + lexre = re.compile(regex,re.VERBOSE | reflags) + + # Build the index to function map for the matching engine + lexindexfunc = [ None ] * (max(lexre.groupindex.values())+1) + lexindexnames = lexindexfunc[:] + + for f,i in lexre.groupindex.items(): + handle = ldict.get(f,None) + if type(handle) in (types.FunctionType, types.MethodType): + lexindexfunc[i] = (handle,toknames[f]) + lexindexnames[i] = f + elif handle is not None: + lexindexnames[i] = f + if f.find("ignore_") > 0: + lexindexfunc[i] = (None,None) + else: + lexindexfunc[i] = (None, toknames[f]) + + return [(lexre,lexindexfunc)],[regex],[lexindexnames] + except Exception: + m = int(len(relist)/2) + if m == 0: m = 1 + llist, lre, lnames = _form_master_re(relist[:m],reflags,ldict,toknames) + rlist, rre, rnames = _form_master_re(relist[m:],reflags,ldict,toknames) + return llist+rlist, lre+rre, lnames+rnames + +# ----------------------------------------------------------------------------- +# def _statetoken(s,names) +# +# Given a declaration name s of the form "t_" and a dictionary whose keys are +# state names, this function returns a tuple (states,tokenname) where states +# is a tuple of state names and tokenname is the name of the token. For example, +# calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM') +# ----------------------------------------------------------------------------- + +def _statetoken(s,names): + nonstate = 1 + parts = s.split("_") + for i in range(1,len(parts)): + if not parts[i] in names and parts[i] != 'ANY': break + if i > 1: + states = tuple(parts[1:i]) + else: + states = ('INITIAL',) + + if 'ANY' in states: + states = tuple(names) + + tokenname = "_".join(parts[i:]) + return (states,tokenname) + + +# ----------------------------------------------------------------------------- +# LexerReflect() +# +# This class represents information needed to build a lexer as extracted from a +# user's input file. 
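LexerReflect below gathers everything with a `t_` prefix and, via `_statetoken`, files each rule under the lexer states named in its identifier (so `t_foo_bar_SPAM` defines token SPAM for states `foo` and `bar`). A sketch of the user-facing side of that machinery; the state and token names are invented for illustration:

import ply.lex as lex

tokens = ('WORD', 'QSTRING')
states = (('quoted', 'exclusive'),)   # hypothetical exclusive state

t_WORD   = r'[A-Za-z]+'
t_ignore = ' \t'

def t_quote(t):                 # INITIAL-state rule: a quote enters the state
    r'"'
    t.lexer.push_state('quoted')

t_quoted_ignore = ''            # nothing is skipped inside quotes

def t_quoted_QSTRING(t):        # active only in the 'quoted' state
    r'[^"]+'
    return t

def t_quoted_end(t):            # closing quote restores the previous state
    r'"'
    t.lexer.pop_state()

def t_error(t):
    t.lexer.skip(1)

def t_quoted_error(t):          # exclusive states need their own error rule
    t.lexer.skip(1)

lexer = lex.lex()
lexer.input('hello "a b c" world')
for tok in lexer:
    print(tok.type, tok.value)  # WORD hello, QSTRING a b c, WORD world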
+# ----------------------------------------------------------------------------- +class LexerReflect(object): + def __init__(self,ldict,log=None,reflags=0): + self.ldict = ldict + self.error_func = None + self.tokens = [] + self.reflags = reflags + self.stateinfo = { 'INITIAL' : 'inclusive'} + self.files = {} + self.error = 0 + + if log is None: + self.log = PlyLogger(sys.stderr) + else: + self.log = log + + # Get all of the basic information + def get_all(self): + self.get_tokens() + self.get_literals() + self.get_states() + self.get_rules() + + # Validate all of the information + def validate_all(self): + self.validate_tokens() + self.validate_literals() + self.validate_rules() + return self.error + + # Get the tokens map + def get_tokens(self): + tokens = self.ldict.get("tokens",None) + if not tokens: + self.log.error("No token list is defined") + self.error = 1 + return + + if not isinstance(tokens,(list, tuple)): + self.log.error("tokens must be a list or tuple") + self.error = 1 + return + + if not tokens: + self.log.error("tokens is empty") + self.error = 1 + return + + self.tokens = tokens + + # Validate the tokens + def validate_tokens(self): + terminals = {} + for n in self.tokens: + if not _is_identifier.match(n): + self.log.error("Bad token name '%s'",n) + self.error = 1 + if n in terminals: + self.log.warning("Token '%s' multiply defined", n) + terminals[n] = 1 + + # Get the literals specifier + def get_literals(self): + self.literals = self.ldict.get("literals","") + + # Validate literals + def validate_literals(self): + try: + for c in self.literals: + if not isinstance(c,StringTypes) or len(c) > 1: + self.log.error("Invalid literal %s. Must be a single character", repr(c)) + self.error = 1 + continue + + except TypeError: + self.log.error("Invalid literals specification. literals must be a sequence of characters") + self.error = 1 + + def get_states(self): + self.states = self.ldict.get("states",None) + # Build statemap + if self.states: + if not isinstance(self.states,(tuple,list)): + self.log.error("states must be defined as a tuple or list") + self.error = 1 + else: + for s in self.states: + if not isinstance(s,tuple) or len(s) != 2: + self.log.error("Invalid state specifier %s. 
Must be a tuple (statename,'exclusive|inclusive')",repr(s)) + self.error = 1 + continue + name, statetype = s + if not isinstance(name,StringTypes): + self.log.error("State name %s must be a string", repr(name)) + self.error = 1 + continue + if not (statetype == 'inclusive' or statetype == 'exclusive'): + self.log.error("State type for state %s must be 'inclusive' or 'exclusive'",name) + self.error = 1 + continue + if name in self.stateinfo: + self.log.error("State '%s' already defined",name) + self.error = 1 + continue + self.stateinfo[name] = statetype + + # Get all of the symbols with a t_ prefix and sort them into various + # categories (functions, strings, error functions, and ignore characters) + + def get_rules(self): + tsymbols = [f for f in self.ldict if f[:2] == 't_' ] + + # Now build up a list of functions and a list of strings + + self.toknames = { } # Mapping of symbols to token names + self.funcsym = { } # Symbols defined as functions + self.strsym = { } # Symbols defined as strings + self.ignore = { } # Ignore strings by state + self.errorf = { } # Error functions by state + + for s in self.stateinfo: + self.funcsym[s] = [] + self.strsym[s] = [] + + if len(tsymbols) == 0: + self.log.error("No rules of the form t_rulename are defined") + self.error = 1 + return + + for f in tsymbols: + t = self.ldict[f] + states, tokname = _statetoken(f,self.stateinfo) + self.toknames[f] = tokname + + if hasattr(t,"__call__"): + if tokname == 'error': + for s in states: + self.errorf[s] = t + elif tokname == 'ignore': + line = func_code(t).co_firstlineno + file = func_code(t).co_filename + self.log.error("%s:%d: Rule '%s' must be defined as a string",file,line,t.__name__) + self.error = 1 + else: + for s in states: + self.funcsym[s].append((f,t)) + elif isinstance(t, StringTypes): + if tokname == 'ignore': + for s in states: + self.ignore[s] = t + if "\\" in t: + self.log.warning("%s contains a literal backslash '\\'",f) + + elif tokname == 'error': + self.log.error("Rule '%s' must be defined as a function", f) + self.error = 1 + else: + for s in states: + self.strsym[s].append((f,t)) + else: + self.log.error("%s not defined as a function or string", f) + self.error = 1 + + # Sort the functions by line number + for f in self.funcsym.values(): + if sys.version_info[0] < 3: + f.sort(lambda x,y: cmp(func_code(x[1]).co_firstlineno,func_code(y[1]).co_firstlineno)) + else: + # Python 3.0 + f.sort(key=lambda x: func_code(x[1]).co_firstlineno) + + # Sort the strings by regular expression length + for s in self.strsym.values(): + if sys.version_info[0] < 3: + s.sort(lambda x,y: (len(x[1]) < len(y[1])) - (len(x[1]) > len(y[1]))) + else: + # Python 3.0 + s.sort(key=lambda x: len(x[1]),reverse=True) + + # Validate all of the t_rules collected + def validate_rules(self): + for state in self.stateinfo: + # Validate all rules defined by functions + + + + for fname, f in self.funcsym[state]: + line = func_code(f).co_firstlineno + file = func_code(f).co_filename + self.files[file] = 1 + + tokname = self.toknames[fname] + if isinstance(f, types.MethodType): + reqargs = 2 + else: + reqargs = 1 + nargs = func_code(f).co_argcount + if nargs > reqargs: + self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,f.__name__) + self.error = 1 + continue + + if nargs < reqargs: + self.log.error("%s:%d: Rule '%s' requires an argument", file,line,f.__name__) + self.error = 1 + continue + + if not f.__doc__: + self.log.error("%s:%d: No regular expression defined for rule '%s'",file,line,f.__name__) + self.error 
= 1 + continue + + try: + c = re.compile("(?P<%s>%s)" % (fname,f.__doc__), re.VERBOSE | self.reflags) + if c.match(""): + self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file,line,f.__name__) + self.error = 1 + except re.error: + _etype, e, _etrace = sys.exc_info() + self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file,line,f.__name__,e) + if '#' in f.__doc__: + self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'",file,line, f.__name__) + self.error = 1 + + # Validate all rules defined by strings + for name,r in self.strsym[state]: + tokname = self.toknames[name] + if tokname == 'error': + self.log.error("Rule '%s' must be defined as a function", name) + self.error = 1 + continue + + if not tokname in self.tokens and tokname.find("ignore_") < 0: + self.log.error("Rule '%s' defined for an unspecified token %s",name,tokname) + self.error = 1 + continue + + try: + c = re.compile("(?P<%s>%s)" % (name,r),re.VERBOSE | self.reflags) + if (c.match("")): + self.log.error("Regular expression for rule '%s' matches empty string",name) + self.error = 1 + except re.error: + _etype, e, _etrace = sys.exc_info() + self.log.error("Invalid regular expression for rule '%s'. %s",name,e) + if '#' in r: + self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'",name) + self.error = 1 + + if not self.funcsym[state] and not self.strsym[state]: + self.log.error("No rules defined for state '%s'",state) + self.error = 1 + + # Validate the error function + efunc = self.errorf.get(state,None) + if efunc: + f = efunc + line = func_code(f).co_firstlineno + file = func_code(f).co_filename + self.files[file] = 1 + + if isinstance(f, types.MethodType): + reqargs = 2 + else: + reqargs = 1 + nargs = func_code(f).co_argcount + if nargs > reqargs: + self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,f.__name__) + self.error = 1 + + if nargs < reqargs: + self.log.error("%s:%d: Rule '%s' requires an argument", file,line,f.__name__) + self.error = 1 + + for f in self.files: + self.validate_file(f) + + + # ----------------------------------------------------------------------------- + # validate_file() + # + # This checks to see if there are duplicated t_rulename() functions or strings + # in the parser input file. This is done using a simple regular expression + # match on each line in the given file. + # ----------------------------------------------------------------------------- + + def validate_file(self,filename): + import os.path + base,ext = os.path.splitext(filename) + if ext != '.py': return # No idea what the file is. Return OK + + try: + f = open(filename) + lines = f.readlines() + f.close() + except IOError: + return # Couldn't find the file. Don't worry about it + + fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(') + sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=') + + counthash = { } + linen = 1 + for l in lines: + m = fre.match(l) + if not m: + m = sre.match(l) + if m: + name = m.group(1) + prev = counthash.get(name) + if not prev: + counthash[name] = linen + else: + self.log.error("%s:%d: Rule %s redefined. 
Previously defined on line %d",filename,linen,name,prev) + self.error = 1 + linen += 1 + +# ----------------------------------------------------------------------------- +# lex(module) +# +# Build all of the regular expression rules from definitions in the supplied module +# ----------------------------------------------------------------------------- +def lex(module=None,object=None,debug=0,optimize=0,lextab="lextab",reflags=0,nowarn=0,outputdir="", debuglog=None, errorlog=None): + global lexer + ldict = None + stateinfo = { 'INITIAL' : 'inclusive'} + lexobj = Lexer() + lexobj.lexoptimize = optimize + global token,input + + if errorlog is None: + errorlog = PlyLogger(sys.stderr) + + if debug: + if debuglog is None: + debuglog = PlyLogger(sys.stderr) + + # Get the module dictionary used for the lexer + if object: module = object + + if module: + _items = [(k,getattr(module,k)) for k in dir(module)] + ldict = dict(_items) + else: + ldict = get_caller_module_dict(2) + + # Collect parser information from the dictionary + linfo = LexerReflect(ldict,log=errorlog,reflags=reflags) + linfo.get_all() + if not optimize: + if linfo.validate_all(): + raise SyntaxError("Can't build lexer") + + if optimize and lextab: + try: + lexobj.readtab(lextab,ldict) + token = lexobj.token + input = lexobj.input + lexer = lexobj + return lexobj + + except ImportError: + pass + + # Dump some basic debugging information + if debug: + debuglog.info("lex: tokens = %r", linfo.tokens) + debuglog.info("lex: literals = %r", linfo.literals) + debuglog.info("lex: states = %r", linfo.stateinfo) + + # Build a dictionary of valid token names + lexobj.lextokens = { } + for n in linfo.tokens: + lexobj.lextokens[n] = 1 + + # Get literals specification + if isinstance(linfo.literals,(list,tuple)): + lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals) + else: + lexobj.lexliterals = linfo.literals + + # Get the stateinfo dictionary + stateinfo = linfo.stateinfo + + regexs = { } + # Build the master regular expressions + for state in stateinfo: + regex_list = [] + + # Add rules defined by functions first + for fname, f in linfo.funcsym[state]: + line = func_code(f).co_firstlineno + file = func_code(f).co_filename + regex_list.append("(?P<%s>%s)" % (fname,f.__doc__)) + if debug: + debuglog.info("lex: Adding rule %s -> '%s' (state '%s')",fname,f.__doc__, state) + + # Now add all of the simple rules + for name,r in linfo.strsym[state]: + regex_list.append("(?P<%s>%s)" % (name,r)) + if debug: + debuglog.info("lex: Adding rule %s -> '%s' (state '%s')",name,r, state) + + regexs[state] = regex_list + + # Build the master regular expressions + + if debug: + debuglog.info("lex: ==== MASTER REGEXS FOLLOW ====") + + for state in regexs: + lexre, re_text, re_names = _form_master_re(regexs[state],reflags,ldict,linfo.toknames) + lexobj.lexstatere[state] = lexre + lexobj.lexstateretext[state] = re_text + lexobj.lexstaterenames[state] = re_names + if debug: + for i in range(len(re_text)): + debuglog.info("lex: state '%s' : regex[%d] = '%s'",state, i, re_text[i]) + + # For inclusive states, we need to add the regular expressions from the INITIAL state + for state,stype in stateinfo.items(): + if state != "INITIAL" and stype == 'inclusive': + lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL']) + lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL']) + lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL']) + + lexobj.lexstateinfo = stateinfo + lexobj.lexre = lexobj.lexstatere["INITIAL"] + 
lexobj.lexretext = lexobj.lexstateretext["INITIAL"] + lexobj.lexreflags = reflags + + # Set up ignore variables + lexobj.lexstateignore = linfo.ignore + lexobj.lexignore = lexobj.lexstateignore.get("INITIAL","") + + # Set up error functions + lexobj.lexstateerrorf = linfo.errorf + lexobj.lexerrorf = linfo.errorf.get("INITIAL",None) + if not lexobj.lexerrorf: + errorlog.warning("No t_error rule is defined") + + # Check state information for ignore and error rules + for s,stype in stateinfo.items(): + if stype == 'exclusive': + if not s in linfo.errorf: + errorlog.warning("No error rule is defined for exclusive state '%s'", s) + if not s in linfo.ignore and lexobj.lexignore: + errorlog.warning("No ignore rule is defined for exclusive state '%s'", s) + elif stype == 'inclusive': + if not s in linfo.errorf: + linfo.errorf[s] = linfo.errorf.get("INITIAL",None) + if not s in linfo.ignore: + linfo.ignore[s] = linfo.ignore.get("INITIAL","") + + # Create global versions of the token() and input() functions + token = lexobj.token + input = lexobj.input + lexer = lexobj + + # If in optimize mode, we write the lextab + if lextab and optimize: + lexobj.writetab(lextab,outputdir) + + return lexobj + +# ----------------------------------------------------------------------------- +# runmain() +# +# This runs the lexer as a main program +# ----------------------------------------------------------------------------- + +def runmain(lexer=None,data=None): + if not data: + try: + filename = sys.argv[1] + f = open(filename) + data = f.read() + f.close() + except IndexError: + sys.stdout.write("Reading from standard input (type EOF to end):\n") + data = sys.stdin.read() + + if lexer: + _input = lexer.input + else: + _input = input + _input(data) + if lexer: + _token = lexer.token + else: + _token = token + + while 1: + tok = _token() + if not tok: break + sys.stdout.write("(%s,%r,%d,%d)\n" % (tok.type, tok.value, tok.lineno,tok.lexpos)) + +# ----------------------------------------------------------------------------- +# @TOKEN(regex) +# +# This decorator function can be used to set the regex expression on a function +# when its docstring might need to be set in an alternative way +# ----------------------------------------------------------------------------- + +def TOKEN(r): + def set_doc(f): + if hasattr(r,"__call__"): + f.__doc__ = r.__doc__ + else: + f.__doc__ = r + return f + return set_doc + +# Alternative spelling of the TOKEN decorator +Token = TOKEN + diff --git a/lib/python3.4/site-packages/pycparser/ply/yacc.py b/lib/python3.4/site-packages/pycparser/ply/yacc.py new file mode 100644 index 0000000..f70439e --- /dev/null +++ b/lib/python3.4/site-packages/pycparser/ply/yacc.py @@ -0,0 +1,3276 @@ +# ----------------------------------------------------------------------------- +# ply: yacc.py +# +# Copyright (C) 2001-2011, +# David M. Beazley (Dabeaz LLC) +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# * Neither the name of the David Beazley or Dabeaz LLC may be used to
+#   endorse or promote products derived from this software without
+#   specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# -----------------------------------------------------------------------------
+#
+# This implements an LR parser that is constructed from grammar rules defined
+# as Python functions.  The grammar is specified by supplying the BNF inside
+# Python documentation strings.  The inspiration for this technique was borrowed
+# from John Aycock's Spark parsing system.  PLY might be viewed as a cross between
+# Spark and the GNU bison utility.
+#
+# The current implementation is only somewhat object-oriented.  The
+# LR parser itself is defined in terms of an object (which allows multiple
+# parsers to co-exist).  However, most of the variables used during table
+# construction are defined in terms of global variables.  Users shouldn't
+# notice unless they are trying to define multiple parsers at the same
+# time using threads (in which case they should have their head examined).
+#
+# This implementation supports both SLR and LALR(1) parsing.  LALR(1)
+# support was originally implemented by Elias Ioup (ezioup@alumni.uchicago.edu),
+# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles,
+# Techniques, and Tools" (The Dragon Book).  LALR(1) has since been replaced
+# by the more efficient DeRemer and Pennello algorithm.
+#
+# :::::::: WARNING :::::::
+#
+# Construction of LR parsing tables is fairly complicated and expensive.
+# To make this module run fast, a *LOT* of work has been put into
+# optimization---often at the expense of readability and what might be
+# considered good Python "coding style."  Modify the code at your
+# own risk!
+# ----------------------------------------------------------------------------
+
+__version__    = "3.4"
+__tabversion__ = "3.2"       # Table version
+
+#-----------------------------------------------------------------------------
+#                     === User configurable parameters ===
+#
+# Change these to modify the default behavior of yacc (if you wish)
+#-----------------------------------------------------------------------------
+
+yaccdebug   = 1                # Debugging mode.  If set, yacc generates a
+                               # 'parser.out' file in the current directory
+
+debug_file  = 'parser.out'     # Default name of the debugging file
+tab_module  = 'parsetab'       # Default name of the table module
+default_lr  = 'LALR'           # Default LR table generation method
+
+error_count = 3                # Number of symbols that must be shifted to leave recovery mode
+
+yaccdevel   = 0                # Set to True if developing yacc.  This turns off optimized
+                               # implementations of certain functions.
+
+resultlimit = 40               # Size limit of results when running in debug mode.
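As a concrete instance of the docstring-BNF convention the header above describes, here is a minimal expression grammar. It is a sketch only; `calclex` is a hypothetical companion module that defines the NUMBER, PLUS and TIMES tokens with ply.lex.

import ply.yacc as yacc
from calclex import tokens           # hypothetical lexer module

precedence = (                       # resolves the shift/reduce conflicts
    ('left', 'PLUS'),
    ('left', 'TIMES'),
)

def p_expr_plus(p):
    'expr : expr PLUS expr'          # the BNF lives in the docstring
    p[0] = p[1] + p[3]

def p_expr_times(p):
    'expr : expr TIMES expr'
    p[0] = p[1] * p[3]

def p_expr_number(p):
    'expr : NUMBER'
    p[0] = p[1]

def p_error(p):
    print("Syntax error at %r" % (p,))

parser = yacc.yacc()                 # builds the LALR tables (writes parsetab.py)
print(parser.parse("2 + 3 * 4"))     # -> 14, given a suitable calclex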
+ +pickle_protocol = 0 # Protocol to use when writing pickle files + +import re, types, sys, os.path + +# Compatibility function for python 2.6/3.0 +if sys.version_info[0] < 3: + def func_code(f): + return f.func_code +else: + def func_code(f): + return f.__code__ + +# Compatibility +try: + MAXINT = sys.maxint +except AttributeError: + MAXINT = sys.maxsize + +# Python 2.x/3.0 compatibility. +def load_ply_lex(): + if sys.version_info[0] < 3: + import lex + else: + import ply.lex as lex + return lex + +# This object is a stand-in for a logging object created by the +# logging module. PLY will use this by default to create things +# such as the parser.out file. If a user wants more detailed +# information, they can create their own logging object and pass +# it into PLY. + +class PlyLogger(object): + def __init__(self,f): + self.f = f + def debug(self,msg,*args,**kwargs): + self.f.write((msg % args) + "\n") + info = debug + + def warning(self,msg,*args,**kwargs): + self.f.write("WARNING: "+ (msg % args) + "\n") + + def error(self,msg,*args,**kwargs): + self.f.write("ERROR: " + (msg % args) + "\n") + + critical = debug + +# Null logger is used when no output is generated. Does nothing. +class NullLogger(object): + def __getattribute__(self,name): + return self + def __call__(self,*args,**kwargs): + return self + +# Exception raised for yacc-related errors +class YaccError(Exception): pass + +# Format the result message that the parser produces when running in debug mode. +def format_result(r): + repr_str = repr(r) + if '\n' in repr_str: repr_str = repr(repr_str) + if len(repr_str) > resultlimit: + repr_str = repr_str[:resultlimit]+" ..." + result = "<%s @ 0x%x> (%s)" % (type(r).__name__,id(r),repr_str) + return result + + +# Format stack entries when the parser is running in debug mode +def format_stack_entry(r): + repr_str = repr(r) + if '\n' in repr_str: repr_str = repr(repr_str) + if len(repr_str) < 16: + return repr_str + else: + return "<%s @ 0x%x>" % (type(r).__name__,id(r)) + +#----------------------------------------------------------------------------- +# === LR Parsing Engine === +# +# The following classes are used for the LR parser itself. These are not +# used during table construction and are independent of the actual LR +# table generation algorithm +#----------------------------------------------------------------------------- + +# This class is used to hold non-terminal grammar symbols during parsing. +# It normally has the following attributes set: +# .type = Grammar symbol type +# .value = Symbol value +# .lineno = Starting line number +# .endlineno = Ending line number (optional, set automatically) +# .lexpos = Starting lex position +# .endlexpos = Ending lex position (optional, set automatically) + +class YaccSymbol: + def __str__(self): return self.type + def __repr__(self): return str(self) + +# This class is a wrapper around the objects actually passed to each +# grammar rule. Index lookup and assignment actually assign the +# .value attribute of the underlying YaccSymbol object. +# The lineno() method returns the line number of a given +# item (or 0 if not defined). The linespan() method returns +# a tuple of (startline,endline) representing the range of lines +# for a symbol. The lexspan() method returns a tuple (lexpos,endlexpos) +# representing the range of positional information for a symbol. 
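Inside a grammar rule, `p` is the YaccProduction wrapper just described. A sketch of how a hypothetical rule would use the indexing and position helpers; note that endlineno/endlexpos, and hence the second element of linespan()/lexspan(), are only maintained when parsing with tracking=True.

# 'assign', 'ID', 'EQUALS' and 'expr' are hypothetical grammar names.
def p_assign(p):
    'assign : ID EQUALS expr'
    p[0] = ('assign', p[1], p[3])   # indexing reads/writes each symbol's .value
    line = p.lineno(1)              # line number of the ID token
    span = p.linespan(3)            # (startline, endline) of the expr subtree
    pos  = p.lexpos(2)              # lexer position of the '=' token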
+ +class YaccProduction: + def __init__(self,s,stack=None): + self.slice = s + self.stack = stack + self.lexer = None + self.parser= None + def __getitem__(self,n): + if n >= 0: return self.slice[n].value + else: return self.stack[n].value + + def __setitem__(self,n,v): + self.slice[n].value = v + + def __getslice__(self,i,j): + return [s.value for s in self.slice[i:j]] + + def __len__(self): + return len(self.slice) + + def lineno(self,n): + return getattr(self.slice[n],"lineno",0) + + def set_lineno(self,n,lineno): + self.slice[n].lineno = lineno + + def linespan(self,n): + startline = getattr(self.slice[n],"lineno",0) + endline = getattr(self.slice[n],"endlineno",startline) + return startline,endline + + def lexpos(self,n): + return getattr(self.slice[n],"lexpos",0) + + def lexspan(self,n): + startpos = getattr(self.slice[n],"lexpos",0) + endpos = getattr(self.slice[n],"endlexpos",startpos) + return startpos,endpos + + def error(self): + raise SyntaxError + + +# ----------------------------------------------------------------------------- +# == LRParser == +# +# The LR Parsing engine. +# ----------------------------------------------------------------------------- + +class LRParser: + def __init__(self,lrtab,errorf): + self.productions = lrtab.lr_productions + self.action = lrtab.lr_action + self.goto = lrtab.lr_goto + self.errorfunc = errorf + + def errok(self): + self.errorok = 1 + + def restart(self): + del self.statestack[:] + del self.symstack[:] + sym = YaccSymbol() + sym.type = '$end' + self.symstack.append(sym) + self.statestack.append(0) + + def parse(self,input=None,lexer=None,debug=0,tracking=0,tokenfunc=None): + if debug or yaccdevel: + if isinstance(debug,int): + debug = PlyLogger(sys.stderr) + return self.parsedebug(input,lexer,debug,tracking,tokenfunc) + elif tracking: + return self.parseopt(input,lexer,debug,tracking,tokenfunc) + else: + return self.parseopt_notrack(input,lexer,debug,tracking,tokenfunc) + + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parsedebug(). + # + # This is the debugging enabled version of parse(). All changes made to the + # parsing engine should be made here. For the non-debugging version, + # copy this code to a method parseopt() and delete all of the sections + # enclosed in: + # + # #--! DEBUG + # statements + # #--! DEBUG + # + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + def parsedebug(self,input=None,lexer=None,debug=None,tracking=0,tokenfunc=None): + lookahead = None # Current lookahead symbol + lookaheadstack = [ ] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) + prod = self.productions # Local reference to production list (to avoid lookup on self.) + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + # --! DEBUG + debug.info("PLY: PARSE DEBUG START") + # --! 
DEBUG + + # If no lexer was given, we will try to use the lex module + if not lexer: + lex = load_ply_lex() + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set up the state and symbol stacks + + statestack = [ ] # Stack of parsing states + self.statestack = statestack + symstack = [ ] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = "$end" + symstack.append(sym) + state = 0 + while 1: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. Otherwise, we'll pull + # the next token off of the lookaheadstack or from the lexer + + # --! DEBUG + debug.debug('') + debug.debug('State : %s', state) + # --! DEBUG + + if not lookahead: + if not lookaheadstack: + lookahead = get_token() # Get the next token + else: + lookahead = lookaheadstack.pop() + if not lookahead: + lookahead = YaccSymbol() + lookahead.type = "$end" + + # --! DEBUG + debug.debug('Stack : %s', + ("%s . %s" % (" ".join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip()) + # --! DEBUG + + # Check the action table + ltype = lookahead.type + t = actions[state].get(ltype) + + if t is not None: + if t > 0: + # shift a symbol on the stack + statestack.append(t) + state = t + + # --! DEBUG + debug.debug("Action : Shift and goto state %s", t) + # --! DEBUG + + symstack.append(lookahead) + lookahead = None + + # Decrease error count on successful shift + if errorcount: errorcount -=1 + continue + + if t < 0: + # reduce a symbol on the stack, emit a production + p = prod[-t] + pname = p.name + plen = p.len + + # Get production function + sym = YaccSymbol() + sym.type = pname # Production name + sym.value = None + + # --! DEBUG + if plen: + debug.info("Action : Reduce rule [%s] with %s and goto state %d", p.str, "["+",".join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+"]",-t) + else: + debug.info("Action : Reduce rule [%s] with %s and goto state %d", p.str, [],-t) + + # --! DEBUG + + if plen: + targ = symstack[-plen-1:] + targ[0] = sym + + # --! TRACKING + if tracking: + t1 = targ[1] + sym.lineno = t1.lineno + sym.lexpos = t1.lexpos + t1 = targ[-1] + sym.endlineno = getattr(t1,"endlineno",t1.lineno) + sym.endlexpos = getattr(t1,"endlexpos",t1.lexpos) + + # --! TRACKING + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # below as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + del symstack[-plen:] + del statestack[-plen:] + p.callable(pslice) + # --! DEBUG + debug.info("Result : %s", format_result(pslice[0])) + # --! DEBUG + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) + symstack.pop() + statestack.pop() + state = statestack[-1] + sym.type = 'error' + lookahead = sym + errorcount = error_count + self.errorok = 0 + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + else: + + # --! 
TRACKING + if tracking: + sym.lineno = lexer.lineno + sym.lexpos = lexer.lexpos + # --! TRACKING + + targ = [ sym ] + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # above as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + p.callable(pslice) + # --! DEBUG + debug.info("Result : %s", format_result(pslice[0])) + # --! DEBUG + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) + symstack.pop() + statestack.pop() + state = statestack[-1] + sym.type = 'error' + lookahead = sym + errorcount = error_count + self.errorok = 0 + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + if t == 0: + n = symstack[-1] + result = getattr(n,"value",None) + # --! DEBUG + debug.info("Done : Returning %s", format_result(result)) + debug.info("PLY: PARSE DEBUG END") + # --! DEBUG + return result + + if t == None: + + # --! DEBUG + debug.error('Error : %s', + ("%s . %s" % (" ".join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip()) + # --! DEBUG + + # We have some kind of parsing error here. To handle + # this, we are going to push the current token onto + # the tokenstack and replace it with an 'error' token. + # If there are any synchronization rules, they may + # catch it. + # + # In addition to pushing the error token, we call call + # the user defined p_error() function if this is the + # first syntax error. This function is only called if + # errorcount == 0. + if errorcount == 0 or self.errorok: + errorcount = error_count + self.errorok = 0 + errtoken = lookahead + if errtoken.type == "$end": + errtoken = None # End of file! + if self.errorfunc: + global errok,token,restart + errok = self.errok # Set some special functions available in error recovery + token = get_token + restart = self.restart + if errtoken and not hasattr(errtoken,'lexer'): + errtoken.lexer = lexer + tok = self.errorfunc(errtoken) + del errok, token, restart # Delete special functions + + if self.errorok: + # User must have done some kind of panic + # mode recovery on their own. The + # returned token is the next lookahead + lookahead = tok + errtoken = None + continue + else: + if errtoken: + if hasattr(errtoken,"lineno"): lineno = lookahead.lineno + else: lineno = 0 + if lineno: + sys.stderr.write("yacc: Syntax error at line %d, token=%s\n" % (lineno, errtoken.type)) + else: + sys.stderr.write("yacc: Syntax error, token=%s" % errtoken.type) + else: + sys.stderr.write("yacc: Parse error in input. EOF\n") + return + + else: + errorcount = error_count + + # case 1: the statestack only has 1 entry on it. If we're in this state, the + # entire parse has been rolled back and we're completely hosed. The token is + # discarded and we just keep going. + + if len(statestack) <= 1 and lookahead.type != "$end": + lookahead = None + errtoken = None + state = 0 + # Nuke the pushback stack + del lookaheadstack[:] + continue + + # case 2: the statestack has a couple of entries on it, but we're + # at the end of the file. nuke the top entry and generate an error token + + # Start nuking entries on the stack + if lookahead.type == "$end": + # Whoa. We're really hosed here. Bail out + return + + if lookahead.type != 'error': + sym = symstack[-1] + if sym.type == 'error': + # Hmmm. 
Error is on top of stack, we'll just nuke input + # symbol and continue + lookahead = None + continue + t = YaccSymbol() + t.type = 'error' + if hasattr(lookahead,"lineno"): + t.lineno = lookahead.lineno + t.value = lookahead + lookaheadstack.append(lookahead) + lookahead = t + else: + symstack.pop() + statestack.pop() + state = statestack[-1] # Potential bug fix + + continue + + # Call an error function here + raise RuntimeError("yacc: internal parser error!!!\n") + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parseopt(). + # + # Optimized version of parse() method. DO NOT EDIT THIS CODE DIRECTLY. + # Edit the debug version above, then copy any modifications to the method + # below while removing #--! DEBUG sections. + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + + def parseopt(self,input=None,lexer=None,debug=0,tracking=0,tokenfunc=None): + lookahead = None # Current lookahead symbol + lookaheadstack = [ ] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) + prod = self.productions # Local reference to production list (to avoid lookup on self.) + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + # If no lexer was given, we will try to use the lex module + if not lexer: + lex = load_ply_lex() + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set up the state and symbol stacks + + statestack = [ ] # Stack of parsing states + self.statestack = statestack + symstack = [ ] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = '$end' + symstack.append(sym) + state = 0 + while 1: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. Otherwise, we'll pull + # the next token off of the lookaheadstack or from the lexer + + if not lookahead: + if not lookaheadstack: + lookahead = get_token() # Get the next token + else: + lookahead = lookaheadstack.pop() + if not lookahead: + lookahead = YaccSymbol() + lookahead.type = '$end' + + # Check the action table + ltype = lookahead.type + t = actions[state].get(ltype) + + if t is not None: + if t > 0: + # shift a symbol on the stack + statestack.append(t) + state = t + + symstack.append(lookahead) + lookahead = None + + # Decrease error count on successful shift + if errorcount: errorcount -=1 + continue + + if t < 0: + # reduce a symbol on the stack, emit a production + p = prod[-t] + pname = p.name + plen = p.len + + # Get production function + sym = YaccSymbol() + sym.type = pname # Production name + sym.value = None + + if plen: + targ = symstack[-plen-1:] + targ[0] = sym + + # --! TRACKING + if tracking: + t1 = targ[1] + sym.lineno = t1.lineno + sym.lexpos = t1.lexpos + t1 = targ[-1] + sym.endlineno = getattr(t1,"endlineno",t1.lineno) + sym.endlexpos = getattr(t1,"endlexpos",t1.lexpos) + + # --! TRACKING + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
+ # The code enclosed in this section is duplicated + # below as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + del symstack[-plen:] + del statestack[-plen:] + p.callable(pslice) + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) + symstack.pop() + statestack.pop() + state = statestack[-1] + sym.type = 'error' + lookahead = sym + errorcount = error_count + self.errorok = 0 + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + else: + + # --! TRACKING + if tracking: + sym.lineno = lexer.lineno + sym.lexpos = lexer.lexpos + # --! TRACKING + + targ = [ sym ] + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # above as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + p.callable(pslice) + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) + symstack.pop() + statestack.pop() + state = statestack[-1] + sym.type = 'error' + lookahead = sym + errorcount = error_count + self.errorok = 0 + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + if t == 0: + n = symstack[-1] + return getattr(n,"value",None) + + if t == None: + + # We have some kind of parsing error here. To handle + # this, we are going to push the current token onto + # the tokenstack and replace it with an 'error' token. + # If there are any synchronization rules, they may + # catch it. + # + # In addition to pushing the error token, we call call + # the user defined p_error() function if this is the + # first syntax error. This function is only called if + # errorcount == 0. + if errorcount == 0 or self.errorok: + errorcount = error_count + self.errorok = 0 + errtoken = lookahead + if errtoken.type == '$end': + errtoken = None # End of file! + if self.errorfunc: + global errok,token,restart + errok = self.errok # Set some special functions available in error recovery + token = get_token + restart = self.restart + if errtoken and not hasattr(errtoken,'lexer'): + errtoken.lexer = lexer + tok = self.errorfunc(errtoken) + del errok, token, restart # Delete special functions + + if self.errorok: + # User must have done some kind of panic + # mode recovery on their own. The + # returned token is the next lookahead + lookahead = tok + errtoken = None + continue + else: + if errtoken: + if hasattr(errtoken,"lineno"): lineno = lookahead.lineno + else: lineno = 0 + if lineno: + sys.stderr.write("yacc: Syntax error at line %d, token=%s\n" % (lineno, errtoken.type)) + else: + sys.stderr.write("yacc: Syntax error, token=%s" % errtoken.type) + else: + sys.stderr.write("yacc: Parse error in input. EOF\n") + return + + else: + errorcount = error_count + + # case 1: the statestack only has 1 entry on it. If we're in this state, the + # entire parse has been rolled back and we're completely hosed. The token is + # discarded and we just keep going. 
+ + if len(statestack) <= 1 and lookahead.type != '$end': + lookahead = None + errtoken = None + state = 0 + # Nuke the pushback stack + del lookaheadstack[:] + continue + + # case 2: the statestack has a couple of entries on it, but we're + # at the end of the file. nuke the top entry and generate an error token + + # Start nuking entries on the stack + if lookahead.type == '$end': + # Whoa. We're really hosed here. Bail out + return + + if lookahead.type != 'error': + sym = symstack[-1] + if sym.type == 'error': + # Hmmm. Error is on top of stack, we'll just nuke input + # symbol and continue + lookahead = None + continue + t = YaccSymbol() + t.type = 'error' + if hasattr(lookahead,"lineno"): + t.lineno = lookahead.lineno + t.value = lookahead + lookaheadstack.append(lookahead) + lookahead = t + else: + symstack.pop() + statestack.pop() + state = statestack[-1] # Potential bug fix + + continue + + # Call an error function here + raise RuntimeError("yacc: internal parser error!!!\n") + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parseopt_notrack(). + # + # Optimized version of parseopt() with line number tracking removed. + # DO NOT EDIT THIS CODE DIRECTLY. Copy the optimized version and remove + # code in the #--! TRACKING sections + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + def parseopt_notrack(self,input=None,lexer=None,debug=0,tracking=0,tokenfunc=None): + lookahead = None # Current lookahead symbol + lookaheadstack = [ ] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) + prod = self.productions # Local reference to production list (to avoid lookup on self.) + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + # If no lexer was given, we will try to use the lex module + if not lexer: + lex = load_ply_lex() + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set up the state and symbol stacks + + statestack = [ ] # Stack of parsing states + self.statestack = statestack + symstack = [ ] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = '$end' + symstack.append(sym) + state = 0 + while 1: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. 
Otherwise, we'll pull
+            # the next token off of the lookaheadstack or from the lexer
+
+            if not lookahead:
+                if not lookaheadstack:
+                    lookahead = get_token()     # Get the next token
+                else:
+                    lookahead = lookaheadstack.pop()
+                if not lookahead:
+                    lookahead = YaccSymbol()
+                    lookahead.type = '$end'
+
+            # Check the action table
+            ltype = lookahead.type
+            t = actions[state].get(ltype)
+
+            if t is not None:
+                if t > 0:
+                    # shift a symbol on the stack
+                    statestack.append(t)
+                    state = t
+
+                    symstack.append(lookahead)
+                    lookahead = None
+
+                    # Decrease error count on successful shift
+                    if errorcount: errorcount -=1
+                    continue
+
+                if t < 0:
+                    # reduce a symbol on the stack, emit a production
+                    p = prod[-t]
+                    pname = p.name
+                    plen  = p.len
+
+                    # Get production function
+                    sym = YaccSymbol()
+                    sym.type = pname       # Production name
+                    sym.value = None
+
+                    if plen:
+                        targ = symstack[-plen-1:]
+                        targ[0] = sym
+
+                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+                        # The code enclosed in this section is duplicated
+                        # below as a performance optimization. Make sure
+                        # changes get made in both locations.
+
+                        pslice.slice = targ
+
+                        try:
+                            # Call the grammar rule with our special slice object
+                            del symstack[-plen:]
+                            del statestack[-plen:]
+                            p.callable(pslice)
+                            symstack.append(sym)
+                            state = goto[statestack[-1]][pname]
+                            statestack.append(state)
+                        except SyntaxError:
+                            # If an error was set, enter error recovery state
+                            lookaheadstack.append(lookahead)
+                            symstack.pop()
+                            statestack.pop()
+                            state = statestack[-1]
+                            sym.type = 'error'
+                            lookahead = sym
+                            errorcount = error_count
+                            self.errorok = 0
+                            continue
+                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+                    else:
+
+                        targ = [ sym ]
+
+                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+                        # The code enclosed in this section is duplicated
+                        # above as a performance optimization. Make sure
+                        # changes get made in both locations.
+
+                        pslice.slice = targ
+
+                        try:
+                            # Call the grammar rule with our special slice object
+                            p.callable(pslice)
+                            symstack.append(sym)
+                            state = goto[statestack[-1]][pname]
+                            statestack.append(state)
+                        except SyntaxError:
+                            # If an error was set, enter error recovery state
+                            lookaheadstack.append(lookahead)
+                            symstack.pop()
+                            statestack.pop()
+                            state = statestack[-1]
+                            sym.type = 'error'
+                            lookahead = sym
+                            errorcount = error_count
+                            self.errorok = 0
+                            continue
+                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+                if t == 0:
+                    n = symstack[-1]
+                    return getattr(n,"value",None)
+
+            if t is None:
+
+                # We have some kind of parsing error here. To handle
+                # this, we are going to push the current token onto
+                # the tokenstack and replace it with an 'error' token.
+                # If there are any synchronization rules, they may
+                # catch it.
+                #
+                # In addition to pushing the error token, we call
+                # the user defined p_error() function if this is the
+                # first syntax error. This function is only called if
+                # errorcount == 0.
+                if errorcount == 0 or self.errorok:
+                    errorcount = error_count
+                    self.errorok = 0
+                    errtoken = lookahead
+                    if errtoken.type == '$end':
+                        errtoken = None  # End of file!
+                    if self.errorfunc:
+                        global errok,token,restart
+                        errok = self.errok        # Set some special functions available in error recovery
+                        token = get_token
+                        restart = self.restart
+                        if errtoken and not hasattr(errtoken,'lexer'):
+                            errtoken.lexer = lexer
+                        tok = self.errorfunc(errtoken)
+                        del errok, token, restart   # Delete special functions
+
+                        if self.errorok:
+                            # User must have done some kind of panic
+                            # mode recovery on their own. The
+                            # returned token is the next lookahead
+                            lookahead = tok
+                            errtoken = None
+                            continue
+                    else:
+                        if errtoken:
+                            if hasattr(errtoken,"lineno"): lineno = errtoken.lineno
+                            else: lineno = 0
+                            if lineno:
+                                sys.stderr.write("yacc: Syntax error at line %d, token=%s\n" % (lineno, errtoken.type))
+                            else:
+                                sys.stderr.write("yacc: Syntax error, token=%s\n" % errtoken.type)
+                        else:
+                            sys.stderr.write("yacc: Parse error in input. EOF\n")
+                        return
+
+                else:
+                    errorcount = error_count
+
+                # case 1: the statestack only has 1 entry on it. If we're in this state, the
+                # entire parse has been rolled back and we're completely hosed. The token is
+                # discarded and we just keep going.
+
+                if len(statestack) <= 1 and lookahead.type != '$end':
+                    lookahead = None
+                    errtoken = None
+                    state = 0
+                    # Nuke the pushback stack
+                    del lookaheadstack[:]
+                    continue
+
+                # case 2: the statestack has a couple of entries on it, but we're
+                # at the end of the file. Nuke the top entry and generate an error token
+
+                # Start nuking entries on the stack
+                if lookahead.type == '$end':
+                    # Whoa. We're really hosed here. Bail out
+                    return
+
+                if lookahead.type != 'error':
+                    sym = symstack[-1]
+                    if sym.type == 'error':
+                        # Hmmm. Error is on top of stack, we'll just nuke input
+                        # symbol and continue
+                        lookahead = None
+                        continue
+                    t = YaccSymbol()
+                    t.type = 'error'
+                    if hasattr(lookahead,"lineno"):
+                        t.lineno = lookahead.lineno
+                    t.value = lookahead
+                    lookaheadstack.append(lookahead)
+                    lookahead = t
+                else:
+                    symstack.pop()
+                    statestack.pop()
+                    state = statestack[-1]       # Potential bug fix
+
+                continue
+
+        # Call an error function here
+        raise RuntimeError("yacc: internal parser error!!!\n")
+
+# -----------------------------------------------------------------------------
+#                          === Grammar Representation ===
+#
+# The following functions, classes, and variables are used to represent and
+# manipulate the rules that make up a grammar.
+# -----------------------------------------------------------------------------
+
+import re
+
+# regex matching identifiers
+_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$')
+
+# -----------------------------------------------------------------------------
+# class Production:
+#
+# This class stores the raw information about a single production or grammar rule.
+# A grammar rule refers to a specification such as this:
+#
+#       expr : expr PLUS term
+#
+# Here are the basic attributes defined on all productions
+#
+#       name     - Name of the production. For example 'expr'
+#       prod     - A list of symbols on the right side ['expr','PLUS','term']
+#       prec     - Production precedence level
+#       number   - Production number.
+#       func     - Function that executes on reduce
+#       file     - File where production function is defined
+#       lineno   - Line number where production function is defined
+#
+# The following attributes are also defined:
+#
+#       len       - Length of the production (number of symbols on right hand side)
+#       usyms     - Set of unique symbols found in the production
+# -----------------------------------------------------------------------------
+
+class Production(object):
+    reduced = 0
+    def __init__(self,number,name,prod,precedence=('right',0),func=None,file='',line=0):
+        self.name     = name
+        self.prod     = tuple(prod)
+        self.number   = number
+        self.func     = func
+        self.callable = None
+        self.file     = file
+        self.line     = line
+        self.prec     = precedence
+
+        # Internal settings used during table construction
+
+        self.len  = len(self.prod)   # Length of the production
+
+        # Create a list of unique production symbols used in the production
+        self.usyms = [ ]
+        for s in self.prod:
+            if s not in self.usyms:
+                self.usyms.append(s)
+
+        # List of all LR items for the production
+        self.lr_items = []
+        self.lr_next  = None
+
+        # Create a string representation
+        if self.prod:
+            self.str = "%s -> %s" % (self.name," ".join(self.prod))
+        else:
+            self.str = "%s -> <empty>" % self.name
+
+    def __str__(self):
+        return self.str
+
+    def __repr__(self):
+        return "Production("+str(self)+")"
+
+    def __len__(self):
+        return len(self.prod)
+
+    def __nonzero__(self):
+        return 1
+
+    def __getitem__(self,index):
+        return self.prod[index]
+
+    # Return the nth lr_item from the production (or None if at the end)
+    def lr_item(self,n):
+        if n > len(self.prod): return None
+        p = LRItem(self,n)
+
+        # Precompute the list of productions immediately following. Hack. Remove later
+        try:
+            p.lr_after = Prodnames[p.prod[n+1]]
+        except (IndexError,KeyError):
+            p.lr_after = []
+        try:
+            p.lr_before = p.prod[n-1]
+        except IndexError:
+            p.lr_before = None
+
+        return p
+
+    # Bind the production function name to a callable
+    def bind(self,pdict):
+        if self.func:
+            self.callable = pdict[self.func]
+
+# This class serves as a minimal stand-in for Production objects when
+# reading table data from files. It only contains information
+# actually used by the LR parsing engine, plus some additional
+# debugging information.
+class MiniProduction(object):
+    def __init__(self,str,name,len,func,file,line):
+        self.name     = name
+        self.len      = len
+        self.func     = func
+        self.callable = None
+        self.file     = file
+        self.line     = line
+        self.str      = str
+    def __str__(self):
+        return self.str
+    def __repr__(self):
+        return "MiniProduction(%s)" % self.str
+
+    # Bind the production function name to a callable
+    def bind(self,pdict):
+        if self.func:
+            self.callable = pdict[self.func]
+
+
+# -----------------------------------------------------------------------------
+# class LRItem
+#
+# This class represents a specific stage of parsing a production rule. For
+# example:
+#
+#       expr : expr . PLUS term
+#
+# In the above, the "." represents the current location of the parse. Here
+# are the basic attributes:
+#
+#       name       - Name of the production. For example 'expr'
+#       prod       - A list of symbols on the right side ['expr','.', 'PLUS','term']
+#       number     - Production number.
+#
+#       lr_next    - Next LR item. For example, if we are ' expr -> expr . PLUS term'
+#                    then lr_next refers to 'expr -> expr PLUS . term'
+#       lr_index   - LR item index (location of the ".") in the prod list.
+#       lookaheads - LALR lookahead symbols for this item
+#       len        - Length of the production (number of symbols on right hand side)
+#       lr_after   - List of all productions that immediately follow
+#       lr_before  - Grammar symbol immediately before
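+#
+# For example (illustrative, not part of the original comment): for a
+# production p of the form "expr -> expr PLUS term", LRItem(p,2) represents
+# "expr -> expr PLUS . term", i.e. lr_index is 2 and prod is
+# ('expr', 'PLUS', '.', 'term').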
+# -----------------------------------------------------------------------------
+
+class LRItem(object):
+    def __init__(self,p,n):
+        self.name       = p.name
+        self.prod       = list(p.prod)
+        self.number     = p.number
+        self.lr_index   = n
+        self.lookaheads = { }
+        self.prod.insert(n,".")
+        self.prod       = tuple(self.prod)
+        self.len        = len(self.prod)
+        self.usyms      = p.usyms
+
+    def __str__(self):
+        if self.prod:
+            s = "%s -> %s" % (self.name," ".join(self.prod))
+        else:
+            s = "%s -> <empty>" % self.name
+        return s
+
+    def __repr__(self):
+        return "LRItem("+str(self)+")"
+
+# -----------------------------------------------------------------------------
+# rightmost_terminal()
+#
+# Return the rightmost terminal from a list of symbols. Used in add_production()
+# -----------------------------------------------------------------------------
+def rightmost_terminal(symbols, terminals):
+    i = len(symbols) - 1
+    while i >= 0:
+        if symbols[i] in terminals:
+            return symbols[i]
+        i -= 1
+    return None
+
+# -----------------------------------------------------------------------------
+#                           === GRAMMAR CLASS ===
+#
+# The following class represents the contents of the specified grammar along
+# with various computed properties such as first sets, follow sets, LR items, etc.
+# This data is used for critical parts of the table generation process later.
+# -----------------------------------------------------------------------------
+
+class GrammarError(YaccError): pass
+
+class Grammar(object):
+    def __init__(self,terminals):
+        self.Productions  = [None]  # A list of all of the productions. The first
+                                    # entry is always reserved for the purpose of
+                                    # building an augmented grammar
+
+        self.Prodnames    = { }     # A dictionary mapping the names of nonterminals to a list of all
+                                    # productions of that nonterminal.
+
+        self.Prodmap      = { }     # A dictionary that is only used to detect duplicate
+                                    # productions.
+
+        self.Terminals    = { }     # A dictionary mapping the names of terminal symbols to a
+                                    # list of the rules where they are used.
+
+        for term in terminals:
+            self.Terminals[term] = []
+
+        self.Terminals['error'] = []
+
+        self.Nonterminals = { }     # A dictionary mapping names of nonterminals to a list
+                                    # of rule numbers where they are used.
+
+        self.First        = { }     # A dictionary of precomputed FIRST(x) symbols
+
+        self.Follow       = { }     # A dictionary of precomputed FOLLOW(x) symbols
+
+        self.Precedence   = { }     # Precedence rules for each terminal. Contains tuples of the
+                                    # form ('right',level) or ('nonassoc', level) or ('left',level)
+
+        self.UsedPrecedence = { }   # Precedence rules that were actually used by the grammar.
+                                    # This is only used to provide error checking and to generate
+                                    # a warning about unused precedence rules.
+
+        self.Start = None           # Starting symbol for the grammar
+
+
+    def __len__(self):
+        return len(self.Productions)
+
+    def __getitem__(self,index):
+        return self.Productions[index]
+
+    # -----------------------------------------------------------------------------
+    # set_precedence()
+    #
+    # Sets the precedence for a given terminal. assoc is the associativity such as
+    # 'left','right', or 'nonassoc'. level is a numeric level.
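+    #
+    # For example (illustrative):
+    #
+    #     g.set_precedence('PLUS',  'left', 1)
+    #     g.set_precedence('TIMES', 'left', 2)
+    #
+    # gives TIMES a higher level than PLUS, so TIMES binds more tightly when
+    # shift/reduce conflicts are resolved later.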
+ # + # ----------------------------------------------------------------------------- + + def set_precedence(self,term,assoc,level): + assert self.Productions == [None],"Must call set_precedence() before add_production()" + if term in self.Precedence: + raise GrammarError("Precedence already specified for terminal '%s'" % term) + if assoc not in ['left','right','nonassoc']: + raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'") + self.Precedence[term] = (assoc,level) + + # ----------------------------------------------------------------------------- + # add_production() + # + # Given an action function, this function assembles a production rule and + # computes its precedence level. + # + # The production rule is supplied as a list of symbols. For example, + # a rule such as 'expr : expr PLUS term' has a production name of 'expr' and + # symbols ['expr','PLUS','term']. + # + # Precedence is determined by the precedence of the right-most non-terminal + # or the precedence of a terminal specified by %prec. + # + # A variety of error checks are performed to make sure production symbols + # are valid and that %prec is used correctly. + # ----------------------------------------------------------------------------- + + def add_production(self,prodname,syms,func=None,file='',line=0): + + if prodname in self.Terminals: + raise GrammarError("%s:%d: Illegal rule name '%s'. Already defined as a token" % (file,line,prodname)) + if prodname == 'error': + raise GrammarError("%s:%d: Illegal rule name '%s'. error is a reserved word" % (file,line,prodname)) + if not _is_identifier.match(prodname): + raise GrammarError("%s:%d: Illegal rule name '%s'" % (file,line,prodname)) + + # Look for literal tokens + for n,s in enumerate(syms): + if s[0] in "'\"": + try: + c = eval(s) + if (len(c) > 1): + raise GrammarError("%s:%d: Literal token %s in rule '%s' may only be a single character" % (file,line,s, prodname)) + if not c in self.Terminals: + self.Terminals[c] = [] + syms[n] = c + continue + except SyntaxError: + pass + if not _is_identifier.match(s) and s != '%prec': + raise GrammarError("%s:%d: Illegal name '%s' in rule '%s'" % (file,line,s, prodname)) + + # Determine the precedence level + if '%prec' in syms: + if syms[-1] == '%prec': + raise GrammarError("%s:%d: Syntax error. Nothing follows %%prec" % (file,line)) + if syms[-2] != '%prec': + raise GrammarError("%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule" % (file,line)) + precname = syms[-1] + prodprec = self.Precedence.get(precname,None) + if not prodprec: + raise GrammarError("%s:%d: Nothing known about the precedence of '%s'" % (file,line,precname)) + else: + self.UsedPrecedence[precname] = 1 + del syms[-2:] # Drop %prec from the rule + else: + # If no %prec, precedence is determined by the rightmost terminal symbol + precname = rightmost_terminal(syms,self.Terminals) + prodprec = self.Precedence.get(precname,('right',0)) + + # See if the rule is already in the rulemap + map = "%s -> %s" % (prodname,syms) + if map in self.Prodmap: + m = self.Prodmap[map] + raise GrammarError("%s:%d: Duplicate rule %s. " % (file,line, m) + + "Previous definition at %s:%d" % (m.file, m.line)) + + # From this point on, everything is valid. 
Create a new Production instance + pnumber = len(self.Productions) + if not prodname in self.Nonterminals: + self.Nonterminals[prodname] = [ ] + + # Add the production number to Terminals and Nonterminals + for t in syms: + if t in self.Terminals: + self.Terminals[t].append(pnumber) + else: + if not t in self.Nonterminals: + self.Nonterminals[t] = [ ] + self.Nonterminals[t].append(pnumber) + + # Create a production and add it to the list of productions + p = Production(pnumber,prodname,syms,prodprec,func,file,line) + self.Productions.append(p) + self.Prodmap[map] = p + + # Add to the global productions list + try: + self.Prodnames[prodname].append(p) + except KeyError: + self.Prodnames[prodname] = [ p ] + return 0 + + # ----------------------------------------------------------------------------- + # set_start() + # + # Sets the starting symbol and creates the augmented grammar. Production + # rule 0 is S' -> start where start is the start symbol. + # ----------------------------------------------------------------------------- + + def set_start(self,start=None): + if not start: + start = self.Productions[1].name + if start not in self.Nonterminals: + raise GrammarError("start symbol %s undefined" % start) + self.Productions[0] = Production(0,"S'",[start]) + self.Nonterminals[start].append(0) + self.Start = start + + # ----------------------------------------------------------------------------- + # find_unreachable() + # + # Find all of the nonterminal symbols that can't be reached from the starting + # symbol. Returns a list of nonterminals that can't be reached. + # ----------------------------------------------------------------------------- + + def find_unreachable(self): + + # Mark all symbols that are reachable from a symbol s + def mark_reachable_from(s): + if reachable[s]: + # We've already reached symbol s. + return + reachable[s] = 1 + for p in self.Prodnames.get(s,[]): + for r in p.prod: + mark_reachable_from(r) + + reachable = { } + for s in list(self.Terminals) + list(self.Nonterminals): + reachable[s] = 0 + + mark_reachable_from( self.Productions[0].prod[0] ) + + return [s for s in list(self.Nonterminals) + if not reachable[s]] + + # ----------------------------------------------------------------------------- + # infinite_cycles() + # + # This function looks at the various parsing rules and tries to detect + # infinite recursion cycles (grammar rules where there is no possible way + # to derive a string of only terminals). + # ----------------------------------------------------------------------------- + + def infinite_cycles(self): + terminates = {} + + # Terminals: + for t in self.Terminals: + terminates[t] = 1 + + terminates['$end'] = 1 + + # Nonterminals: + + # Initialize to false: + for n in self.Nonterminals: + terminates[n] = 0 + + # Then propagate termination until no change: + while 1: + some_change = 0 + for (n,pl) in self.Prodnames.items(): + # Nonterminal n terminates iff any of its productions terminates. + for p in pl: + # Production p terminates iff all of its rhs symbols terminate. + for s in p.prod: + if not terminates[s]: + # The symbol s does not terminate, + # so production p does not terminate. + p_terminates = 0 + break + else: + # didn't break from the loop, + # so every symbol s terminates + # so production p terminates. + p_terminates = 1 + + if p_terminates: + # symbol n terminates! + if not terminates[n]: + terminates[n] = 1 + some_change = 1 + # Don't need to consider any more productions for this n. 
+                        break
+
+            if not some_change:
+                break
+
+        infinite = []
+        for (s,term) in terminates.items():
+            if not term:
+                if not s in self.Prodnames and not s in self.Terminals and s != 'error':
+                    # s is used-but-not-defined, and we've already warned of that,
+                    # so it would be overkill to say that it's also non-terminating.
+                    pass
+                else:
+                    infinite.append(s)
+
+        return infinite
+
+
+    # -----------------------------------------------------------------------------
+    # undefined_symbols()
+    #
+    # Find all symbols that were used in the grammar, but not defined as tokens or
+    # grammar rules. Returns a list of tuples (sym, prod) where sym is the symbol
+    # and prod is the production where the symbol was used.
+    # -----------------------------------------------------------------------------
+    def undefined_symbols(self):
+        result = []
+        for p in self.Productions:
+            if not p: continue
+
+            for s in p.prod:
+                if not s in self.Prodnames and not s in self.Terminals and s != 'error':
+                    result.append((s,p))
+        return result
+
+    # -----------------------------------------------------------------------------
+    # unused_terminals()
+    #
+    # Find all terminals that were defined, but not used by the grammar. Returns
+    # a list of all symbols.
+    # -----------------------------------------------------------------------------
+    def unused_terminals(self):
+        unused_tok = []
+        for s,v in self.Terminals.items():
+            if s != 'error' and not v:
+                unused_tok.append(s)
+
+        return unused_tok
+
+    # ------------------------------------------------------------------------------
+    # unused_rules()
+    #
+    # Find all grammar rules that were defined, but not used (maybe not reachable).
+    # Returns a list of productions.
+    # ------------------------------------------------------------------------------
+    def unused_rules(self):
+        unused_prod = []
+        for s,v in self.Nonterminals.items():
+            if not v:
+                p = self.Prodnames[s][0]
+                unused_prod.append(p)
+        return unused_prod
+
+    # -----------------------------------------------------------------------------
+    # unused_precedence()
+    #
+    # Returns a list of tuples (term,precedence) corresponding to precedence
+    # rules that were never used by the grammar. term is the name of the terminal
+    # on which precedence was applied and precedence is a string such as 'left' or
+    # 'right' corresponding to the type of precedence.
+    # -----------------------------------------------------------------------------
+    def unused_precedence(self):
+        unused = []
+        for termname in self.Precedence:
+            if not (termname in self.Terminals or termname in self.UsedPrecedence):
+                unused.append((termname,self.Precedence[termname][0]))
+
+        return unused
+
+    # -------------------------------------------------------------------------
+    # _first()
+    #
+    # Compute the value of FIRST1(beta) where beta is a tuple of symbols.
+    #
+    # During execution of compute_first(), the result may be incomplete.
+    # Afterward (e.g., when called from compute_follow()), it will be complete.
+    # -------------------------------------------------------------------------
+    def _first(self,beta):
+
+        # We are computing First(x1,x2,x3,...,xn)
+        result = [ ]
+        for x in beta:
+            x_produces_empty = 0
+
+            # Add all the non-<empty> symbols of First[x] to the result.
+            for f in self.First[x]:
+                if f == '<empty>':
+                    x_produces_empty = 1
+                else:
+                    if f not in result: result.append(f)
+
+            if x_produces_empty:
+                # We have to consider the next x in beta,
+                # i.e. stay in the loop.
+                pass
+            else:
+                # We don't have to consider any further symbols in beta.
+ break + else: + # There was no 'break' from the loop, + # so x_produces_empty was true for all x in beta, + # so beta produces empty as well. + result.append('<empty>') + + return result + + # ------------------------------------------------------------------------- + # compute_first() + # + # Compute the value of FIRST1(X) for all symbols + # ------------------------------------------------------------------------- + def compute_first(self): + if self.First: + return self.First + + # Terminals: + for t in self.Terminals: + self.First[t] = [t] + + self.First['$end'] = ['$end'] + + # Nonterminals: + + # Initialize to the empty set: + for n in self.Nonterminals: + self.First[n] = [] + + # Then propagate symbols until no change: + while 1: + some_change = 0 + for n in self.Nonterminals: + for p in self.Prodnames[n]: + for f in self._first(p.prod): + if f not in self.First[n]: + self.First[n].append( f ) + some_change = 1 + if not some_change: + break + + return self.First + + # --------------------------------------------------------------------- + # compute_follow() + # + # Computes all of the follow sets for every non-terminal symbol. The + # follow set is the set of all symbols that might follow a given + # non-terminal. See the Dragon book, 2nd Ed. p. 189. + # --------------------------------------------------------------------- + def compute_follow(self,start=None): + # If already computed, return the result + if self.Follow: + return self.Follow + + # If first sets not computed yet, do that first. + if not self.First: + self.compute_first() + + # Add '$end' to the follow list of the start symbol + for k in self.Nonterminals: + self.Follow[k] = [ ] + + if not start: + start = self.Productions[1].name + + self.Follow[start] = [ '$end' ] + + while 1: + didadd = 0 + for p in self.Productions[1:]: + # Here is the production set + for i in range(len(p.prod)): + B = p.prod[i] + if B in self.Nonterminals: + # Okay. We got a non-terminal in a production + fst = self._first(p.prod[i+1:]) + hasempty = 0 + for f in fst: + if f != '<empty>' and f not in self.Follow[B]: + self.Follow[B].append(f) + didadd = 1 + if f == '<empty>': + hasempty = 1 + if hasempty or i == (len(p.prod)-1): + # Add elements of follow(a) to follow(b) + for f in self.Follow[p.name]: + if f not in self.Follow[B]: + self.Follow[B].append(f) + didadd = 1 + if not didadd: break + return self.Follow + + + # ----------------------------------------------------------------------------- + # build_lritems() + # + # This function walks the list of productions and builds a complete set of the + # LR items. The LR items are stored in two ways: First, they are uniquely + # numbered and placed in the list _lritems. Second, a linked list of LR items + # is built for each production. For example: + # + # E -> E PLUS E + # + # Creates the list + # + # [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . 
]
+# -----------------------------------------------------------------------------
+
+    def build_lritems(self):
+        for p in self.Productions:
+            lastlri = p
+            i = 0
+            lr_items = []
+            while 1:
+                if i > len(p):
+                    lri = None
+                else:
+                    lri = LRItem(p,i)
+                    # Precompute the list of productions immediately following
+                    try:
+                        lri.lr_after = self.Prodnames[lri.prod[i+1]]
+                    except (IndexError,KeyError):
+                        lri.lr_after = []
+                    try:
+                        lri.lr_before = lri.prod[i-1]
+                    except IndexError:
+                        lri.lr_before = None
+
+                lastlri.lr_next = lri
+                if not lri: break
+                lr_items.append(lri)
+                lastlri = lri
+                i += 1
+            p.lr_items = lr_items
+
+# -----------------------------------------------------------------------------
+#                            == Class LRTable ==
+#
+# This class represents a basic table of LR parsing information.
+# Methods for generating the tables are not defined here. They are defined
+# in the derived class LRGeneratedTable.
+# -----------------------------------------------------------------------------
+
+class VersionError(YaccError): pass
+
+class LRTable(object):
+    def __init__(self):
+        self.lr_action = None
+        self.lr_goto = None
+        self.lr_productions = None
+        self.lr_method = None
+
+    def read_table(self,module):
+        if isinstance(module,types.ModuleType):
+            parsetab = module
+        else:
+            if sys.version_info[0] < 3:
+                exec("import %s as parsetab" % module)
+            else:
+                env = { }
+                exec("import %s as parsetab" % module, env, env)
+                parsetab = env['parsetab']
+
+        if parsetab._tabversion != __tabversion__:
+            raise VersionError("yacc table file version is out of date")
+
+        self.lr_action = parsetab._lr_action
+        self.lr_goto = parsetab._lr_goto
+
+        self.lr_productions = []
+        for p in parsetab._lr_productions:
+            self.lr_productions.append(MiniProduction(*p))
+
+        self.lr_method = parsetab._lr_method
+        return parsetab._lr_signature
+
+    def read_pickle(self,filename):
+        try:
+            import cPickle as pickle
+        except ImportError:
+            import pickle
+
+        in_f = open(filename,"rb")
+
+        tabversion = pickle.load(in_f)
+        if tabversion != __tabversion__:
+            raise VersionError("yacc table file version is out of date")
+        self.lr_method = pickle.load(in_f)
+        signature = pickle.load(in_f)
+        self.lr_action = pickle.load(in_f)
+        self.lr_goto = pickle.load(in_f)
+        productions = pickle.load(in_f)
+
+        self.lr_productions = []
+        for p in productions:
+            self.lr_productions.append(MiniProduction(*p))
+
+        in_f.close()
+        return signature
+
+    # Bind all production function names to callable objects in pdict
+    def bind_callables(self,pdict):
+        for p in self.lr_productions:
+            p.bind(pdict)
+
+# -----------------------------------------------------------------------------
+#                           === LR Generator ===
+#
+# The following classes and functions are used to generate LR parsing tables on
+# a grammar.
+# -----------------------------------------------------------------------------
+
+# -----------------------------------------------------------------------------
+# digraph()
+# traverse()
+#
+# The following two functions are used to compute set valued functions
+# of the form:
+#
+#     F(x) = F'(x) U U{F(y) | x R y}
+#
+# This is used to compute the values of Read() sets as well as FOLLOW sets
+# in LALR(1) generation.
+#
+# Inputs:  X    - An input set
+#          R    - A relation
+#          FP   - Set-valued function
+# ------------------------------------------------------------------------------
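+# As an illustrative example (not part of the original comment): with
+# X = {a,b}, the single relation a R b, FP(a) = [1] and FP(b) = [2],
+# digraph() returns F with F[a] = [1,2] and F[b] = [2], since F(a) must
+# absorb everything in F(b).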
+
+def digraph(X,R,FP):
+    N = { }
+    for x in X:
+        N[x] = 0
+    stack = []
+    F = { }
+    for x in X:
+        if N[x] == 0: traverse(x,N,stack,F,X,R,FP)
+    return F
+
+def traverse(x,N,stack,F,X,R,FP):
+    stack.append(x)
+    d = len(stack)
+    N[x] = d
+    F[x] = FP(x)             # F(X) <- F'(x)
+
+    rel = R(x)               # Get y's related to x
+    for y in rel:
+        if N[y] == 0:
+            traverse(y,N,stack,F,X,R,FP)
+        N[x] = min(N[x],N[y])
+        for a in F.get(y,[]):
+            if a not in F[x]: F[x].append(a)
+    if N[x] == d:
+        N[stack[-1]] = MAXINT
+        F[stack[-1]] = F[x]
+        element = stack.pop()
+        while element != x:
+            N[stack[-1]] = MAXINT
+            F[stack[-1]] = F[x]
+            element = stack.pop()
+
+class LALRError(YaccError): pass
+
+# -----------------------------------------------------------------------------
+#                             == LRGeneratedTable ==
+#
+# This class implements the LR table generation algorithm. There are no
+# public methods except for write()
+# -----------------------------------------------------------------------------
+
+class LRGeneratedTable(LRTable):
+    def __init__(self,grammar,method='LALR',log=None):
+        if method not in ['SLR','LALR']:
+            raise LALRError("Unsupported method %s" % method)
+
+        self.grammar = grammar
+        self.lr_method = method
+
+        # Set up the logger
+        if not log:
+            log = NullLogger()
+        self.log = log
+
+        # Internal attributes
+        self.lr_action       = {}      # Action table
+        self.lr_goto         = {}      # Goto table
+        self.lr_productions  = grammar.Productions   # Copy of grammar Production array
+        self.lr_goto_cache   = {}      # Cache of computed gotos
+        self.lr0_cidhash     = {}      # Cache of closures
+
+        self._add_count      = 0       # Internal counter used to detect cycles
+
+        # Diagnostic information filled in by the table generator
+        self.sr_conflict     = 0
+        self.rr_conflict     = 0
+        self.conflicts       = []      # List of conflicts
+
+        self.sr_conflicts    = []
+        self.rr_conflicts    = []
+
+        # Build the tables
+        self.grammar.build_lritems()
+        self.grammar.compute_first()
+        self.grammar.compute_follow()
+        self.lr_parse_table()
+
+    # Compute the LR(0) closure operation on I, where I is a set of LR(0) items.
+
+    def lr0_closure(self,I):
+        self._add_count += 1
+
+        # Add everything in I to J
+        J = I[:]
+        didadd = 1
+        while didadd:
+            didadd = 0
+            for j in J:
+                for x in j.lr_after:
+                    if getattr(x,"lr0_added",0) == self._add_count: continue
+                    # Add B --> .G to J
+                    J.append(x.lr_next)
+                    x.lr0_added = self._add_count
+                    didadd = 1
+
+        return J
+
+    # Compute the LR(0) goto function goto(I,X) where I is a set
+    # of LR(0) items and X is a grammar symbol. This function is written
+    # in a way that guarantees uniqueness of the generated goto sets
+    # (i.e. the same goto set will never be returned as two different Python
+    # objects). With uniqueness, we can later do fast set comparisons using
+    # id(obj) instead of element-wise comparison.
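+    #
+    # As an illustrative consequence of the caching (editor's sketch, not in
+    # the original comment), two calls with the same arguments, e.g.
+    #
+    #     g1 = self.lr0_goto(I,'PLUS')
+    #     g2 = self.lr0_goto(I,'PLUS')
+    #
+    # return the identical list object (g1 is g2), which is what lets
+    # lr0_items() and lr_parse_table() index states by id(g).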
+
+    def lr0_goto(self,I,x):
+        # First we look for a previously cached entry
+        g = self.lr_goto_cache.get((id(I),x),None)
+        if g: return g
+
+        # Now we generate the goto set in a way that guarantees uniqueness
+        # of the result
+
+        s = self.lr_goto_cache.get(x,None)
+        if not s:
+            s = { }
+            self.lr_goto_cache[x] = s
+
+        gs = [ ]
+        for p in I:
+            n = p.lr_next
+            if n and n.lr_before == x:
+                s1 = s.get(id(n),None)
+                if not s1:
+                    s1 = { }
+                    s[id(n)] = s1
+                gs.append(n)
+                s = s1
+        g = s.get('$end',None)
+        if not g:
+            if gs:
+                g = self.lr0_closure(gs)
+                s['$end'] = g
+            else:
+                s['$end'] = gs
+        self.lr_goto_cache[(id(I),x)] = g
+        return g
+
+    # Compute the LR(0) sets of item function
+    def lr0_items(self):
+
+        C = [ self.lr0_closure([self.grammar.Productions[0].lr_next]) ]
+        i = 0
+        for I in C:
+            self.lr0_cidhash[id(I)] = i
+            i += 1
+
+        # Loop over the items in C and each grammar symbol
+        i = 0
+        while i < len(C):
+            I = C[i]
+            i += 1
+
+            # Collect all of the symbols that could possibly be in the goto(I,X) sets
+            asyms = { }
+            for ii in I:
+                for s in ii.usyms:
+                    asyms[s] = None
+
+            for x in asyms:
+                g = self.lr0_goto(I,x)
+                if not g: continue
+                if id(g) in self.lr0_cidhash: continue
+                self.lr0_cidhash[id(g)] = len(C)
+                C.append(g)
+
+        return C
+
+    # -----------------------------------------------------------------------------
+    #                       ==== LALR(1) Parsing ====
+    #
+    # LALR(1) parsing is almost exactly the same as SLR except that instead of
+    # relying upon Follow() sets when performing reductions, a more selective
+    # lookahead set that incorporates the state of the LR(0) machine is utilized.
+    # Thus, we mainly just have to focus on calculating the lookahead sets.
+    #
+    # The method used here is due to DeRemer and Pennello (1982).
+    #
+    # DeRemer, F. L., and T. J. Pennello: "Efficient Computation of LALR(1)
+    # Lookahead Sets", ACM Transactions on Programming Languages and Systems,
+    # Vol. 4, No. 4, Oct. 1982, pp. 615-649
+    #
+    # Further details can also be found in:
+    #
+    #  J. Tremblay and P. Sorenson, "The Theory and Practice of Compiler Writing",
+    #      McGraw-Hill Book Company, (1985).
+    #
+    # -----------------------------------------------------------------------------
+
+    # -----------------------------------------------------------------------------
+    # compute_nullable_nonterminals()
+    #
+    # Creates a dictionary containing all of the non-terminals that might produce
+    # an empty production.
+    # -----------------------------------------------------------------------------
+
+    def compute_nullable_nonterminals(self):
+        nullable = {}
+        num_nullable = 0
+        while 1:
+            for p in self.grammar.Productions[1:]:
+                if p.len == 0:
+                    nullable[p.name] = 1
+                    continue
+                for t in p.prod:
+                    if not t in nullable: break
+                else:
+                    nullable[p.name] = 1
+            if len(nullable) == num_nullable: break
+            num_nullable = len(nullable)
+        return nullable
+
+    # -----------------------------------------------------------------------------
+    # find_nonterminal_transitions(C)
+    #
+    # Given a set of LR(0) items, this function finds all of the non-terminal
+    # transitions. These are transitions in which a dot appears immediately before
+    # a non-terminal. Returns a list of tuples of the form (state,N) where state
+    # is the state number and N is the nonterminal symbol.
+    #
+    # The input C is the set of LR(0) items.
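+    #
+    # For example (illustrative, with hypothetical grammar symbols): a state 3
+    # containing the item "stmt -> IF expr . block" contributes (3,'block'),
+    # since 'block' is a nonterminal right after the dot, while the terminal
+    # PLUS in "expr -> expr . PLUS term" contributes nothing.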
+ # ----------------------------------------------------------------------------- + + def find_nonterminal_transitions(self,C): + trans = [] + for state in range(len(C)): + for p in C[state]: + if p.lr_index < p.len - 1: + t = (state,p.prod[p.lr_index+1]) + if t[1] in self.grammar.Nonterminals: + if t not in trans: trans.append(t) + state = state + 1 + return trans + + # ----------------------------------------------------------------------------- + # dr_relation() + # + # Computes the DR(p,A) relationships for non-terminal transitions. The input + # is a tuple (state,N) where state is a number and N is a nonterminal symbol. + # + # Returns a list of terminals. + # ----------------------------------------------------------------------------- + + def dr_relation(self,C,trans,nullable): + dr_set = { } + state,N = trans + terms = [] + + g = self.lr0_goto(C[state],N) + for p in g: + if p.lr_index < p.len - 1: + a = p.prod[p.lr_index+1] + if a in self.grammar.Terminals: + if a not in terms: terms.append(a) + + # This extra bit is to handle the start state + if state == 0 and N == self.grammar.Productions[0].prod[0]: + terms.append('$end') + + return terms + + # ----------------------------------------------------------------------------- + # reads_relation() + # + # Computes the READS() relation (p,A) READS (t,C). + # ----------------------------------------------------------------------------- + + def reads_relation(self,C, trans, empty): + # Look for empty transitions + rel = [] + state, N = trans + + g = self.lr0_goto(C[state],N) + j = self.lr0_cidhash.get(id(g),-1) + for p in g: + if p.lr_index < p.len - 1: + a = p.prod[p.lr_index + 1] + if a in empty: + rel.append((j,a)) + + return rel + + # ----------------------------------------------------------------------------- + # compute_lookback_includes() + # + # Determines the lookback and includes relations + # + # LOOKBACK: + # + # This relation is determined by running the LR(0) state machine forward. + # For example, starting with a production "N : . A B C", we run it forward + # to obtain "N : A B C ." We then build a relationship between this final + # state and the starting state. These relationships are stored in a dictionary + # lookdict. + # + # INCLUDES: + # + # Computes the INCLUDE() relation (p,A) INCLUDES (p',B). + # + # This relation is used to determine non-terminal transitions that occur + # inside of other non-terminal transition states. (p,A) INCLUDES (p', B) + # if the following holds: + # + # B -> LAT, where T -> epsilon and p' -L-> p + # + # L is essentially a prefix (which may be empty), T is a suffix that must be + # able to derive an empty string. State p' must lead to state p with the string L. + # + # ----------------------------------------------------------------------------- + + def compute_lookback_includes(self,C,trans,nullable): + + lookdict = {} # Dictionary of lookback relations + includedict = {} # Dictionary of include relations + + # Make a dictionary of non-terminal transitions + dtrans = {} + for t in trans: + dtrans[t] = 1 + + # Loop over all transitions and compute lookbacks and includes + for state,N in trans: + lookb = [] + includes = [] + for p in C[state]: + if p.name != N: continue + + # Okay, we have a name match. We now follow the production all the way + # through the state machine until we get the . 
on the right hand side
+
+            lr_index = p.lr_index
+            j = state
+            while lr_index < p.len - 1:
+                lr_index = lr_index + 1
+                t = p.prod[lr_index]
+
+                # Check to see if this symbol and state are a non-terminal transition
+                if (j,t) in dtrans:
+                    # Yes. Okay, there is some chance that this is an includes relation
+                    # the only way to know for certain is whether the rest of the
+                    # production derives empty
+
+                    li = lr_index + 1
+                    while li < p.len:
+                        if p.prod[li] in self.grammar.Terminals: break   # No, forget it
+                        if not p.prod[li] in nullable: break
+                        li = li + 1
+                    else:
+                        # Appears to be a relation between (j,t) and (state,N)
+                        includes.append((j,t))
+
+                g = self.lr0_goto(C[j],t)                # Go to next set
+                j = self.lr0_cidhash.get(id(g),-1)       # Go to next state
+
+            # When we get here, j is the final state, now we have to locate the production
+            for r in C[j]:
+                if r.name != p.name: continue
+                if r.len != p.len: continue
+                i = 0
+                # This loop is comparing a production ". A B C" with "A B C ."
+                while i < r.lr_index:
+                    if r.prod[i] != p.prod[i+1]: break
+                    i = i + 1
+                else:
+                    lookb.append((j,r))
+            for i in includes:
+                if not i in includedict: includedict[i] = []
+                includedict[i].append((state,N))
+            lookdict[(state,N)] = lookb
+
+        return lookdict,includedict
+
+    # -----------------------------------------------------------------------------
+    # compute_read_sets()
+    #
+    # Given a set of LR(0) items, this function computes the read sets.
+    #
+    # Inputs:  C        = Set of LR(0) items
+    #          ntrans   = Set of nonterminal transitions
+    #          nullable = Set of empty transitions
+    #
+    # Returns a set containing the read sets
+    # -----------------------------------------------------------------------------
+
+    def compute_read_sets(self,C, ntrans, nullable):
+        FP = lambda x: self.dr_relation(C,x,nullable)
+        R  = lambda x: self.reads_relation(C,x,nullable)
+        F = digraph(ntrans,R,FP)
+        return F
+
+    # -----------------------------------------------------------------------------
+    # compute_follow_sets()
+    #
+    # Given a set of LR(0) items, a set of non-terminal transitions, a readset,
+    # and an include set, this function computes the follow sets
+    #
+    # Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)}
+    #
+    # Inputs:
+    #          ntrans   = Set of nonterminal transitions
+    #          readsets = Readset (previously computed)
+    #          inclsets = Include sets (previously computed)
+    #
+    # Returns a set containing the follow sets
+    # -----------------------------------------------------------------------------
+
+    def compute_follow_sets(self,ntrans,readsets,inclsets):
+        FP = lambda x: readsets[x]
+        R  = lambda x: inclsets.get(x,[])
+        F = digraph(ntrans,R,FP)
+        return F
+
+    # -----------------------------------------------------------------------------
+    # add_lookaheads()
+    #
+    # Attaches the lookahead symbols to grammar rules.
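+    #
+    # For example (illustrative, with hypothetical values): if lookbacks maps
+    # the transition (p,A) to [(q, r)] and followset[(p,A)] is ['PLUS','$end'],
+    # then production r receives r.lookaheads[q] = ['PLUS','$end'], i.e.
+    # state q reduces by r only on those tokens.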
+ # + # Inputs: lookbacks - Set of lookback relations + # followset - Computed follow set + # + # This function directly attaches the lookaheads to productions contained + # in the lookbacks set + # ----------------------------------------------------------------------------- + + def add_lookaheads(self,lookbacks,followset): + for trans,lb in lookbacks.items(): + # Loop over productions in lookback + for state,p in lb: + if not state in p.lookaheads: + p.lookaheads[state] = [] + f = followset.get(trans,[]) + for a in f: + if a not in p.lookaheads[state]: p.lookaheads[state].append(a) + + # ----------------------------------------------------------------------------- + # add_lalr_lookaheads() + # + # This function does all of the work of adding lookahead information for use + # with LALR parsing + # ----------------------------------------------------------------------------- + + def add_lalr_lookaheads(self,C): + # Determine all of the nullable nonterminals + nullable = self.compute_nullable_nonterminals() + + # Find all non-terminal transitions + trans = self.find_nonterminal_transitions(C) + + # Compute read sets + readsets = self.compute_read_sets(C,trans,nullable) + + # Compute lookback/includes relations + lookd, included = self.compute_lookback_includes(C,trans,nullable) + + # Compute LALR FOLLOW sets + followsets = self.compute_follow_sets(trans,readsets,included) + + # Add all of the lookaheads + self.add_lookaheads(lookd,followsets) + + # ----------------------------------------------------------------------------- + # lr_parse_table() + # + # This function constructs the parse tables for SLR or LALR + # ----------------------------------------------------------------------------- + def lr_parse_table(self): + Productions = self.grammar.Productions + Precedence = self.grammar.Precedence + goto = self.lr_goto # Goto array + action = self.lr_action # Action array + log = self.log # Logger for output + + actionp = { } # Action production array (temporary) + + log.info("Parsing method: %s", self.lr_method) + + # Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items + # This determines the number of states + + C = self.lr0_items() + + if self.lr_method == 'LALR': + self.add_lalr_lookaheads(C) + + # Build the parser table, state by state + st = 0 + for I in C: + # Loop over each production in I + actlist = [ ] # List of actions + st_action = { } + st_actionp = { } + st_goto = { } + log.info("") + log.info("state %d", st) + log.info("") + for p in I: + log.info(" (%d) %s", p.number, str(p)) + log.info("") + + for p in I: + if p.len == p.lr_index + 1: + if p.name == "S'": + # Start symbol. Accept! + st_action["$end"] = 0 + st_actionp["$end"] = p + else: + # We are at the end of a production. Reduce! + if self.lr_method == 'LALR': + laheads = p.lookaheads[st] + else: + laheads = self.grammar.Follow[p.name] + for a in laheads: + actlist.append((a,p,"reduce using rule %d (%s)" % (p.number,p))) + r = st_action.get(a,None) + if r is not None: + # Whoa. Have a shift/reduce or reduce/reduce conflict + if r > 0: + # Need to decide on shift or reduce here + # By default we favor shifting. Need to add + # some precedence rules here. + sprec,slevel = Productions[st_actionp[a].number].prec + rprec,rlevel = Precedence.get(a,('right',0)) + if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')): + # We really need to reduce here. + st_action[a] = -p.number + st_actionp[a] = p + if not slevel and not rlevel: + log.info(" ! 
shift/reduce conflict for %s resolved as reduce",a) + self.sr_conflicts.append((st,a,'reduce')) + Productions[p.number].reduced += 1 + elif (slevel == rlevel) and (rprec == 'nonassoc'): + st_action[a] = None + else: + # Hmmm. Guess we'll keep the shift + if not rlevel: + log.info(" ! shift/reduce conflict for %s resolved as shift",a) + self.sr_conflicts.append((st,a,'shift')) + elif r < 0: + # Reduce/reduce conflict. In this case, we favor the rule + # that was defined first in the grammar file + oldp = Productions[-r] + pp = Productions[p.number] + if oldp.line > pp.line: + st_action[a] = -p.number + st_actionp[a] = p + chosenp,rejectp = pp,oldp + Productions[p.number].reduced += 1 + Productions[oldp.number].reduced -= 1 + else: + chosenp,rejectp = oldp,pp + self.rr_conflicts.append((st,chosenp,rejectp)) + log.info(" ! reduce/reduce conflict for %s resolved using rule %d (%s)", a,st_actionp[a].number, st_actionp[a]) + else: + raise LALRError("Unknown conflict in state %d" % st) + else: + st_action[a] = -p.number + st_actionp[a] = p + Productions[p.number].reduced += 1 + else: + i = p.lr_index + a = p.prod[i+1] # Get symbol right after the "." + if a in self.grammar.Terminals: + g = self.lr0_goto(I,a) + j = self.lr0_cidhash.get(id(g),-1) + if j >= 0: + # We are in a shift state + actlist.append((a,p,"shift and go to state %d" % j)) + r = st_action.get(a,None) + if r is not None: + # Whoa have a shift/reduce or shift/shift conflict + if r > 0: + if r != j: + raise LALRError("Shift/shift conflict in state %d" % st) + elif r < 0: + # Do a precedence check. + # - if precedence of reduce rule is higher, we reduce. + # - if precedence of reduce is same and left assoc, we reduce. + # - otherwise we shift + rprec,rlevel = Productions[st_actionp[a].number].prec + sprec,slevel = Precedence.get(a,('right',0)) + if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')): + # We decide to shift here... highest precedence to shift + Productions[st_actionp[a].number].reduced -= 1 + st_action[a] = j + st_actionp[a] = p + if not rlevel: + log.info(" ! shift/reduce conflict for %s resolved as shift",a) + self.sr_conflicts.append((st,a,'shift')) + elif (slevel == rlevel) and (rprec == 'nonassoc'): + st_action[a] = None + else: + # Hmmm. Guess we'll keep the reduce + if not slevel and not rlevel: + log.info(" ! shift/reduce conflict for %s resolved as reduce",a) + self.sr_conflicts.append((st,a,'reduce')) + + else: + raise LALRError("Unknown conflict in state %d" % st) + else: + st_action[a] = j + st_actionp[a] = p + + # Print the actions associated with each terminal + _actprint = { } + for a,p,m in actlist: + if a in st_action: + if p is st_actionp[a]: + log.info(" %-15s %s",a,m) + _actprint[(a,m)] = 1 + log.info("") + # Print the actions that were not used. (debugging) + not_used = 0 + for a,p,m in actlist: + if a in st_action: + if p is not st_actionp[a]: + if not (a,m) in _actprint: + log.debug(" ! 
%-15s [ %s ]",a,m) + not_used = 1 + _actprint[(a,m)] = 1 + if not_used: + log.debug("") + + # Construct the goto table for this state + + nkeys = { } + for ii in I: + for s in ii.usyms: + if s in self.grammar.Nonterminals: + nkeys[s] = None + for n in nkeys: + g = self.lr0_goto(I,n) + j = self.lr0_cidhash.get(id(g),-1) + if j >= 0: + st_goto[n] = j + log.info(" %-30s shift and go to state %d",n,j) + + action[st] = st_action + actionp[st] = st_actionp + goto[st] = st_goto + st += 1 + + + # ----------------------------------------------------------------------------- + # write() + # + # This function writes the LR parsing tables to a file + # ----------------------------------------------------------------------------- + + def write_table(self,modulename,outputdir='',signature=""): + basemodulename = modulename.split(".")[-1] + filename = os.path.join(outputdir,basemodulename) + ".py" + try: + f = open(filename,"w") + + f.write(""" +# %s +# This file is automatically generated. Do not edit. +_tabversion = %r + +_lr_method = %r + +_lr_signature = %r + """ % (filename, __tabversion__, self.lr_method, signature)) + + # Change smaller to 0 to go back to original tables + smaller = 1 + + # Factor out names to try and make smaller + if smaller: + items = { } + + for s,nd in self.lr_action.items(): + for name,v in nd.items(): + i = items.get(name) + if not i: + i = ([],[]) + items[name] = i + i[0].append(s) + i[1].append(v) + + f.write("\n_lr_action_items = {") + for k,v in items.items(): + f.write("%r:([" % k) + for i in v[0]: + f.write("%r," % i) + f.write("],[") + for i in v[1]: + f.write("%r," % i) + + f.write("]),") + f.write("}\n") + + f.write(""" +_lr_action = { } +for _k, _v in _lr_action_items.items(): + for _x,_y in zip(_v[0],_v[1]): + if not _x in _lr_action: _lr_action[_x] = { } + _lr_action[_x][_k] = _y +del _lr_action_items +""") + + else: + f.write("\n_lr_action = { "); + for k,v in self.lr_action.items(): + f.write("(%r,%r):%r," % (k[0],k[1],v)) + f.write("}\n"); + + if smaller: + # Factor out names to try and make smaller + items = { } + + for s,nd in self.lr_goto.items(): + for name,v in nd.items(): + i = items.get(name) + if not i: + i = ([],[]) + items[name] = i + i[0].append(s) + i[1].append(v) + + f.write("\n_lr_goto_items = {") + for k,v in items.items(): + f.write("%r:([" % k) + for i in v[0]: + f.write("%r," % i) + f.write("],[") + for i in v[1]: + f.write("%r," % i) + + f.write("]),") + f.write("}\n") + + f.write(""" +_lr_goto = { } +for _k, _v in _lr_goto_items.items(): + for _x,_y in zip(_v[0],_v[1]): + if not _x in _lr_goto: _lr_goto[_x] = { } + _lr_goto[_x][_k] = _y +del _lr_goto_items +""") + else: + f.write("\n_lr_goto = { "); + for k,v in self.lr_goto.items(): + f.write("(%r,%r):%r," % (k[0],k[1],v)) + f.write("}\n"); + + # Write production table + f.write("_lr_productions = [\n") + for p in self.lr_productions: + if p.func: + f.write(" (%r,%r,%d,%r,%r,%d),\n" % (p.str,p.name, p.len, p.func,p.file,p.line)) + else: + f.write(" (%r,%r,%d,None,None,None),\n" % (str(p),p.name, p.len)) + f.write("]\n") + f.close() + + except IOError: + e = sys.exc_info()[1] + sys.stderr.write("Unable to create '%s'\n" % filename) + sys.stderr.write(str(e)+"\n") + return + + + # ----------------------------------------------------------------------------- + # pickle_table() + # + # This function pickles the LR parsing tables to a supplied file object + # ----------------------------------------------------------------------------- + + def pickle_table(self,filename,signature=""): + try: + 
import cPickle as pickle + except ImportError: + import pickle + outf = open(filename,"wb") + pickle.dump(__tabversion__,outf,pickle_protocol) + pickle.dump(self.lr_method,outf,pickle_protocol) + pickle.dump(signature,outf,pickle_protocol) + pickle.dump(self.lr_action,outf,pickle_protocol) + pickle.dump(self.lr_goto,outf,pickle_protocol) + + outp = [] + for p in self.lr_productions: + if p.func: + outp.append((p.str,p.name, p.len, p.func,p.file,p.line)) + else: + outp.append((str(p),p.name,p.len,None,None,None)) + pickle.dump(outp,outf,pickle_protocol) + outf.close() + +# ----------------------------------------------------------------------------- +# === INTROSPECTION === +# +# The following functions and classes are used to implement the PLY +# introspection features followed by the yacc() function itself. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# get_caller_module_dict() +# +# This function returns a dictionary containing all of the symbols defined within +# a caller further down the call stack. This is used to get the environment +# associated with the yacc() call if none was provided. +# ----------------------------------------------------------------------------- + +def get_caller_module_dict(levels): + try: + raise RuntimeError + except RuntimeError: + e,b,t = sys.exc_info() + f = t.tb_frame + while levels > 0: + f = f.f_back + levels -= 1 + ldict = f.f_globals.copy() + if f.f_globals != f.f_locals: + ldict.update(f.f_locals) + + return ldict + +# ----------------------------------------------------------------------------- +# parse_grammar() +# +# This takes a raw grammar rule string and parses it into production data +# ----------------------------------------------------------------------------- +def parse_grammar(doc,file,line): + grammar = [] + # Split the doc string into lines + pstrings = doc.splitlines() + lastp = None + dline = line + for ps in pstrings: + dline += 1 + p = ps.split() + if not p: continue + try: + if p[0] == '|': + # This is a continuation of a previous rule + if not lastp: + raise SyntaxError("%s:%d: Misplaced '|'" % (file,dline)) + prodname = lastp + syms = p[1:] + else: + prodname = p[0] + lastp = prodname + syms = p[2:] + assign = p[1] + if assign != ':' and assign != '::=': + raise SyntaxError("%s:%d: Syntax error. Expected ':'" % (file,dline)) + + grammar.append((file,dline,prodname,syms)) + except SyntaxError: + raise + except Exception: + raise SyntaxError("%s:%d: Syntax error in rule '%s'" % (file,dline,ps.strip())) + + return grammar + +# ----------------------------------------------------------------------------- +# ParserReflect() +# +# This class represents information extracted for building a parser including +# start symbol, error function, tokens, precedence list, action functions, +# etc. 
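+#
+# As an illustrative sketch of typical use (mirroring how the yacc() entry
+# point drives this class; pdict and errorlog are assumed names here):
+#
+#     pinfo = ParserReflect(pdict, log=errorlog)
+#     pinfo.get_all()
+#     if pinfo.validate_all():
+#         raise YaccError("Unable to build parser")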
+# ----------------------------------------------------------------------------- +class ParserReflect(object): + def __init__(self,pdict,log=None): + self.pdict = pdict + self.start = None + self.error_func = None + self.tokens = None + self.files = {} + self.grammar = [] + self.error = 0 + + if log is None: + self.log = PlyLogger(sys.stderr) + else: + self.log = log + + # Get all of the basic information + def get_all(self): + self.get_start() + self.get_error_func() + self.get_tokens() + self.get_precedence() + self.get_pfunctions() + + # Validate all of the information + def validate_all(self): + self.validate_start() + self.validate_error_func() + self.validate_tokens() + self.validate_precedence() + self.validate_pfunctions() + self.validate_files() + return self.error + + # Compute a signature over the grammar + def signature(self): + try: + from hashlib import md5 + except ImportError: + from md5 import md5 + try: + sig = md5() + if self.start: + sig.update(self.start.encode('latin-1')) + if self.prec: + sig.update("".join(["".join(p) for p in self.prec]).encode('latin-1')) + if self.tokens: + sig.update(" ".join(self.tokens).encode('latin-1')) + for f in self.pfuncs: + if f[3]: + sig.update(f[3].encode('latin-1')) + except (TypeError,ValueError): + pass + return sig.digest() + + # ----------------------------------------------------------------------------- + # validate_file() + # + # This method checks to see if there are duplicated p_rulename() functions + # in the parser module file. Without this function, it is really easy for + # users to make mistakes by cutting and pasting code fragments (and it's a real + # bugger to try and figure out why the resulting parser doesn't work). Therefore, + # we just do a little regular expression pattern matching of def statements + # to try and detect duplicates. + # ----------------------------------------------------------------------------- + + def validate_files(self): + # Match def p_funcname( + fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(') + + for filename in self.files.keys(): + base,ext = os.path.splitext(filename) + if ext != '.py': return 1 # No idea. Assume it's okay. + + try: + f = open(filename) + lines = f.readlines() + f.close() + except IOError: + continue + + counthash = { } + for linen,l in enumerate(lines): + linen += 1 + m = fre.match(l) + if m: + name = m.group(1) + prev = counthash.get(name) + if not prev: + counthash[name] = linen + else: + self.log.warning("%s:%d: Function %s redefined. 
Previously defined on line %d", filename,linen,name,prev) + + # Get the start symbol + def get_start(self): + self.start = self.pdict.get('start') + + # Validate the start symbol + def validate_start(self): + if self.start is not None: + if not isinstance(self.start,str): + self.log.error("'start' must be a string") + + # Look for error handler + def get_error_func(self): + self.error_func = self.pdict.get('p_error') + + # Validate the error function + def validate_error_func(self): + if self.error_func: + if isinstance(self.error_func,types.FunctionType): + ismethod = 0 + elif isinstance(self.error_func, types.MethodType): + ismethod = 1 + else: + self.log.error("'p_error' defined, but is not a function or method") + self.error = 1 + return + + eline = func_code(self.error_func).co_firstlineno + efile = func_code(self.error_func).co_filename + self.files[efile] = 1 + + if (func_code(self.error_func).co_argcount != 1+ismethod): + self.log.error("%s:%d: p_error() requires 1 argument",efile,eline) + self.error = 1 + + # Get the tokens map + def get_tokens(self): + tokens = self.pdict.get("tokens",None) + if not tokens: + self.log.error("No token list is defined") + self.error = 1 + return + + if not isinstance(tokens,(list, tuple)): + self.log.error("tokens must be a list or tuple") + self.error = 1 + return + + if not tokens: + self.log.error("tokens is empty") + self.error = 1 + return + + self.tokens = tokens + + # Validate the tokens + def validate_tokens(self): + # Validate the tokens. + if 'error' in self.tokens: + self.log.error("Illegal token name 'error'. Is a reserved word") + self.error = 1 + return + + terminals = {} + for n in self.tokens: + if n in terminals: + self.log.warning("Token '%s' multiply defined", n) + terminals[n] = 1 + + # Get the precedence map (if any) + def get_precedence(self): + self.prec = self.pdict.get("precedence",None) + + # Validate and parse the precedence map + def validate_precedence(self): + preclist = [] + if self.prec: + if not isinstance(self.prec,(list,tuple)): + self.log.error("precedence must be a list or tuple") + self.error = 1 + return + for level,p in enumerate(self.prec): + if not isinstance(p,(list,tuple)): + self.log.error("Bad precedence table") + self.error = 1 + return + + if len(p) < 2: + self.log.error("Malformed precedence entry %s. 
Must be (assoc, term, ..., term)",p) + self.error = 1 + return + assoc = p[0] + if not isinstance(assoc,str): + self.log.error("precedence associativity must be a string") + self.error = 1 + return + for term in p[1:]: + if not isinstance(term,str): + self.log.error("precedence items must be strings") + self.error = 1 + return + preclist.append((term,assoc,level+1)) + self.preclist = preclist + + # Get all p_functions from the grammar + def get_pfunctions(self): + p_functions = [] + for name, item in self.pdict.items(): + if name[:2] != 'p_': continue + if name == 'p_error': continue + if isinstance(item,(types.FunctionType,types.MethodType)): + line = func_code(item).co_firstlineno + file = func_code(item).co_filename + p_functions.append((line,file,name,item.__doc__)) + + # Sort all of the actions by line number + p_functions.sort() + self.pfuncs = p_functions + + + # Validate all of the p_functions + def validate_pfunctions(self): + grammar = [] + # Check for non-empty symbols + if len(self.pfuncs) == 0: + self.log.error("no rules of the form p_rulename are defined") + self.error = 1 + return + + for line, file, name, doc in self.pfuncs: + func = self.pdict[name] + if isinstance(func, types.MethodType): + reqargs = 2 + else: + reqargs = 1 + if func_code(func).co_argcount > reqargs: + self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,func.__name__) + self.error = 1 + elif func_code(func).co_argcount < reqargs: + self.log.error("%s:%d: Rule '%s' requires an argument",file,line,func.__name__) + self.error = 1 + elif not func.__doc__: + self.log.warning("%s:%d: No documentation string specified in function '%s' (ignored)",file,line,func.__name__) + else: + try: + parsed_g = parse_grammar(doc,file,line) + for g in parsed_g: + grammar.append((name, g)) + except SyntaxError: + e = sys.exc_info()[1] + self.log.error(str(e)) + self.error = 1 + + # Looks like a valid grammar rule + # Mark the file in which defined. + self.files[file] = 1 + + # Secondary validation step that looks for p_ definitions that are not functions + # or functions that look like they might be grammar rules. 
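The signature() method above and the table-cache check in yacc() below rest on one idea: hash everything that can change the generated tables, and rebuild only when the digest differs. A toy sketch of that gating (toy_signature is hypothetical; the real method also folds in the precedence table):

from hashlib import md5

def toy_signature(start, tokens, rule_docs):
    # Hash the inputs that determine the parsing tables: start symbol,
    # token list, and the grammar rule docstrings.
    sig = md5()
    sig.update(start.encode('latin-1'))
    sig.update(" ".join(tokens).encode('latin-1'))
    for doc in rule_docs:
        sig.update(doc.encode('latin-1'))
    return sig.digest()

# Any grammar change yields a new digest, so cached tables are rebuilt.
assert (toy_signature('expression', ['PLUS'], ['expression : term'])
        != toy_signature('expression', ['PLUS'], ['expression : term PLUS term']))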
+ + for n,v in self.pdict.items(): + if n[0:2] == 'p_' and isinstance(v, (types.FunctionType, types.MethodType)): continue + if n[0:2] == 't_': continue + if n[0:2] == 'p_' and n != 'p_error': + self.log.warning("'%s' not defined as a function", n) + if ((isinstance(v,types.FunctionType) and func_code(v).co_argcount == 1) or + (isinstance(v,types.MethodType) and func_code(v).co_argcount == 2)): + try: + doc = v.__doc__.split(" ") + if doc[1] == ':': + self.log.warning("%s:%d: Possible grammar rule '%s' defined without p_ prefix", + func_code(v).co_filename, func_code(v).co_firstlineno,n) + except Exception: + pass + + self.grammar = grammar + +# ----------------------------------------------------------------------------- +# yacc(module) +# +# Build a parser +# ----------------------------------------------------------------------------- + +def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None, + check_recursion=1, optimize=0, write_tables=1, debugfile=debug_file,outputdir='', + debuglog=None, errorlog = None, picklefile=None): + + global parse # Reference to the parsing method of the last built parser + + # If pickling is enabled, table files are not created + + if picklefile: + write_tables = 0 + + if errorlog is None: + errorlog = PlyLogger(sys.stderr) + + # Get the module dictionary used for the parser + if module: + _items = [(k,getattr(module,k)) for k in dir(module)] + pdict = dict(_items) + else: + pdict = get_caller_module_dict(2) + + # Collect parser information from the dictionary + pinfo = ParserReflect(pdict,log=errorlog) + pinfo.get_all() + + if pinfo.error: + raise YaccError("Unable to build parser") + + # Check signature against table files (if any) + signature = pinfo.signature() + + # Read the tables + try: + lr = LRTable() + if picklefile: + read_signature = lr.read_pickle(picklefile) + else: + read_signature = lr.read_table(tabmodule) + if optimize or (read_signature == signature): + try: + lr.bind_callables(pinfo.pdict) + parser = LRParser(lr,pinfo.error_func) + parse = parser.parse + return parser + except Exception: + e = sys.exc_info()[1] + errorlog.warning("There was a problem loading the table file: %s", repr(e)) + except VersionError: + e = sys.exc_info() + errorlog.warning(str(e)) + except Exception: + pass + + if debuglog is None: + if debug: + debuglog = PlyLogger(open(debugfile,"w")) + else: + debuglog = NullLogger() + + debuglog.info("Created by PLY version %s (http://www.dabeaz.com/ply)", __version__) + + + errors = 0 + + # Validate the parser information + if pinfo.validate_all(): + raise YaccError("Unable to build parser") + + if not pinfo.error_func: + errorlog.warning("no p_error() function is defined") + + # Create a grammar object + grammar = Grammar(pinfo.tokens) + + # Set precedence level for terminals + for term, assoc, level in pinfo.preclist: + try: + grammar.set_precedence(term,assoc,level) + except GrammarError: + e = sys.exc_info()[1] + errorlog.warning("%s",str(e)) + + # Add productions to the grammar + for funcname, gram in pinfo.grammar: + file, line, prodname, syms = gram + try: + grammar.add_production(prodname,syms,funcname,file,line) + except GrammarError: + e = sys.exc_info()[1] + errorlog.error("%s",str(e)) + errors = 1 + + # Set the grammar start symbols + try: + if start is None: + grammar.set_start(pinfo.start) + else: + grammar.set_start(start) + except GrammarError: + e = sys.exc_info()[1] + errorlog.error(str(e)) + errors = 1 + + if errors: + raise YaccError("Unable to build parser") + + # Verify 
the grammar structure + undefined_symbols = grammar.undefined_symbols() + for sym, prod in undefined_symbols: + errorlog.error("%s:%d: Symbol '%s' used, but not defined as a token or a rule",prod.file,prod.line,sym) + errors = 1 + + unused_terminals = grammar.unused_terminals() + if unused_terminals: + debuglog.info("") + debuglog.info("Unused terminals:") + debuglog.info("") + for term in unused_terminals: + errorlog.warning("Token '%s' defined, but not used", term) + debuglog.info(" %s", term) + + # Print out all productions to the debug log + if debug: + debuglog.info("") + debuglog.info("Grammar") + debuglog.info("") + for n,p in enumerate(grammar.Productions): + debuglog.info("Rule %-5d %s", n, p) + + # Find unused non-terminals + unused_rules = grammar.unused_rules() + for prod in unused_rules: + errorlog.warning("%s:%d: Rule '%s' defined, but not used", prod.file, prod.line, prod.name) + + if len(unused_terminals) == 1: + errorlog.warning("There is 1 unused token") + if len(unused_terminals) > 1: + errorlog.warning("There are %d unused tokens", len(unused_terminals)) + + if len(unused_rules) == 1: + errorlog.warning("There is 1 unused rule") + if len(unused_rules) > 1: + errorlog.warning("There are %d unused rules", len(unused_rules)) + + if debug: + debuglog.info("") + debuglog.info("Terminals, with rules where they appear") + debuglog.info("") + terms = list(grammar.Terminals) + terms.sort() + for term in terms: + debuglog.info("%-20s : %s", term, " ".join([str(s) for s in grammar.Terminals[term]])) + + debuglog.info("") + debuglog.info("Nonterminals, with rules where they appear") + debuglog.info("") + nonterms = list(grammar.Nonterminals) + nonterms.sort() + for nonterm in nonterms: + debuglog.info("%-20s : %s", nonterm, " ".join([str(s) for s in grammar.Nonterminals[nonterm]])) + debuglog.info("") + + if check_recursion: + unreachable = grammar.find_unreachable() + for u in unreachable: + errorlog.warning("Symbol '%s' is unreachable",u) + + infinite = grammar.infinite_cycles() + for inf in infinite: + errorlog.error("Infinite recursion detected for symbol '%s'", inf) + errors = 1 + + unused_prec = grammar.unused_precedence() + for term, assoc in unused_prec: + errorlog.error("Precedence rule '%s' defined for unknown symbol '%s'", assoc, term) + errors = 1 + + if errors: + raise YaccError("Unable to build parser") + + # Run the LRGeneratedTable on the grammar + if debug: + errorlog.debug("Generating %s tables", method) + + lr = LRGeneratedTable(grammar,method,debuglog) + + if debug: + num_sr = len(lr.sr_conflicts) + + # Report shift/reduce and reduce/reduce conflicts + if num_sr == 1: + errorlog.warning("1 shift/reduce conflict") + elif num_sr > 1: + errorlog.warning("%d shift/reduce conflicts", num_sr) + + num_rr = len(lr.rr_conflicts) + if num_rr == 1: + errorlog.warning("1 reduce/reduce conflict") + elif num_rr > 1: + errorlog.warning("%d reduce/reduce conflicts", num_rr) + + # Write out conflicts to the output file + if debug and (lr.sr_conflicts or lr.rr_conflicts): + debuglog.warning("") + debuglog.warning("Conflicts:") + debuglog.warning("") + + for state, tok, resolution in lr.sr_conflicts: + debuglog.warning("shift/reduce conflict for %s in state %d resolved as %s", tok, state, resolution) + + already_reported = {} + for state, rule, rejected in lr.rr_conflicts: + if (state,id(rule),id(rejected)) in already_reported: + continue + debuglog.warning("reduce/reduce conflict in state %d resolved using rule (%s)", state, rule) + debuglog.warning("rejected rule (%s) in state 
%d", rejected,state) + errorlog.warning("reduce/reduce conflict in state %d resolved using rule (%s)", state, rule) + errorlog.warning("rejected rule (%s) in state %d", rejected, state) + already_reported[state,id(rule),id(rejected)] = 1 + + warned_never = [] + for state, rule, rejected in lr.rr_conflicts: + if not rejected.reduced and (rejected not in warned_never): + debuglog.warning("Rule (%s) is never reduced", rejected) + errorlog.warning("Rule (%s) is never reduced", rejected) + warned_never.append(rejected) + + # Write the table file if requested + if write_tables: + lr.write_table(tabmodule,outputdir,signature) + + # Write a pickled version of the tables + if picklefile: + lr.pickle_table(picklefile,signature) + + # Build the parser + lr.bind_callables(pinfo.pdict) + parser = LRParser(lr,pinfo.error_func) + + parse = parser.parse + return parser diff --git a/lib/python3.4/site-packages/pycparser/plyparser.py b/lib/python3.4/site-packages/pycparser/plyparser.py new file mode 100644 index 0000000..7b86f56 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser/plyparser.py @@ -0,0 +1,55 @@ +#----------------------------------------------------------------- +# plyparser.py +# +# PLYParser class and other utilites for simplifying programming +# parsers with PLY +# +# Copyright (C) 2008-2015, Eli Bendersky +# License: BSD +#----------------------------------------------------------------- + + +class Coord(object): + """ Coordinates of a syntactic element. Consists of: + - File name + - Line number + - (optional) column number, for the Lexer + """ + __slots__ = ('file', 'line', 'column', '__weakref__') + def __init__(self, file, line, column=None): + self.file = file + self.line = line + self.column = column + + def __str__(self): + str = "%s:%s" % (self.file, self.line) + if self.column: str += ":%s" % self.column + return str + + +class ParseError(Exception): pass + + +class PLYParser(object): + def _create_opt_rule(self, rulename): + """ Given a rule name, creates an optional ply.yacc rule + for it. The name of the optional rule is + <rulename>_opt + """ + optname = rulename + '_opt' + + def optrule(self, p): + p[0] = p[1] + + optrule.__doc__ = '%s : empty\n| %s' % (optname, rulename) + optrule.__name__ = 'p_%s' % optname + setattr(self.__class__, optrule.__name__, optrule) + + def _coord(self, lineno, column=None): + return Coord( + file=self.clex.filename, + line=lineno, + column=column) + + def _parse_error(self, msg, coord): + raise ParseError("%s: %s" % (coord, msg)) diff --git a/lib/python3.4/site-packages/pycparser/yacctab.py b/lib/python3.4/site-packages/pycparser/yacctab.py new file mode 100644 index 0000000..bc3c676 --- /dev/null +++ b/lib/python3.4/site-packages/pycparser/yacctab.py @@ -0,0 +1,292 @@ + +# yacctab.py +# This file is automatically generated. Do not edit. 
+_tabversion = '3.2' + +_lr_method = 'LALR' + +_lr_signature = '\x11\x82\x05\xfb:\x10\xfeo5\xb4\x11N\xe7S\xb4b' + +_lr_action_items = {'VOID':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[6,6,-63,-74,-73,-60,-56,-57,-35,-31,-61,6,-36,-55,-70,-65,-54,6,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,6,-69,6,-72,-76,6,-59,-86,-261,-85,6,-113,-112,-32,-102,-101,6,6,6,-47,-48,6,-115,6,6,6,6,-92,6,6,6,6,-38,6,-49,6,6,-87,-93,-262,-103,-121,-120,6,6,6,6,6,-39,-41,-44,-40,-42,6,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,6,-175,-174,6,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'LBRACKET':([1,2,3,5,6,9,10,13,14,17,18,19,21,23,25,26,27,28,29,30,32,33,35,37,39,40,42,43,44,45,46,49,50,51,52,54,55,56,58,60,62,63,67,68,69,70,74,78,81,83,84,86,90,94,96,97,110,112,115,116,118,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,153,155,166,167,174,176,177,178,179,180,191,197,198,218,221,222,224,228,235,239,262,267,269,270,300,302,303,310,311,314,315,323,324,325,326,329,334,338,339,360,362,364,366,367,368,388,389,391,392,399,401,428,429,430,437,],[-263,-63,-74,-73,-60,-56,-57,-61,-263,-55,-70,-65,-54,-58,-178,65,-68,-263,-71,72,-75,-114,-66,-62,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-261,-85,-51,65,-102,-101,-28,-122,-124,-27,72,72,164,-50,-53,72,-115,-263,-263,72,72,-248,-125,-123,-252,-243,-255,-259,-256,-253,-241,-242,226,-251,-228,-257,-249,-240,-254,-250,164,264,72,72,-87,-262,-23,-84,-24,-83,-103,-121,-120,-260,-258,-237,-236,-150,-152,72,-140,264,-154,-148,-248,-89,-88,-105,-104,-116,-119,-235,-234,-233,-232,-231,-244,72,72,-143,264,-141,-149,-151,-153,-118,-117,-229,-230,264,-142,-245,264,-238,-239,]),'WCHAR_CONST':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,124,124,-47,124,-28,-263,-125,-227,124,-225,124,-224,124,-223,124,124,-222,-226,-263,-223,124,124,124,-262,124,124,-223,124,124,-184,-187,-185,-181,-182,-186,-188,124,-190,-191,-183,-189,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,-12,124,124,-11,-223,-41,-44,-40,124,-42,124,124,-156,-155,-45,-157,124,-43,124,124,124,-263,-139,-175,-174,124,-172,124,124,-158,124,-171,-159,124,124,124,124,-263,124,124,-11,-170,-173,124,-162,124,-160,124,124,-161,124,124,124,-263,124,-166,-165,-163,124,124,124,-167,-164,124,-169,-168,]),'FLOAT_CONST':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,
259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,125,125,-47,125,-28,-263,-125,-227,125,-225,125,-224,125,-223,125,125,-222,-226,-263,-223,125,125,125,-262,125,125,-223,125,125,-184,-187,-185,-181,-182,-186,-188,125,-190,-191,-183,-189,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,-12,125,125,-11,-223,-41,-44,-40,125,-42,125,125,-156,-155,-45,-157,125,-43,125,125,125,-263,-139,-175,-174,125,-172,125,125,-158,125,-171,-159,125,125,125,125,-263,125,125,-11,-170,-173,125,-162,125,-160,125,125,-161,125,125,125,-263,125,-166,-165,-163,125,125,125,-167,-164,125,-169,-168,]),'MINUS':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,112,115,118,119,120,121,122,123,124,125,126,127,128,129,130,132,134,135,137,138,139,140,141,142,143,144,145,146,147,148,149,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,224,226,227,230,231,232,233,234,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,300,309,323,324,325,326,329,334,335,336,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,362,365,370,371,373,374,375,376,379,380,381,383,384,385,390,391,392,393,395,398,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,128,128,-47,128,-28,-263,-248,-125,-252,-227,-214,-243,-255,-259,-256,-253,-241,128,-225,-242,-216,-195,128,-224,128,-251,-223,-228,128,128,-257,-222,-249,-240,244,-254,-250,-226,-263,-223,128,128,128,-262,128,128,-223,128,128,-184,-187,-185,-181,-182,-186,-188,128,-190,-191,-183,-189,-260,128,-220,-258,-237,-236,128,128,128,-214,-219,128,-217,-218,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,-12,128,128,-11,-223,-41,-44,-40,128,-42,128,128,-156,-155,-45,-157,128,-43,-248,128,-235,-234,-233,-232,-231,-244,128,128,244,244,244,-200,244,244,244,-199,244,244,-197,-196,244,244,244,244,244,-198,-263,-139,-175,-174,128,-172,128,128,-158,128,-171,-159,128,128,-221,-229,-230,128,128,-215,-263,128,128,-11,-170,-173,128,-162,128,-160,128,128,-161,128,128,128,-245,-263,-238,128,-166,-165,-163,-239,128,128,128,-167,-164,128,-169,-168,]),'RPAREN':([1,2,3,5,6,9,10,13,14,17,18,19,21,23,25,26,27,28,29,32,33,35,37,39,40,42,43,44,45,46,49,50,51,52,53,54,56,58,59,60,62,63,66,67,68,69,70,74,78,81,83,84,90,94,96,106,107,108,109,110,111,112,113,114,115,116,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,151,153,158,159,160,161,165,166,167,174,176,177,178,179,180,191,197,198,199,200,201,202,218,220,221,222,224,227,228,231,232,234,235,236,237,238,239,240,269,270,275,285,302,303,310,311,314,315,318,319,320,321,322,323,324,325,326,328,329,330,332,333,334,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,366,367,368,378,388,389,390,391,392,397,398,410,412,415,416,417,419,428,430,432,435,437,438,439,442,],[-263,-63,-74,-73,-60,-56,-57,-61,-263,-55,-70,-65,-54,-58,-178,-111,-68,-263,-71,-75,-114,-66,-62,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,90,-86,-85,-51,-113,-112,-102,-101,-263,-28,-122,-124,-27,-145,-263,-147,-50,-53,-115,-263,-263,19
7,-15,198,-128,-263,-16,-248,-126,-132,-125,-123,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,-193,-254,-250,-179,-146,-21,-22,269,270,-263,-145,-263,-87,-262,-23,-84,-24,-83,-103,-121,-120,-131,-1,-2,-130,-260,-220,-258,-237,-236,329,-150,-214,-219,-217,-152,334,336,-176,-263,-218,-154,-148,368,-14,-89,-88,-105,-104,-116,-119,-133,-127,-129,-180,390,-235,-234,-233,-232,-246,-231,392,395,396,-244,-144,-263,-145,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,-149,-151,-153,-13,-118,-117,-221,-229,-230,-177,-215,423,425,427,-247,428,-194,-245,-238,-263,440,-239,-263,443,446,]),'LONG':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[19,19,-63,-74,-73,-60,-56,-57,-35,-31,-61,19,-36,-55,-70,-65,-54,19,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,19,-69,19,-72,-76,19,-59,-86,-261,-85,19,-113,-112,-32,-102,-101,19,19,19,-47,-48,19,-115,19,19,19,19,-92,19,19,19,19,-38,19,-49,19,19,-87,-93,-262,-103,-121,-120,19,19,19,19,19,-39,-41,-44,-40,-42,19,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,19,-175,-174,19,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'PLUS':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,112,115,118,119,120,121,122,123,124,125,126,127,128,129,130,132,134,135,137,138,139,140,141,142,143,144,145,146,147,148,149,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,224,226,227,230,231,232,233,234,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,300,309,323,324,325,326,329,334,335,336,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,362,365,370,371,373,374,375,376,379,380,381,383,384,385,390,391,392,393,395,398,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,135,135,-47,135,-28,-263,-248,-125,-252,-227,-214,-243,-255,-259,-256,-253,-241,135,-225,-242,-216,-195,135,-224,135,-251,-223,-228,135,135,-257,-222,-249,-240,248,-254,-250,-226,-263,-223,135,135,135,-262,135,135,-223,135,135,-184,-187,-185,-181,-182,-186,-188,135,-190,-191,-183,-189,-260,135,-220,-258,-237,-236,135,135,135,-214,-219,135,-217,-218,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,-12,135,135,-11,-223,-41,-44,-40,135,-42,135,135,-156,-155,-45,-157,135,-43,-248,135,-235,-234,-233,-232,-231,-244,135,135,248,248,248,-200,248,248,248,-199,248,248,-197,-196,248,248,248,248,248,-198,-263,-139,-175,-174,135,-172,135,135,-158,135,-171,-159,135,135,-221,-229,-230,135,135,-215,-263,135,135,-11,-170,-173,135,-162,135,-160,135,135,-161,135,135,135,-245,-263,-238,135,-166,-165,-163,-239,135,135,135,-167,-164,135,-169,-168,]),'ELLIPSIS':([204,],[319,]),'GT':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398
,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,249,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,249,-202,-200,-204,249,-203,-199,-206,249,-197,-196,-205,249,249,249,249,-198,-221,-229,-230,-215,-245,-238,-239,]),'GOTO':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,278,-262,-41,-44,-40,-42,278,-156,-155,-45,-157,278,-43,-175,-174,-172,278,-158,-171,-159,278,-170,-173,-162,278,-160,278,-161,278,278,-166,-165,-163,278,278,-167,-164,278,-169,-168,]),'ENUM':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[24,24,-63,-74,-73,-60,-56,-57,-35,-31,-61,24,-36,-55,-70,-65,-54,24,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,24,-69,24,-72,-76,24,-59,-86,-261,-85,24,-113,-112,-32,-102,-101,24,24,24,-47,-48,24,-115,24,24,24,24,-92,24,24,24,24,-38,24,-49,24,24,-87,-93,-262,-103,-121,-120,24,24,24,24,24,-39,-41,-44,-40,-42,24,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,24,-175,-174,24,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'PERIOD':([55,112,118,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,155,176,218,221,222,224,262,267,300,323,324,325,326,329,334,360,362,364,391,392,399,401,428,429,430,437,],[-261,-248,-252,-243,-255,-259,-256,-253,-241,-242,225,-251,-228,-257,-249,-240,-254,-250,263,-262,-260,-258,-237,-236,-140,263,-248,-235,-234,-233,-232,-231,-244,-143,263,-141,-229,-230,263,-142,-245,263,-238,-239,]),'GE':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,253,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,253,-202,-200,-204,253,-203,-199,-206,253,-197,-196,-205,253,253,253,253,-198,-221,-229,-230,-215,-245,-238,-239,]),'INT_CONST_DEC':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,145,145,-47,145,-28,-263,-125,-227,145,-225,145,-224,145,-223,145,145,-222,-226,-263,-223,145,145,145,-262,145,145,-223,145,145,-184,-187,-185,-181,-182,-186,-188,145,-190,-191,-183,-189,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,-12,145,145,-11,-223,-41,-44,-40,145,-42,145,145,-156,-155,-45,-157,145,-43,145,145,14
5,-263,-139,-175,-174,145,-172,145,145,-158,145,-171,-159,145,145,145,145,-263,145,145,-11,-170,-173,145,-162,145,-160,145,145,-161,145,145,145,-263,145,-166,-165,-163,145,145,145,-167,-164,145,-169,-168,]),'ARROW':([112,118,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,221,222,224,300,323,324,325,326,329,334,391,392,428,430,437,],[-248,-252,-243,-255,-259,-256,-253,-241,-242,223,-251,-228,-257,-249,-240,-254,-250,-262,-260,-258,-237,-236,-248,-235,-234,-233,-232,-231,-244,-229,-230,-245,-238,-239,]),'HEX_FLOAT_CONST':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,148,148,-47,148,-28,-263,-125,-227,148,-225,148,-224,148,-223,148,148,-222,-226,-263,-223,148,148,148,-262,148,148,-223,148,148,-184,-187,-185,-181,-182,-186,-188,148,-190,-191,-183,-189,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,-12,148,148,-11,-223,-41,-44,-40,148,-42,148,148,-156,-155,-45,-157,148,-43,148,148,148,-263,-139,-175,-174,148,-172,148,148,-158,148,-171,-159,148,148,148,148,-263,148,148,-11,-170,-173,148,-162,148,-160,148,148,-161,148,148,148,-263,148,-166,-165,-163,148,148,148,-167,-164,148,-169,-168,]),'DOUBLE':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[40,40,-63,-74,-73,-60,-56,-57,-35,-31,-61,40,-36,-55,-70,-65,-54,40,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,40,-69,40,-72,-76,40,-59,-86,-261,-85,40,-113,-112,-32,-102,-101,40,40,40,-47,-48,40,-115,40,40,40,40,-92,40,40,40,40,-38,40,-49,40,40,-87,-93,-262,-103,-121,-120,40,40,40,40,40,-39,-41,-44,-40,-42,40,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,40,-175,-174,40,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'MINUSEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,207,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'INT_CONST_OCT':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434
,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,149,149,-47,149,-28,-263,-125,-227,149,-225,149,-224,149,-223,149,149,-222,-226,-263,-223,149,149,149,-262,149,149,-223,149,149,-184,-187,-185,-181,-182,-186,-188,149,-190,-191,-183,-189,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,-12,149,149,-11,-223,-41,-44,-40,149,-42,149,149,-156,-155,-45,-157,149,-43,149,149,149,-263,-139,-175,-174,149,-172,149,149,-158,149,-171,-159,149,149,149,149,-263,149,149,-11,-170,-173,149,-162,149,-160,149,149,-161,149,149,149,-263,149,-166,-165,-163,149,149,149,-167,-164,149,-169,-168,]),'TIMESEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,216,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'OR':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,258,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,258,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,258,-207,-209,-210,-198,-221,-229,-230,-215,-245,-238,-239,]),'SHORT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[2,2,-63,-74,-73,-60,-56,-57,-35,-31,-61,2,-36,-55,-70,-65,-54,2,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,2,-69,2,-72,-76,2,-59,-86,-261,-85,2,-113,-112,-32,-102,-101,2,2,2,-47,-48,2,-115,2,2,2,2,-92,2,2,2,2,-38,2,-49,2,2,-87,-93,-262,-103,-121,-120,2,2,2,2,2,-39,-41,-44,-40,-42,2,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,2,-175,-174,2,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'RETURN':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,281,-262,-41,-44,-40,-42,281,-156,-155,-45,-157,281,-43,-175,-174,-172,281,-158,-171,-159,281,-170,-173,-162,281,-160,281,-161,281,281,-166,-165,-163,281,281,-167,-164,281,-169,-168,]),'RSHIFTEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,217,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'RESTRICT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,28,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,65,66,67,69,78,80,82,87,89,90,91,92,93,94,95,96,104,105,115,141,165,167,169,170,171,172,173,174,175,176,191,197,198,20
4,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[32,32,-63,-74,-73,-60,-56,-57,-35,-31,-61,32,-36,-55,-70,-65,-54,32,-58,-178,-111,-68,32,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,32,-69,32,-72,-76,32,-59,-86,-261,-85,32,-113,-112,-32,-102,-101,32,32,32,-124,32,32,-47,-48,32,-115,32,32,32,32,-92,32,32,32,-125,32,32,32,-38,32,-49,32,32,-87,-93,-262,-103,-121,-120,32,32,32,32,32,-39,-41,-44,-40,-42,32,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,32,-175,-174,32,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'STATIC':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,62,63,65,66,69,78,80,82,87,89,90,104,115,165,167,169,170,171,174,176,191,197,198,204,272,276,277,280,282,289,291,292,293,295,298,302,303,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[9,9,-63,-74,-73,-60,-56,-57,-35,-31,-61,9,-36,-55,-70,-65,-54,9,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,9,-69,9,-72,-76,9,-59,-86,-261,-85,-113,-112,-32,-102,-101,105,9,-124,9,9,-47,-48,9,-115,195,-125,9,9,-38,9,-49,-87,-262,-103,-121,-120,9,-39,-41,-44,-40,-42,9,-156,-155,-45,-157,-43,-89,-88,-105,-104,-116,-119,9,-175,-174,9,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'SIZEOF':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,127,127,-47,127,-28,-263,-125,-227,127,-225,127,-224,127,-223,127,127,-222,-226,-263,-223,127,127,127,-262,127,127,-223,127,127,-184,-187,-185,-181,-182,-186,-188,127,-190,-191,-183,-189,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,-12,127,127,-11,-223,-41,-44,-40,127,-42,127,127,-156,-155,-45,-157,127,-43,127,127,127,-263,-139,-175,-174,127,-172,127,127,-158,127,-171,-159,127,127,127,127,-263,127,127,-11,-170,-173,127,-162,127,-160,127,127,-161,127,127,127,-263,127,-166,-165,-163,127,127,127,-167,-164,127,-169,-168,]),'UNSIGNED':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[18,18,-63,-74,-73,-60,-56,-57,-35,-31,-61,18,-36,-55,-70,-65,-54,18,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,18,-69,18,-72,-76,18,-59,-86,-261,-85,18,-113,-112,-32,-102,-101,18,18,18,-47,-48,18,-115,18,18,18,18,-92,18,18,18,18,-38,18,-49,18,18,-87,-93,-262,-103,-121,-120,18,18,18,18,18,-39,-41,-44,-40,-42,18,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,18,-175,-174,18,-172,-158,-171,-159,-
118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'UNION':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[20,20,-63,-74,-73,-60,-56,-57,-35,-31,-61,20,-36,-55,-70,-65,-54,20,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,20,-69,20,-72,-76,20,-59,-86,-261,-85,20,-113,-112,-32,-102,-101,20,20,20,-47,-48,20,-115,20,20,20,20,-92,20,20,20,20,-38,20,-49,20,20,-87,-93,-262,-103,-121,-120,20,20,20,20,20,-39,-41,-44,-40,-42,20,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,20,-175,-174,20,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'COLON':([2,3,5,6,13,18,19,25,26,27,29,32,33,35,37,39,40,43,45,46,54,56,59,60,62,63,90,94,96,97,112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,151,174,176,177,178,179,180,187,191,197,198,218,220,221,222,224,231,232,234,238,240,286,300,302,303,305,306,310,311,314,315,321,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,377,388,389,390,391,392,397,398,419,428,430,437,],[-63,-74,-73,-60,-61,-70,-65,-178,-111,-68,-71,-75,-114,-66,-62,-64,-67,-69,-72,-76,-86,-85,-113,-112,-102,-101,-115,-263,-263,181,-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,-193,-254,-250,-179,-87,-262,-23,-84,-24,-83,309,-103,-121,-120,-260,-220,-258,-237,-236,-214,-219,-217,-176,-218,375,384,-89,-88,-192,181,-105,-104,-116,-119,-180,-235,-234,-233,-232,-231,-244,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,400,-210,-198,411,-118,-117,-221,-229,-230,-177,-215,-194,-245,-238,-239,]),'$end':([0,8,11,12,15,22,31,36,38,47,61,82,169,176,272,383,],[-263,0,-35,-31,-36,-29,-34,-33,-37,-30,-32,-47,-38,-262,-39,-159,]),'WSTRING_LITERAL':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,121,123,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,123,123,-47,123,-28,-263,-125,-227,218,-259,123,-225,123,-224,123,-223,123,123,-222,-226,-263,-223,123,123,123,-262,123,123,-223,123,123,-184,-187,-185,-181,-182,-186,-188,123,-190,-191,-183,-189,-260,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,-12,123,123,-11,-223,-41,-44,-40,123,-42,123,123,-156,-155,-45,-157,123,-43,123,123,123,-263,-139,-175,-174,123,-172,123,123,-158,123,-171,-159,123,123,123,123,-263,123,123,-11,-170,-173,123,-162,123,-160,123,123,-161,123,123,123,-263,123,-166,-165,-163,123,123,123,-167,-164,123,-169,-168,]),'DIVIDE':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,3
51,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,251,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,251,251,251,251,251,251,251,251,251,251,-197,-196,251,251,251,251,251,-198,-221,-229,-230,-215,-245,-238,-239,]),'FOR':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,283,-262,-41,-44,-40,-42,283,-156,-155,-45,-157,283,-43,-175,-174,-172,283,-158,-171,-159,283,-170,-173,-162,283,-160,283,-161,283,283,-166,-165,-163,283,283,-167,-164,283,-169,-168,]),'PLUSPLUS':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,112,115,118,119,121,122,123,124,125,126,127,128,129,130,134,135,137,138,139,140,141,142,143,144,145,146,148,149,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,221,222,224,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,300,309,323,324,325,326,329,334,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,391,392,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,137,137,-47,137,-28,-263,-248,-125,-252,-227,-243,-255,-259,-256,-253,-241,137,-225,-242,224,137,-224,137,-251,-223,-228,137,137,-257,-222,-249,-240,-254,-250,-226,-263,-223,137,137,137,-262,137,137,-223,137,137,-184,-187,-185,-181,-182,-186,-188,137,-190,-191,-183,-189,-260,137,-258,-237,-236,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,-12,137,137,-11,-223,-41,-44,-40,137,-42,137,137,-156,-155,-45,-157,137,-43,-248,137,-235,-234,-233,-232,-231,-244,137,137,-263,-139,-175,-174,137,-172,137,137,-158,137,-171,-159,137,137,-229,-230,137,137,-263,137,137,-11,-170,-173,137,-162,137,-160,137,137,-161,137,137,137,-245,-263,-238,137,-166,-165,-163,-239,137,137,137,-167,-164,137,-169,-168,]),'EQUALS':([1,2,3,5,6,9,10,13,14,17,18,19,21,23,25,26,27,29,30,32,33,35,37,39,40,42,43,44,45,46,49,50,51,52,54,56,58,59,60,62,63,80,83,84,86,90,102,112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,168,174,176,191,197,198,218,220,221,222,224,231,232,234,240,262,267,300,302,303,310,311,314,315,323,324,325,326,329,334,360,364,388,389,390,391,392,398,401,428,430,437,],[-263,-63,-74,-73,-60,-56,-57,-61,-263,-55,-70,-65,-54,-58,-178,-111,-68,-71,77,-75,-114,-66,-62,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-85,-51,-113,-112,-102,-101,162,-50,-53,77,-115,192,-248,-252,209,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,162,-87,-262,-103,-121,-120,-260,-220,-258,-237,-236,-214,-219,-217,-218,-140,365,-248,-89,-88,-105,-104,-116,-119,-235,-234,-233,-232,-231,-244,-143,-141,-118,-117,-221,-229,-230,-215,-142,-245,-238,-239,]),'ELSE':([176,276,277,280,282,293,298,370,371,374,381,383,405,406,409,414,424,433,434,436,444,445,447,448,],[-262,-41,-44,-40,-42,-45,-43,-175,-174,-172,-171,-159,-170,-173,-162,-160,-161,-166,-165,441,-167,-164,-169,-168,]),'ANDEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,214,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-
249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'EQ':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,255,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,255,-202,-200,-204,-208,-203,-199,-206,255,-197,-196,-205,255,-207,255,255,-198,-221,-229,-230,-215,-245,-238,-239,]),'AND':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,112,115,118,119,120,121,122,123,124,125,126,127,128,129,130,132,134,135,137,138,139,140,141,142,143,144,145,146,147,148,149,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,224,226,227,230,231,232,233,234,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,300,309,323,324,325,326,329,334,335,336,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,362,365,370,371,373,374,375,376,379,380,381,383,384,385,390,391,392,393,395,398,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,144,144,-47,144,-28,-263,-248,-125,-252,-227,-214,-243,-255,-259,-256,-253,-241,144,-225,-242,-216,-195,144,-224,144,-251,-223,-228,144,144,-257,-222,-249,-240,256,-254,-250,-226,-263,-223,144,144,144,-262,144,144,-223,144,144,-184,-187,-185,-181,-182,-186,-188,144,-190,-191,-183,-189,-260,144,-220,-258,-237,-236,144,144,144,-214,-219,144,-217,-218,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,-12,144,144,-11,-223,-41,-44,-40,144,-42,144,144,-156,-155,-45,-157,144,-43,-248,144,-235,-234,-233,-232,-231,-244,144,144,-201,256,-202,-200,-204,-208,-203,-199,-206,256,-197,-196,-205,256,-207,-209,256,-198,-263,-139,-175,-174,144,-172,144,144,-158,144,-171,-159,144,144,-221,-229,-230,144,144,-215,-263,144,144,-11,-170,-173,144,-162,144,-160,144,144,-161,144,144,144,-245,-263,-238,144,-166,-165,-163,-239,144,144,144,-167,-164,144,-169,-168,]),'TYPEID':([0,1,2,3,5,6,7,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,31,32,33,34,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,67,68,69,70,74,78,80,82,87,89,90,91,92,93,94,95,96,115,116,141,165,166,167,169,170,171,172,173,174,175,176,191,197,198,204,219,223,225,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[25,25,-63,-74,-73,-60,54,-56,-57,-35,-31,-61,25,-36,59,-55,-70,-65,-91,-54,25,-58,62,-178,-111,-68,-263,-71,-34,-75,-114,-90,-66,-33,-62,-37,-64,-67,25,-69,25,-72,-76,25,-59,-86,-261,-85,25,-113,-112,-32,-102,-101,25,-28,-122,-124,-27,59,25,25,-47,-48,25,-115,25,25,25,25,-92,25,-125,-123,25,25,59,25,-38,25,-49,25,25,-87,-93,-262,-103,-121,-120,25,25,323,325,25,25,25,-39,-41,-44,-40,-42,25,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,25,-175,-174,25,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'LBRACE':([7,20,24,26,33,34,48,54,55,56,59,60,62,63,77,80,82,85,87,88,89,90,155,162,163,170,171,
176,197,198,260,266,268,276,277,280,282,289,291,292,293,295,297,298,314,315,336,362,365,370,371,374,375,379,381,383,384,388,389,390,395,396,399,402,403,405,406,409,411,414,423,424,425,427,429,433,434,436,441,443,444,445,446,447,448,],[55,-91,55,-111,-114,-90,-263,55,-261,55,-113,-112,55,55,55,-263,-47,-7,-48,55,-8,-115,-263,55,55,55,-49,-262,-121,-120,-12,55,-11,-41,-44,-40,-42,55,-156,-155,-45,-157,55,-43,-116,-119,55,-263,-139,-175,-174,-172,55,-158,-171,-159,55,-118,-117,55,55,55,-263,55,-11,-170,-173,-162,55,-160,55,-161,55,55,-263,-166,-165,-163,55,55,-167,-164,55,-169,-168,]),'PPHASH':([0,11,12,15,22,31,36,38,61,82,169,176,272,383,],[38,-35,-31,-36,38,-34,-33,-37,-32,-47,-38,-262,-39,-159,]),'INT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[39,39,-63,-74,-73,-60,-56,-57,-35,-31,-61,39,-36,-55,-70,-65,-54,39,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,39,-69,39,-72,-76,39,-59,-86,-261,-85,39,-113,-112,-32,-102,-101,39,39,39,-47,-48,39,-115,39,39,39,39,-92,39,39,39,39,-38,39,-49,39,39,-87,-93,-262,-103,-121,-120,39,39,39,39,39,-39,-41,-44,-40,-42,39,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,39,-175,-174,39,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'SIGNED':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[43,43,-63,-74,-73,-60,-56,-57,-35,-31,-61,43,-36,-55,-70,-65,-54,43,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,43,-69,43,-72,-76,43,-59,-86,-261,-85,43,-113,-112,-32,-102,-101,43,43,43,-47,-48,43,-115,43,43,43,43,-92,43,43,43,43,-38,43,-49,43,43,-87,-93,-262,-103,-121,-120,43,43,43,43,43,-39,-41,-44,-40,-42,43,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,43,-175,-174,43,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'CONTINUE':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,284,-262,-41,-44,-40,-42,284,-156,-155,-45,-157,284,-43,-175,-174,-172,284,-158,-171,-159,284,-170,-173,-162,284,-160,284,-161,284,284,-166,-165,-163,284,284,-167,-164,284,-169,-168,]),'NOT':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,152,152,-47,152,-28,-263,-125,-22
7,152,-225,152,-224,152,-223,152,152,-222,-226,-263,-223,152,152,152,-262,152,152,-223,152,152,-184,-187,-185,-181,-182,-186,-188,152,-190,-191,-183,-189,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,-12,152,152,-11,-223,-41,-44,-40,152,-42,152,152,-156,-155,-45,-157,152,-43,152,152,152,-263,-139,-175,-174,152,-172,152,152,-158,152,-171,-159,152,152,152,152,-263,152,152,-11,-170,-173,152,-162,152,-160,152,152,-161,152,152,152,-263,152,-166,-165,-163,152,152,152,-167,-164,152,-169,-168,]),'OREQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,215,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'MOD':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,259,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,259,259,259,259,259,259,259,259,259,259,-197,-196,259,259,259,259,259,-198,-221,-229,-230,-215,-245,-238,-239,]),'RSHIFT':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,241,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,241,-202,-200,241,241,241,-199,241,241,-197,-196,241,241,241,241,241,-198,-221,-229,-230,-215,-245,-238,-239,]),'DEFAULT':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,286,-262,-41,-44,-40,-42,286,-156,-155,-45,-157,286,-43,-175,-174,-172,286,-158,-171,-159,286,-170,-173,-162,286,-160,286,-161,286,286,-166,-165,-163,286,286,-167,-164,286,-169,-168,]),'CHAR':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[37,37,-63,-74,-73,-60,-56,-57,-35,-31,-61,37,-36,-55,-70,-65,-54,37,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,37,-69,37,-72,-76,37,-59,-86,-261,-85,37,-113,-112,-32,-102,-101,37,37,37,-47,-48,37,-115,37,37,37,37,-92,37,37,37,37,-38,37,-49,37,37,-87,-93,-262,-103,-121,-120,37,37,37,37,37,-39,-41,-44,-40,-42,37,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,37,-175,-174,37,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'WHILE':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,382,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,287,-26
2,-41,-44,-40,-42,287,-156,-155,-45,-157,287,-43,-175,-174,-172,287,-158,-171,413,-159,287,-170,-173,-162,287,-160,287,-161,287,287,-166,-165,-163,287,287,-167,-164,287,-169,-168,]),'DIVEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,206,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'EXTERN':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,62,63,66,78,80,82,87,89,90,165,167,169,170,171,174,176,191,197,198,204,272,276,277,280,282,289,291,292,293,295,298,302,303,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[10,10,-63,-74,-73,-60,-56,-57,-35,-31,-61,10,-36,-55,-70,-65,-54,10,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,10,-69,10,-72,-76,10,-59,-86,-261,-85,-113,-112,-32,-102,-101,10,10,10,-47,-48,10,-115,10,10,-38,10,-49,-87,-262,-103,-121,-120,10,-39,-41,-44,-40,-42,10,-156,-155,-45,-157,-43,-89,-88,-105,-104,-116,-119,10,-175,-174,10,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'CASE':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,288,-262,-41,-44,-40,-42,288,-156,-155,-45,-157,288,-43,-175,-174,-172,288,-158,-171,-159,288,-170,-173,-162,288,-160,288,-161,288,288,-166,-165,-163,288,288,-167,-164,288,-169,-168,]),'LAND':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,254,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,254,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,-221,-229,-230,-215,-245,-238,-239,]),'REGISTER':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,62,63,66,78,80,82,87,89,90,165,167,169,170,171,174,176,191,197,198,204,272,276,277,280,282,289,291,292,293,295,298,302,303,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[17,17,-63,-74,-73,-60,-56,-57,-35,-31,-61,17,-36,-55,-70,-65,-54,17,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,17,-69,17,-72,-76,17,-59,-86,-261,-85,-113,-112,-32,-102,-101,17,17,17,-47,-48,17,-115,17,17,-38,17,-49,-87,-262,-103,-121,-120,17,-39,-41,-44,-40,-42,17,-156,-155,-45,-157,-43,-89,-88,-105,-104,-116,-119,17,-175,-174,17,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'MODEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,208,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'NE':([112,118,1
20,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,246,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,246,-202,-200,-204,-208,-203,-199,-206,246,-197,-196,-205,246,-207,246,246,-198,-221,-229,-230,-215,-245,-238,-239,]),'SWITCH':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,290,-262,-41,-44,-40,-42,290,-156,-155,-45,-157,290,-43,-175,-174,-172,290,-158,-171,-159,290,-170,-173,-162,290,-160,290,-161,290,290,-166,-165,-163,290,290,-167,-164,290,-169,-168,]),'INT_CONST_HEX':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,138,138,-47,138,-28,-263,-125,-227,138,-225,138,-224,138,-223,138,138,-222,-226,-263,-223,138,138,138,-262,138,138,-223,138,138,-184,-187,-185,-181,-182,-186,-188,138,-190,-191,-183,-189,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,-12,138,138,-11,-223,-41,-44,-40,138,-42,138,138,-156,-155,-45,-157,138,-43,138,138,138,-263,-139,-175,-174,138,-172,138,138,-158,138,-171,-159,138,138,138,138,-263,138,138,-11,-170,-173,138,-162,138,-160,138,138,-161,138,138,138,-263,138,-166,-165,-163,138,138,138,-167,-164,138,-169,-168,]),'_COMPLEX':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[27,27,-63,-74,-73,-60,-56,-57,-35,-31,-61,27,-36,-55,-70,-65,-54,27,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,27,-69,27,-72,-76,27,-59,-86,-261,-85,27,-113,-112,-32,-102,-101,27,27,27,-47,-48,27,-115,27,27,27,27,-92,27,27,27,27,-38,27,-49,27,27,-87,-93,-262,-103,-121,-120,27,27,27,27,27,-39,-41,-44,-40,-42,27,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,27,-175,-174,27,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'PLUSEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,211,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'STRUCT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,5
7,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[34,34,-63,-74,-73,-60,-56,-57,-35,-31,-61,34,-36,-55,-70,-65,-54,34,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,34,-69,34,-72,-76,34,-59,-86,-261,-85,34,-113,-112,-32,-102,-101,34,34,34,-47,-48,34,-115,34,34,34,34,-92,34,34,34,34,-38,34,-49,34,34,-87,-93,-262,-103,-121,-120,34,34,34,34,34,-39,-41,-44,-40,-42,34,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,34,-175,-174,34,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'CONDOP':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,257,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,-221,-229,-230,-215,-245,-238,-239,]),'BREAK':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,294,-262,-41,-44,-40,-42,294,-156,-155,-45,-157,294,-43,-175,-174,-172,294,-158,-171,-159,294,-170,-173,-162,294,-160,294,-161,294,294,-166,-165,-163,294,294,-167,-164,294,-169,-168,]),'VOLATILE':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,28,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,65,66,67,69,78,80,82,87,89,90,91,92,93,94,95,96,104,105,115,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[46,46,-63,-74,-73,-60,-56,-57,-35,-31,-61,46,-36,-55,-70,-65,-54,46,-58,-178,-111,-68,46,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,46,-69,46,-72,-76,46,-59,-86,-261,-85,46,-113,-112,-32,-102,-101,46,46,46,-124,46,46,-47,-48,46,-115,46,46,46,46,-92,46,46,46,-125,46,46,46,-38,46,-49,46,46,-87,-93,-262,-103,-121,-120,46,46,46,46,46,-39,-41,-44,-40,-42,46,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,46,-175,-174,46,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'INLINE':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,62,63,66,78,80,82,87,89,90,165,167,169,170,171,174,176,191,197,198,204,272,276,277,280,282,289,291,292,293,295,298,302,303,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[49,49,-63,-74,-73,-60,-56,-57,-35,-31,-61,49,-36,-55,-70,-65,-54,49,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,49,-69,49,-72,-76,49,-59,-86,-261,-85,-113,-112,-32,-102,-101,49,49,49,-47,-48,49,-115,49,49,-38,49,-49,-87,-262,-103,-121,-120,49,-39,-41,-44,-40,-42,49,-156,-155,-45,-157,-43,-89,-88,-105,-104,-116,-119,49,-175,-174,49,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'INT_CONST_BIN':([3,32,46,
55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,118,118,-47,118,-28,-263,-125,-227,118,-225,118,-224,118,-223,118,118,-222,-226,-263,-223,118,118,118,-262,118,118,-223,118,118,-184,-187,-185,-181,-182,-186,-188,118,-190,-191,-183,-189,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,-12,118,118,-11,-223,-41,-44,-40,118,-42,118,118,-156,-155,-45,-157,118,-43,118,118,118,-263,-139,-175,-174,118,-172,118,118,-158,118,-171,-159,118,118,118,118,-263,118,118,-11,-170,-173,118,-162,118,-160,118,118,-161,118,118,118,-263,118,-166,-165,-163,118,118,118,-167,-164,118,-169,-168,]),'DO':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,297,-262,-41,-44,-40,-42,297,-156,-155,-45,-157,297,-43,-175,-174,-172,297,-158,-171,-159,297,-170,-173,-162,297,-160,297,-161,297,297,-166,-165,-163,297,297,-167,-164,297,-169,-168,]),'LNOT':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,119,119,-47,119,-28,-263,-125,-227,119,-225,119,-224,119,-223,119,119,-222,-226,-263,-223,119,119,119,-262,119,119,-223,119,119,-184,-187,-185,-181,-182,-186,-188,119,-190,-191,-183,-189,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,-12,119,119,-11,-223,-41,-44,-40,119,-42,119,119,-156,-155,-45,-157,119,-43,119,119,119,-263,-139,-175,-174,119,-172,119,119,-158,119,-171,-159,119,119,119,119,-263,119,119,-11,-170,-173,119,-162,119,-160,119,119,-161,119,119,119,-263,119,-166,-165,-163,119,119,119,-167,-164,119,-169,-168,]),'CONST':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,28,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,65,66,67,69,78,80,82,87,89,90,91,92,93,94,95,96,104,105,115,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[3,3,-63,-74,-73,-60,-56,-57,-35,-31,-61,3,-36,-55,-70,-65,-54,3,-58,-178,-111,-68,3,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,3,-69,3,-72,-76,3,-59,-86,-261,-85,3,-113,-112,-32,-102,-101,3,3,3,-124,3,3,-47,-48,3,-115,3,3,3,3,-92,3,3,3,-125,3,3,3,-38,3,-49,3,3,-87,-93,-262,-103,-121,-120,3,3,3,3,3,-39,-41,-44,-40,-42,3,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,3,-175,-174,3,-172,-158,-171,-159,-11
8,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'LOR':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,242,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,-221,-229,-230,-215,-245,-238,-239,]),'CHAR_CONST':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,122,122,-47,122,-28,-263,-125,-227,122,-225,122,-224,122,-223,122,122,-222,-226,-263,-223,122,122,122,-262,122,122,-223,122,122,-184,-187,-185,-181,-182,-186,-188,122,-190,-191,-183,-189,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,-12,122,122,-11,-223,-41,-44,-40,122,-42,122,122,-156,-155,-45,-157,122,-43,122,122,122,-263,-139,-175,-174,122,-172,122,122,-158,122,-171,-159,122,122,122,122,-263,122,122,-11,-170,-173,122,-162,122,-160,122,122,-161,122,122,122,-263,122,-166,-165,-163,122,122,122,-167,-164,122,-169,-168,]),'LSHIFT':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,243,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,243,-202,-200,243,243,243,-199,243,243,-197,-196,243,243,243,243,243,-198,-221,-229,-230,-215,-245,-238,-239,]),'RBRACE':([55,82,93,95,100,101,102,112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,151,155,156,170,172,173,175,176,188,189,190,218,220,221,222,224,231,232,234,240,261,265,268,276,277,280,282,289,291,292,293,295,296,298,299,305,307,308,312,313,321,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,359,362,363,370,371,374,379,381,383,390,391,392,398,404,405,406,409,414,418,419,420,424,428,429,430,433,434,436,437,444,445,447,448,],[-261,-47,176,-92,-106,176,-109,-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,-193,-254,-250,-179,-263,-134,-263,176,176,-93,-262,176,176,-107,-260,-220,-258,-237,-236,-214,-219,-217,-218,176,-20,-19,-41,-44,-40,-42,-6,-156,-155,-45,-157,-5,-43,176,-192,-94,-95,-108,-110,-180,-235,-234,-233,-232,-231,-244,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,-135,176,-137,-175,-174,-172,-158,-171,-159,-221,-229,-230,-215,-136,-170,-173,-162,-160,176,-194,-138,-161,-245,176,-238,-166,-165,-163,-239,-167,-164,-169,-168,]),'_BOOL':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27
,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[13,13,-63,-74,-73,-60,-56,-57,-35,-31,-61,13,-36,-55,-70,-65,-54,13,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,13,-69,13,-72,-76,13,-59,-86,-261,-85,13,-113,-112,-32,-102,-101,13,13,13,-47,-48,13,-115,13,13,13,13,-92,13,13,13,13,-38,13,-49,13,13,-87,-93,-262,-103,-121,-120,13,13,13,13,13,-39,-41,-44,-40,-42,13,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,13,-175,-174,13,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'LE':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,245,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,245,-202,-200,-204,245,-203,-199,-206,245,-197,-196,-205,245,245,245,245,-198,-221,-229,-230,-215,-245,-238,-239,]),'SEMI':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,28,29,30,31,32,33,35,36,37,38,39,40,41,42,43,44,45,46,49,50,51,52,54,55,56,58,59,60,61,62,63,67,68,69,70,71,73,74,75,76,79,80,81,82,83,84,86,90,94,96,97,112,115,116,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,151,153,154,156,166,168,169,170,174,176,177,178,179,180,182,183,184,185,186,187,191,197,198,205,218,220,221,222,224,228,231,232,234,235,238,240,269,270,271,272,276,277,279,280,281,282,284,285,289,291,292,293,294,295,296,297,298,300,302,303,304,305,310,311,314,315,321,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,359,366,367,368,369,370,371,372,373,374,375,378,379,381,383,384,386,387,388,389,390,391,392,397,398,404,405,406,407,408,409,411,414,419,421,422,423,424,425,427,428,430,431,433,434,436,437,440,441,443,444,445,446,447,448,],[15,-263,-63,-74,-73,-60,-56,-57,-35,-31,-61,-263,-36,-55,-70,-65,-54,15,-58,-178,-111,-68,-263,-71,-263,-34,-75,-114,-66,-33,-62,-37,-64,-67,82,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-261,-85,-51,-113,-112,-32,-102,-101,-28,-122,-124,-27,-18,-46,-145,-77,-17,-80,-81,-147,-47,-50,-53,-263,-115,-263,-263,-263,-248,-125,-123,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,-193,-254,-250,-179,-146,-79,-134,-145,-81,-38,-263,-87,-262,-23,-84,-24,-83,-26,307,-96,308,-25,-98,-103,-121,-120,-78,-260,-220,-258,-237,-236,-150,-214,-219,-217,-152,-176,-218,-154,-148,-82,-39,-41,-44,370,-40,371,-42,374,-14,-263,-156,-155,-45,381,-157,-13,-263,-43,-248,-89,-88,-100,-192,-105,-104,-116,-119,-180,-235,-234,-233,-232,-231,-244,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,-135,-149,-151,-153,405,-175,-174,406,-263,-172,-263,-13,-158,-171,-159,-263,-97,-99,-118,-117,-221,-229,-230,-177,-215,-136,-170,-173,421,-263,-162,-263,-160,-194,-263,432,-263,-161,-263,-263,-245,-238,438,-166,-165,-163,-239,444,-263,-263,-167,-164,-263,-169,-168,]),'LT':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,24
0,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,247,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,247,-202,-200,-204,247,-203,-199,-206,247,-197,-196,-205,247,247,247,247,-198,-221,-229,-230,-215,-245,-238,-239,]),'COMMA':([1,2,3,5,6,9,10,13,14,17,18,19,21,23,25,26,27,28,29,32,33,35,37,39,40,42,43,44,45,46,49,50,51,52,54,56,58,59,60,62,63,67,68,69,70,71,74,75,79,80,81,83,84,90,94,96,100,101,102,109,110,111,112,113,114,115,116,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,151,153,154,156,166,168,174,176,177,178,179,180,182,184,187,188,189,190,191,197,198,199,200,201,202,205,218,220,221,222,224,228,231,232,234,235,236,238,239,240,265,269,270,271,285,300,302,303,304,305,310,311,312,313,314,315,318,320,321,323,324,325,326,327,328,329,330,331,334,337,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,363,366,367,368,372,386,387,388,389,390,391,392,397,398,404,410,412,415,416,418,419,420,428,430,435,437,],[-263,-63,-74,-73,-60,-56,-57,-61,-263,-55,-70,-65,-54,-58,-178,-111,-68,-263,-71,-75,-114,-66,-62,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-85,-51,-113,-112,-102,-101,-28,-122,-124,-27,117,-145,-77,-80,-81,-147,-50,-53,-115,-263,-263,-106,190,-109,-128,-263,203,-248,204,-132,-125,-123,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,-193,-254,-250,-179,-146,-79,-134,-145,-81,-87,-262,-23,-84,-24,-83,306,-96,-98,190,190,-107,-103,-121,-120,-131,-1,-2,-130,-78,-260,-220,-258,-237,-236,-150,-214,-219,-217,-152,335,-176,-263,-218,362,-154,-148,-82,335,-248,-89,-88,-100,-192,-105,-104,-108,-110,-116,-119,-133,-129,-180,-235,-234,-233,-232,335,-246,-231,393,394,-244,-144,-145,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,335,-210,-198,-135,-137,-149,-151,-153,335,-97,-99,-118,-117,-221,-229,-230,-177,-215,-136,335,335,335,-247,429,-194,-138,-245,-238,335,-239,]),'OFFSETOF':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,133,133,-47,133,-28,-263,-125,-227,133,-225,133,-224,133,-223,133,133,-222,-226,-263,-223,133,133,133,-262,133,133,-223,133,133,-184,-187,-185,-181,-182,-186,-188,133,-190,-191,-183,-189,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,-12,133,133,-11,-223,-41,-44,-40,133,-42,133,133,-156,-155,-45,-157,133,-43,133,133,133,-263,-139,-175,-174,133,-172,133,133,-158,133,-171,-159,133,133,133,133,-263,133,133,-11,-170,-173,133,-162,133,-160,133,133,-161,133,133,133,-263,133,-166,-165,-163,133,133,133,-167,-164,133,-169,-168,]),'TYPEDEF':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,62,63,66,78,80,82,87,89,90,165,167,169,170,171,174,176,191,197,198,204,272,276,277,280,282,289,291,29
2,293,295,298,302,303,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[23,23,-63,-74,-73,-60,-56,-57,-35,-31,-61,23,-36,-55,-70,-65,-54,23,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,23,-69,23,-72,-76,23,-59,-86,-261,-85,-113,-112,-32,-102,-101,23,23,23,-47,-48,23,-115,23,23,-38,23,-49,-87,-262,-103,-121,-120,23,-39,-41,-44,-40,-42,23,-156,-155,-45,-157,-43,-89,-88,-105,-104,-116,-119,23,-175,-174,23,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'XOR':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,250,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,250,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,250,-207,-209,250,-198,-221,-229,-230,-215,-245,-238,-239,]),'AUTO':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,62,63,66,78,80,82,87,89,90,165,167,169,170,171,174,176,191,197,198,204,272,276,277,280,282,289,291,292,293,295,298,302,303,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[21,21,-63,-74,-73,-60,-56,-57,-35,-31,-61,21,-36,-55,-70,-65,-54,21,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,21,-69,21,-72,-76,21,-59,-86,-261,-85,-113,-112,-32,-102,-101,21,21,21,-47,-48,21,-115,21,21,-38,21,-49,-87,-262,-103,-121,-120,21,-39,-41,-44,-40,-42,21,-156,-155,-45,-157,-43,-89,-88,-105,-104,-116,-119,21,-175,-174,21,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'TIMES':([0,1,2,3,4,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,27,28,29,30,31,32,35,36,37,38,39,40,42,43,44,45,46,49,50,51,52,54,55,56,58,61,62,63,65,67,68,69,70,72,77,78,82,83,84,86,94,96,97,103,104,105,110,112,115,117,118,119,120,121,122,123,124,125,126,127,128,129,130,132,134,135,137,138,139,140,141,142,143,144,145,146,147,148,149,152,155,157,162,164,167,169,170,174,176,177,178,179,180,181,191,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,224,226,227,230,231,232,233,234,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,272,274,276,277,280,281,282,288,289,291,292,293,295,297,298,300,302,303,306,309,310,311,323,324,325,326,329,334,335,336,338,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,362,365,370,371,373,374,375,376,379,380,381,383,384,385,390,391,392,393,395,398,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[28,-263,-63,-74,28,-73,-60,-56,-57,-35,-31,-61,-263,-36,-55,-70,-65,-54,28,-58,-178,-68,-263,-71,28,-34,-75,-66,-33,-62,-37,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-261,-85,-51,-32,-102,-101,-263,-28,28,-124,-27,139,157,28,-47,-50,-53,28,-263,-263,28,194,-28,-263,28,-248,-125,28,-252,-227,-214,-243,-255,-259,-256,-253,-241,157,-225,-242,-216,-195,157,-224,157,-251,-223,-228,157,157,-257,-222,-249,-240,252,-254,-250,-226,-263,-223,157,274,28,-38,157,-87,-262,-23,-84,-24,-83,157,-103,157,-223,157,157,-184,-187,-185,-181,-182,-186,-188,157,-190,-191,-183,-189,-260,157,-22
0,-258,-237,-236,157,157,157,-214,-219,157,-217,28,-218,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,-12,157,157,-11,-39,-223,-41,-44,-40,157,-42,157,157,-156,-155,-45,-157,157,-43,-248,-89,-88,28,157,-105,-104,-235,-234,-233,-232,-231,-244,157,157,28,252,252,252,252,252,252,252,252,252,252,-197,-196,252,252,252,252,252,-198,-263,-139,-175,-174,157,-172,157,157,-158,157,-171,-159,157,157,-221,-229,-230,157,157,-215,-263,157,157,-11,-170,-173,157,-162,157,-160,157,157,-161,157,157,157,-245,-263,-238,157,-166,-165,-163,-239,157,157,157,-167,-164,157,-169,-168,]),'LPAREN':([0,1,2,3,4,5,6,9,10,11,12,13,14,15,16,17,18,19,21,22,23,25,26,27,28,29,30,31,32,33,35,36,37,38,39,40,42,43,44,45,46,49,50,51,52,54,55,56,58,60,61,62,63,65,67,68,69,70,72,74,77,78,81,82,83,84,86,90,94,96,97,103,104,105,110,112,115,116,117,118,119,121,122,123,124,125,126,127,128,129,130,133,134,135,137,138,139,140,141,142,143,144,145,146,148,149,152,153,155,157,162,164,166,167,169,170,174,176,177,178,179,180,181,191,192,194,195,196,197,198,206,207,208,209,210,211,212,213,214,215,216,217,218,219,221,222,224,226,227,228,230,233,235,239,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,269,270,272,274,276,277,280,281,282,283,287,288,289,290,291,292,293,295,297,298,300,301,302,303,306,309,310,311,314,315,323,324,325,326,329,334,335,336,338,339,362,365,366,367,368,370,371,373,374,375,376,379,380,381,383,384,385,388,389,391,392,393,395,399,400,402,403,405,406,408,409,411,413,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[4,-263,-63,-74,4,-73,-60,-56,-57,-35,-31,-61,-263,-36,4,-55,-70,-65,-54,4,-58,-178,66,-68,-263,-71,78,-34,-75,-114,-66,-33,-62,-37,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-261,-85,-51,66,-32,-102,-101,-263,-28,-122,-124,-27,141,78,141,78,165,-47,-50,-53,167,-115,-263,-263,167,141,-28,-263,78,-248,-125,-123,4,-252,-227,-243,-255,-259,-256,-253,-241,219,-225,-242,227,229,230,-224,233,-251,-223,-228,141,233,-257,-222,-249,-240,-254,-250,-226,165,-263,-223,141,141,167,167,-38,141,-87,-262,-23,-84,-24,-83,230,-103,230,-223,141,141,-121,-120,-184,-187,-185,-181,-182,-186,-188,141,-190,-191,-183,-189,-260,141,-258,-237,-236,141,141,-150,141,141,-152,338,230,230,230,230,230,230,230,230,230,230,230,230,230,230,230,230,141,230,230,-12,230,141,-11,-154,-148,-39,-223,-41,-44,-40,141,-42,373,376,230,141,380,-156,-155,-45,-157,141,-43,-248,385,-89,-88,4,230,-105,-104,-116,-119,-235,-234,-233,-232,-231,-244,141,230,338,338,-263,-139,-149,-151,-153,-175,-174,141,-172,141,141,-158,141,-171,-159,141,141,-118,-117,-229,-230,141,230,-263,230,141,-11,-170,-173,141,-162,141,426,-160,141,141,-161,141,141,141,-245,-263,-238,141,-166,-165,-163,-239,141,141,141,-167,-164,141,-169,-168,]),'MINUSMINUS':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,112,115,118,119,121,122,123,124,125,126,127,128,129,130,134,135,137,138,139,140,141,142,143,144,145,146,148,149,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,221,222,224,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,300,309,323,324,325,326,329,334,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,391,392,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,142,142,-47
,142,-28,-263,-248,-125,-252,-227,-243,-255,-259,-256,-253,-241,142,-225,-242,222,142,-224,142,-251,-223,-228,142,142,-257,-222,-249,-240,-254,-250,-226,-263,-223,142,142,142,-262,142,142,-223,142,142,-184,-187,-185,-181,-182,-186,-188,142,-190,-191,-183,-189,-260,142,-258,-237,-236,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,-12,142,142,-11,-223,-41,-44,-40,142,-42,142,142,-156,-155,-45,-157,142,-43,-248,142,-235,-234,-233,-232,-231,-244,142,142,-263,-139,-175,-174,142,-172,142,142,-158,142,-171,-159,142,142,-229,-230,142,142,-263,142,142,-11,-170,-173,142,-162,142,-160,142,142,-161,142,142,142,-245,-263,-238,142,-166,-165,-163,-239,142,142,142,-167,-164,142,-169,-168,]),'ID':([0,1,2,3,4,5,6,7,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,27,28,29,30,31,32,34,35,36,37,38,39,40,42,43,44,45,46,49,50,51,52,54,55,56,58,61,62,63,64,65,66,67,68,69,70,72,74,77,78,82,83,84,86,94,96,97,98,99,103,104,105,110,115,116,117,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,166,167,169,170,174,176,177,178,179,180,181,190,191,192,194,195,196,203,206,207,208,209,210,211,212,213,214,215,216,217,219,223,225,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,263,264,266,268,272,274,276,277,278,280,281,282,288,289,291,292,293,295,297,298,302,303,306,309,310,311,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,394,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[33,-263,-63,-74,33,-73,-60,56,-56,-57,-35,-31,-61,-263,-36,33,-55,-70,-65,-91,-54,33,-58,63,-178,-68,-263,-71,33,-34,-75,-90,-66,-33,-62,-37,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-261,-85,-51,-32,-102,-101,102,-263,112,-28,-122,-124,-27,112,33,112,33,-47,-50,-53,33,-263,-263,33,102,102,112,-28,-263,33,-125,-123,33,-227,112,-225,112,-224,112,-223,112,112,-222,-226,-263,-223,112,112,33,33,-38,300,-87,-262,-23,-84,-24,-83,112,102,-103,112,-223,112,112,112,-184,-187,-185,-181,-182,-186,-188,112,-190,-191,-183,-189,112,324,326,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,-12,112,112,112,-11,-39,-223,-41,-44,369,-40,112,-42,112,300,-156,-155,-45,-157,300,-43,-89,-88,33,112,-105,-104,112,112,-263,-139,-175,-174,112,-172,300,112,-158,112,-171,-159,300,112,112,112,112,-263,112,112,-11,-170,-173,112,-162,300,-160,112,300,-161,300,112,300,-263,112,-166,-165,-163,112,300,300,-167,-164,300,-169,-168,]),'IF':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,301,-262,-41,-44,-40,-42,301,-156,-155,-45,-157,301,-43,-175,-174,-172,301,-158,-171,-159,301,-170,-173,-162,301,-160,301,-161,301,301,-166,-165,-163,301,301,-167,-164,301,-169,-168,]),'STRING_LITERAL':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,129,134,135,137,139,141,142,143,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,221,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,143,143,-47,143,-28,-263,-125,-227,143,-225,221
,143,-224,143,-223,143,143,-257,-222,-226,-263,-223,143,143,143,-262,143,143,-223,143,143,-184,-187,-185,-181,-182,-186,-188,143,-190,-191,-183,-189,143,-258,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,-12,143,143,-11,-223,-41,-44,-40,143,-42,143,143,-156,-155,-45,-157,143,-43,143,143,143,-263,-139,-175,-174,143,-172,143,143,-158,143,-171,-159,143,143,143,143,-263,143,143,-11,-170,-173,143,-162,143,-160,143,143,-161,143,143,143,-263,143,-166,-165,-163,143,143,143,-167,-164,143,-169,-168,]),'FLOAT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[35,35,-63,-74,-73,-60,-56,-57,-35,-31,-61,35,-36,-55,-70,-65,-54,35,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,35,-69,35,-72,-76,35,-59,-86,-261,-85,35,-113,-112,-32,-102,-101,35,35,35,-47,-48,35,-115,35,35,35,35,-92,35,35,35,35,-38,35,-49,35,35,-87,-93,-262,-103,-121,-120,35,35,35,35,35,-39,-41,-44,-40,-42,35,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,35,-175,-174,35,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'XOREQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,210,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'LSHIFTEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,212,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'RBRACKET':([3,32,46,65,69,70,72,103,104,112,115,118,120,121,122,123,124,125,126,129,130,131,132,136,138,139,140,143,145,146,147,148,149,150,151,164,176,193,194,218,220,221,222,224,231,232,234,238,240,273,274,305,316,317,321,323,324,325,326,327,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,361,390,391,392,397,398,419,428,430,437,],[-74,-75,-76,-263,-124,-27,-263,-263,-28,-248,-125,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,228,-195,-4,-251,235,-228,-257,-249,-240,-193,-254,-250,-3,-179,-263,-262,314,315,-260,-220,-258,-237,-236,-214,-219,-217,-176,-218,366,367,-192,388,389,-180,-235,-234,-233,-232,391,-231,-244,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,401,-221,-229,-230,-177,-215,-194,-245,-238,-239,]),} + +_lr_action = { } +for _k, _v in _lr_action_items.items(): + for _x,_y in zip(_v[0],_v[1]): + if not _x in _lr_action: _lr_action[_x] = { } + _lr_action[_x][_k] = _y +del _lr_action_items + +_lr_goto_items = 
{'storage_class_specifier':([0,1,14,22,42,44,48,66,78,80,89,165,167,170,204,289,338,373,],[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,]),'identifier_list_opt':([66,],[106,]),'selection_statement':([170,289,297,375,384,411,423,425,427,441,443,446,],[298,298,298,298,298,298,298,298,298,298,298,298,]),'constant':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,]),'unary_expression':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[120,120,120,220,231,234,120,240,120,120,120,231,231,120,120,120,120,120,120,120,120,231,231,231,231,231,231,231,231,231,231,231,231,231,231,231,231,120,231,231,231,120,120,231,120,120,231,120,231,120,120,120,120,120,120,120,231,231,120,120,120,120,120,120,120,120,120,120,120,120,120,]),'conditional_expression':([72,77,103,141,162,164,170,181,192,195,196,213,219,226,227,230,233,257,264,266,281,288,289,297,309,335,373,375,376,380,384,385,393,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[151,151,151,151,151,151,151,305,305,151,151,151,151,151,151,151,151,151,305,151,151,305,151,151,305,151,151,151,151,151,151,151,151,419,151,151,151,151,151,151,151,151,151,151,151,151,151,]),'brace_close':([93,101,172,173,188,189,261,299,362,418,429,],[174,191,302,303,310,311,359,383,404,430,437,]),'struct_or_union_specifier':([0,1,14,22,42,44,48,57,66,78,80,89,91,92,93,94,96,141,165,167,170,172,173,204,219,229,230,233,289,338,373,],[5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,]),'unified_wstring_literal':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,]),'abstract_declarator_opt':([110,239,],[199,337,]),'iteration_statement':([170,289,297,375,384,411,423,425,427,441,443,446,],[277,277,277,277,277,277,277,277,277,277,277,277,]),'init_declarator_list':([30,86,],[71,71,]),'translation_unit_or_empty':([0,],[8,]),'struct_declaration_list':([57,91,92,],[93,172,173,]),'block_item_list_opt':([170,],[299,]),'enumerator':([64,98,99,190,],[100,100,100,312,]),'pp_directive':([0,22,],[11,11,]),'abstract_declarator':([30,78,86,97,110,167,239,338,],[79,161,79,185,201,161,201,161,]),'declaration_specifiers_opt':([1,14,42,44,],[50,58,83,84,]),'external_declaration':([0,22,],[12,61,]),'type_specifier':([0,1,14,22,42,44,48,57,66,78,80,89,91,92,93,94,96,141,165,167,170,172,173,204,219,229,230,233,289,338,373,],[14,14,14,14,1
4,14,14,94,14,14,14,14,94,94,94,94,94,94,14,14,14,94,94,14,94,94,94,94,14,14,14,]),'designation':([155,362,399,429,],[260,260,260,260,]),'compound_statement':([88,163,170,289,297,375,384,411,423,425,427,441,443,446,],[169,272,282,282,282,282,282,282,282,282,282,282,282,282,]),'pointer':([0,4,22,30,68,78,86,97,110,117,167,239,306,338,],[16,16,16,74,116,74,166,166,74,16,166,339,16,339,]),'type_name':([141,219,229,230,233,],[237,322,331,332,333,]),'unified_string_literal':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,]),'postfix_expression':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,]),'assignment_expression_opt':([72,103,164,],[131,193,273,]),'designation_opt':([155,362,399,429,],[266,402,266,402,]),'expression_statement':([170,289,297,375,384,411,423,425,427,441,443,446,],[276,276,276,276,276,276,276,276,276,276,276,276,]),'parameter_declaration':([66,78,165,167,204,338,],[109,109,109,109,320,109,]),'initializer_list_opt':([155,],[261,]),'cast_expression':([72,77,103,134,141,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[132,132,132,232,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,398,132,132,132,132,132,132,132,398,132,132,132,132,132,132,132,132,132,132,132,132,132,132,]),'init_declarator':([30,86,117,],[75,75,205,]),'struct_declarator_list':([97,],[182,]),'unary_operator':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,]),'brace_open':([7,24,54,56,62,63,77,88,162,163,170,266,289,297,336,375,384,390,395,396,402,411,423,425,427,441,443,446,],[57,64,91,92,98,99,155,170,155,170,170,155,170,170,399,170,170,399,399,399,155,170,170,170,170,170,170,170,]),'assignment_operator':([120,],[213,]),'struct_or_union':([0,1,14,22,42,44,48
,57,66,78,80,89,91,92,93,94,96,141,165,167,170,172,173,204,219,229,230,233,289,338,373,],[7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,]),'identifier':([66,72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,203,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,263,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,394,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[114,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,318,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,360,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,417,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,]),'struct_declaration':([57,91,92,93,172,173,],[95,95,95,175,175,175,]),'assignment_expression':([72,77,103,141,162,164,170,195,196,213,219,226,227,230,233,257,266,281,289,297,335,373,375,376,380,384,385,393,402,408,411,421,423,425,426,427,432,438,441,443,446,],[136,156,136,238,156,136,238,316,317,321,238,238,328,238,238,238,156,238,238,238,397,238,238,238,238,238,238,416,156,238,238,238,238,238,238,238,238,238,238,238,238,]),'parameter_type_list':([66,78,165,167,338,],[108,159,159,159,159,]),'type_qualifier_list_opt':([28,65,105,],[68,103,196,]),'direct_declarator':([0,4,16,22,30,74,78,86,97,110,117,166,167,306,],[26,26,60,26,26,60,26,26,26,26,26,60,26,26,]),'type_qualifier_list':([28,65,105,],[67,104,67,]),'designator':([155,267,362,399,429,],[262,364,262,262,262,]),'argument_expression_list':([227,],[330,]),'initializer':([77,162,266,402,],[154,271,363,420,]),'specifier_qualifier_list_opt':([94,96,],[178,180,]),'constant_expression':([181,192,264,288,309,],[304,313,361,377,387,]),'expression_opt':([170,289,297,373,375,384,408,411,421,423,425,427,432,438,441,443,446,],[279,279,279,407,279,279,422,279,431,279,279,279,439,442,279,279,279,]),'primary_expression':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,]),'declaration_specifiers':([0,1,14,22,42,44,48,66,78,80,89,165,167,170,204,289,338,373,],[30,52,52,30,52,52,86,110,110,86,86,110,110,86,110,86,110,86,]),'declaration':([0,22,48,80,89,170,289,373,],[31,31,87,87,171,292,292,408,]),'struct_declarator_list_opt':([97,],[183,]),'identifier_list':([66,],[111,]),'typedef_name':([0,1,14,22,42,44,48,57,66,78,80,89,91,92,93,94,96,141,165,167,170,172,173,204,219,229,230,233,289,338,373,],[29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,]),'parameter_type_list_opt':([78,165,167,338,],[160,275,160,160,]),'jump_statement':([170,289,297,375,384,411,423,425,427,441,443,446,],[293,293,293,293,293,293,293,293,293,293,293,293,]),'declaration_list_opt':([48,80,],[88,163,]),'struct_declarator':([97,306,],[184,386,]),'function_definition':([0,22,],[36,36,]),'binary_expression':([72,77,103,141,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,30
9,335,373,375,376,380,384,385,393,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,147,357,358,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,]),'parameter_list':([66,78,165,167,338,],[113,113,113,113,113,]),'init_declarator_list_opt':([30,86,],[73,73,]),'enum_specifier':([0,1,14,22,42,44,48,57,66,78,80,89,91,92,93,94,96,141,165,167,170,172,173,204,219,229,230,233,289,338,373,],[45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,]),'decl_body':([0,22,48,80,89,170,289,373,],[41,41,41,41,41,41,41,41,]),'type_qualifier':([0,1,14,22,28,42,44,48,57,65,66,67,78,80,89,91,92,93,94,96,104,105,141,165,167,170,172,173,204,219,229,230,233,289,338,373,],[42,42,42,42,69,42,42,42,96,69,42,115,42,42,42,96,96,96,96,96,115,69,96,42,42,42,96,96,42,96,96,96,96,42,42,42,]),'statement':([170,289,297,375,384,411,423,425,427,441,443,446,],[291,291,382,409,414,424,433,434,436,445,447,448,]),'enumerator_list':([64,98,99,],[101,188,189,]),'labeled_statement':([170,289,297,375,384,411,423,425,427,441,443,446,],[280,280,280,280,280,280,280,280,280,280,280,280,]),'function_specifier':([0,1,14,22,42,44,48,66,78,80,89,165,167,170,204,289,338,373,],[44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,]),'specifier_qualifier_list':([57,91,92,93,94,96,141,172,173,219,229,230,233,],[97,97,97,97,179,179,239,97,97,239,239,239,239,]),'block_item':([170,289,],[295,379,]),'block_item_list':([170,],[289,]),'empty':([0,1,14,28,30,42,44,48,65,66,72,78,80,86,94,96,97,103,105,110,155,164,165,167,170,239,289,297,338,362,373,375,384,399,408,411,421,423,425,427,429,432,438,441,443,446,],[47,51,51,70,76,51,51,85,70,107,150,158,85,76,177,177,186,150,70,200,268,150,158,158,296,200,378,378,158,403,378,378,378,403,378,378,378,378,378,378,403,378,378,378,378,378,]),'translation_unit':([0,],[22,]),'initializer_list':([155,399,],[265,418,]),'declarator':([0,4,22,30,78,86,97,110,117,167,306,],[48,53,48,80,53,168,187,202,168,53,187,]),'direct_abstract_declarator':([30,74,78,86,97,110,166,167,239,338,339,],[81,153,81,81,81,81,153,81,81,81,153,]),'designator_list':([155,362,399,429,],[267,267,267,267,]),'declaration_list':([48,80,],[89,89,]),'expression':([141,170,219,226,230,233,257,281,289,297,373,375,376,380,384,385,408,411,421,423,425,426,427,432,438,441,443,446,],[236,285,236,327,236,236,356,372,285,285,285,285,410,412,285,415,285,285,285,285,285,435,285,285,285,285,285,285,]),} + +_lr_goto = { } +for _k, _v in _lr_goto_items.items(): + for _x,_y in zip(_v[0],_v[1]): + if not _x in _lr_goto: _lr_goto[_x] = { } + _lr_goto[_x][_k] = _y +del _lr_goto_items +_lr_productions = [ + ("S' -> translation_unit_or_empty","S'",1,None,None,None), + ('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','../pycparser/plyparser.py',42), + ('abstract_declarator_opt -> abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','../pycparser/plyparser.py',43), + ('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','../pycparser/plyparser.py',42), + ('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','../pycparser/plyparser.py',43), + ('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','../pycparser/plyparser.py',42), + 
('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','../pycparser/plyparser.py',43), + ('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','../pycparser/plyparser.py',42), + ('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','../pycparser/plyparser.py',43), + ('declaration_specifiers_opt -> empty','declaration_specifiers_opt',1,'p_declaration_specifiers_opt','../pycparser/plyparser.py',42), + ('declaration_specifiers_opt -> declaration_specifiers','declaration_specifiers_opt',1,'p_declaration_specifiers_opt','../pycparser/plyparser.py',43), + ('designation_opt -> empty','designation_opt',1,'p_designation_opt','../pycparser/plyparser.py',42), + ('designation_opt -> designation','designation_opt',1,'p_designation_opt','../pycparser/plyparser.py',43), + ('expression_opt -> empty','expression_opt',1,'p_expression_opt','../pycparser/plyparser.py',42), + ('expression_opt -> expression','expression_opt',1,'p_expression_opt','../pycparser/plyparser.py',43), + ('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','../pycparser/plyparser.py',42), + ('identifier_list_opt -> identifier_list','identifier_list_opt',1,'p_identifier_list_opt','../pycparser/plyparser.py',43), + ('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','../pycparser/plyparser.py',42), + ('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','../pycparser/plyparser.py',43), + ('initializer_list_opt -> empty','initializer_list_opt',1,'p_initializer_list_opt','../pycparser/plyparser.py',42), + ('initializer_list_opt -> initializer_list','initializer_list_opt',1,'p_initializer_list_opt','../pycparser/plyparser.py',43), + ('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','../pycparser/plyparser.py',42), + ('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','../pycparser/plyparser.py',43), + ('specifier_qualifier_list_opt -> empty','specifier_qualifier_list_opt',1,'p_specifier_qualifier_list_opt','../pycparser/plyparser.py',42), + ('specifier_qualifier_list_opt -> specifier_qualifier_list','specifier_qualifier_list_opt',1,'p_specifier_qualifier_list_opt','../pycparser/plyparser.py',43), + ('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','../pycparser/plyparser.py',42), + ('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','../pycparser/plyparser.py',43), + ('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','../pycparser/plyparser.py',42), + ('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','../pycparser/plyparser.py',43), + ('translation_unit_or_empty -> translation_unit','translation_unit_or_empty',1,'p_translation_unit_or_empty','../pycparser/c_parser.py',494), + ('translation_unit_or_empty -> empty','translation_unit_or_empty',1,'p_translation_unit_or_empty','../pycparser/c_parser.py',495), + ('translation_unit -> external_declaration','translation_unit',1,'p_translation_unit_1','../pycparser/c_parser.py',503), + ('translation_unit -> translation_unit external_declaration','translation_unit',2,'p_translation_unit_2','../pycparser/c_parser.py',510), + ('external_declaration -> 
function_definition','external_declaration',1,'p_external_declaration_1','../pycparser/c_parser.py',522), + ('external_declaration -> declaration','external_declaration',1,'p_external_declaration_2','../pycparser/c_parser.py',527), + ('external_declaration -> pp_directive','external_declaration',1,'p_external_declaration_3','../pycparser/c_parser.py',532), + ('external_declaration -> SEMI','external_declaration',1,'p_external_declaration_4','../pycparser/c_parser.py',537), + ('pp_directive -> PPHASH','pp_directive',1,'p_pp_directive','../pycparser/c_parser.py',542), + ('function_definition -> declarator declaration_list_opt compound_statement','function_definition',3,'p_function_definition_1','../pycparser/c_parser.py',551), + ('function_definition -> declaration_specifiers declarator declaration_list_opt compound_statement','function_definition',4,'p_function_definition_2','../pycparser/c_parser.py',568), + ('statement -> labeled_statement','statement',1,'p_statement','../pycparser/c_parser.py',579), + ('statement -> expression_statement','statement',1,'p_statement','../pycparser/c_parser.py',580), + ('statement -> compound_statement','statement',1,'p_statement','../pycparser/c_parser.py',581), + ('statement -> selection_statement','statement',1,'p_statement','../pycparser/c_parser.py',582), + ('statement -> iteration_statement','statement',1,'p_statement','../pycparser/c_parser.py',583), + ('statement -> jump_statement','statement',1,'p_statement','../pycparser/c_parser.py',584), + ('decl_body -> declaration_specifiers init_declarator_list_opt','decl_body',2,'p_decl_body','../pycparser/c_parser.py',598), + ('declaration -> decl_body SEMI','declaration',2,'p_declaration','../pycparser/c_parser.py',657), + ('declaration_list -> declaration','declaration_list',1,'p_declaration_list','../pycparser/c_parser.py',666), + ('declaration_list -> declaration_list declaration','declaration_list',2,'p_declaration_list','../pycparser/c_parser.py',667), + ('declaration_specifiers -> type_qualifier declaration_specifiers_opt','declaration_specifiers',2,'p_declaration_specifiers_1','../pycparser/c_parser.py',672), + ('declaration_specifiers -> type_specifier declaration_specifiers_opt','declaration_specifiers',2,'p_declaration_specifiers_2','../pycparser/c_parser.py',677), + ('declaration_specifiers -> storage_class_specifier declaration_specifiers_opt','declaration_specifiers',2,'p_declaration_specifiers_3','../pycparser/c_parser.py',682), + ('declaration_specifiers -> function_specifier declaration_specifiers_opt','declaration_specifiers',2,'p_declaration_specifiers_4','../pycparser/c_parser.py',687), + ('storage_class_specifier -> AUTO','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',692), + ('storage_class_specifier -> REGISTER','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',693), + ('storage_class_specifier -> STATIC','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',694), + ('storage_class_specifier -> EXTERN','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',695), + ('storage_class_specifier -> TYPEDEF','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',696), + ('function_specifier -> INLINE','function_specifier',1,'p_function_specifier','../pycparser/c_parser.py',701), + ('type_specifier -> VOID','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',706), + ('type_specifier -> 
_BOOL','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',707), + ('type_specifier -> CHAR','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',708), + ('type_specifier -> SHORT','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',709), + ('type_specifier -> INT','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',710), + ('type_specifier -> LONG','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',711), + ('type_specifier -> FLOAT','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',712), + ('type_specifier -> DOUBLE','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',713), + ('type_specifier -> _COMPLEX','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',714), + ('type_specifier -> SIGNED','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',715), + ('type_specifier -> UNSIGNED','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',716), + ('type_specifier -> typedef_name','type_specifier',1,'p_type_specifier_2','../pycparser/c_parser.py',721), + ('type_specifier -> enum_specifier','type_specifier',1,'p_type_specifier_2','../pycparser/c_parser.py',722), + ('type_specifier -> struct_or_union_specifier','type_specifier',1,'p_type_specifier_2','../pycparser/c_parser.py',723), + ('type_qualifier -> CONST','type_qualifier',1,'p_type_qualifier','../pycparser/c_parser.py',728), + ('type_qualifier -> RESTRICT','type_qualifier',1,'p_type_qualifier','../pycparser/c_parser.py',729), + ('type_qualifier -> VOLATILE','type_qualifier',1,'p_type_qualifier','../pycparser/c_parser.py',730), + ('init_declarator_list -> init_declarator','init_declarator_list',1,'p_init_declarator_list_1','../pycparser/c_parser.py',735), + ('init_declarator_list -> init_declarator_list COMMA init_declarator','init_declarator_list',3,'p_init_declarator_list_1','../pycparser/c_parser.py',736), + ('init_declarator_list -> EQUALS initializer','init_declarator_list',2,'p_init_declarator_list_2','../pycparser/c_parser.py',746), + ('init_declarator_list -> abstract_declarator','init_declarator_list',1,'p_init_declarator_list_3','../pycparser/c_parser.py',754), + ('init_declarator -> declarator','init_declarator',1,'p_init_declarator','../pycparser/c_parser.py',762), + ('init_declarator -> declarator EQUALS initializer','init_declarator',3,'p_init_declarator','../pycparser/c_parser.py',763), + ('specifier_qualifier_list -> type_qualifier specifier_qualifier_list_opt','specifier_qualifier_list',2,'p_specifier_qualifier_list_1','../pycparser/c_parser.py',768), + ('specifier_qualifier_list -> type_specifier specifier_qualifier_list_opt','specifier_qualifier_list',2,'p_specifier_qualifier_list_2','../pycparser/c_parser.py',773), + ('struct_or_union_specifier -> struct_or_union ID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','../pycparser/c_parser.py',781), + ('struct_or_union_specifier -> struct_or_union TYPEID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','../pycparser/c_parser.py',782), + ('struct_or_union_specifier -> struct_or_union brace_open struct_declaration_list brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_2','../pycparser/c_parser.py',791), + ('struct_or_union_specifier -> struct_or_union ID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','../pycparser/c_parser.py',800), + ('struct_or_union_specifier -> struct_or_union TYPEID brace_open 
struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','../pycparser/c_parser.py',801), + ('struct_or_union -> STRUCT','struct_or_union',1,'p_struct_or_union','../pycparser/c_parser.py',810), + ('struct_or_union -> UNION','struct_or_union',1,'p_struct_or_union','../pycparser/c_parser.py',811), + ('struct_declaration_list -> struct_declaration','struct_declaration_list',1,'p_struct_declaration_list','../pycparser/c_parser.py',818), + ('struct_declaration_list -> struct_declaration_list struct_declaration','struct_declaration_list',2,'p_struct_declaration_list','../pycparser/c_parser.py',819), + ('struct_declaration -> specifier_qualifier_list struct_declarator_list_opt SEMI','struct_declaration',3,'p_struct_declaration_1','../pycparser/c_parser.py',824), + ('struct_declaration -> specifier_qualifier_list abstract_declarator SEMI','struct_declaration',3,'p_struct_declaration_2','../pycparser/c_parser.py',862), + ('struct_declarator_list -> struct_declarator','struct_declarator_list',1,'p_struct_declarator_list','../pycparser/c_parser.py',876), + ('struct_declarator_list -> struct_declarator_list COMMA struct_declarator','struct_declarator_list',3,'p_struct_declarator_list','../pycparser/c_parser.py',877), + ('struct_declarator -> declarator','struct_declarator',1,'p_struct_declarator_1','../pycparser/c_parser.py',885), + ('struct_declarator -> declarator COLON constant_expression','struct_declarator',3,'p_struct_declarator_2','../pycparser/c_parser.py',890), + ('struct_declarator -> COLON constant_expression','struct_declarator',2,'p_struct_declarator_2','../pycparser/c_parser.py',891), + ('enum_specifier -> ENUM ID','enum_specifier',2,'p_enum_specifier_1','../pycparser/c_parser.py',899), + ('enum_specifier -> ENUM TYPEID','enum_specifier',2,'p_enum_specifier_1','../pycparser/c_parser.py',900), + ('enum_specifier -> ENUM brace_open enumerator_list brace_close','enum_specifier',4,'p_enum_specifier_2','../pycparser/c_parser.py',905), + ('enum_specifier -> ENUM ID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','../pycparser/c_parser.py',910), + ('enum_specifier -> ENUM TYPEID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','../pycparser/c_parser.py',911), + ('enumerator_list -> enumerator','enumerator_list',1,'p_enumerator_list','../pycparser/c_parser.py',916), + ('enumerator_list -> enumerator_list COMMA','enumerator_list',2,'p_enumerator_list','../pycparser/c_parser.py',917), + ('enumerator_list -> enumerator_list COMMA enumerator','enumerator_list',3,'p_enumerator_list','../pycparser/c_parser.py',918), + ('enumerator -> ID','enumerator',1,'p_enumerator','../pycparser/c_parser.py',929), + ('enumerator -> ID EQUALS constant_expression','enumerator',3,'p_enumerator','../pycparser/c_parser.py',930), + ('declarator -> direct_declarator','declarator',1,'p_declarator_1','../pycparser/c_parser.py',945), + ('declarator -> pointer direct_declarator','declarator',2,'p_declarator_2','../pycparser/c_parser.py',950), + ('declarator -> pointer TYPEID','declarator',2,'p_declarator_3','../pycparser/c_parser.py',959), + ('direct_declarator -> ID','direct_declarator',1,'p_direct_declarator_1','../pycparser/c_parser.py',970), + ('direct_declarator -> LPAREN declarator RPAREN','direct_declarator',3,'p_direct_declarator_2','../pycparser/c_parser.py',979), + ('direct_declarator -> direct_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt 
RBRACKET','direct_declarator',5,'p_direct_declarator_3','../pycparser/c_parser.py',984), + ('direct_declarator -> direct_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_declarator',6,'p_direct_declarator_4','../pycparser/c_parser.py',998), + ('direct_declarator -> direct_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_declarator',6,'p_direct_declarator_4','../pycparser/c_parser.py',999), + ('direct_declarator -> direct_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_declarator',5,'p_direct_declarator_5','../pycparser/c_parser.py',1019), + ('direct_declarator -> direct_declarator LPAREN parameter_type_list RPAREN','direct_declarator',4,'p_direct_declarator_6','../pycparser/c_parser.py',1030), + ('direct_declarator -> direct_declarator LPAREN identifier_list_opt RPAREN','direct_declarator',4,'p_direct_declarator_6','../pycparser/c_parser.py',1031), + ('pointer -> TIMES type_qualifier_list_opt','pointer',2,'p_pointer','../pycparser/c_parser.py',1058), + ('pointer -> TIMES type_qualifier_list_opt pointer','pointer',3,'p_pointer','../pycparser/c_parser.py',1059), + ('type_qualifier_list -> type_qualifier','type_qualifier_list',1,'p_type_qualifier_list','../pycparser/c_parser.py',1088), + ('type_qualifier_list -> type_qualifier_list type_qualifier','type_qualifier_list',2,'p_type_qualifier_list','../pycparser/c_parser.py',1089), + ('parameter_type_list -> parameter_list','parameter_type_list',1,'p_parameter_type_list','../pycparser/c_parser.py',1094), + ('parameter_type_list -> parameter_list COMMA ELLIPSIS','parameter_type_list',3,'p_parameter_type_list','../pycparser/c_parser.py',1095), + ('parameter_list -> parameter_declaration','parameter_list',1,'p_parameter_list','../pycparser/c_parser.py',1103), + ('parameter_list -> parameter_list COMMA parameter_declaration','parameter_list',3,'p_parameter_list','../pycparser/c_parser.py',1104), + ('parameter_declaration -> declaration_specifiers declarator','parameter_declaration',2,'p_parameter_declaration_1','../pycparser/c_parser.py',1113), + ('parameter_declaration -> declaration_specifiers abstract_declarator_opt','parameter_declaration',2,'p_parameter_declaration_2','../pycparser/c_parser.py',1124), + ('identifier_list -> identifier','identifier_list',1,'p_identifier_list','../pycparser/c_parser.py',1155), + ('identifier_list -> identifier_list COMMA identifier','identifier_list',3,'p_identifier_list','../pycparser/c_parser.py',1156), + ('initializer -> assignment_expression','initializer',1,'p_initializer_1','../pycparser/c_parser.py',1165), + ('initializer -> brace_open initializer_list_opt brace_close','initializer',3,'p_initializer_2','../pycparser/c_parser.py',1170), + ('initializer -> brace_open initializer_list COMMA brace_close','initializer',4,'p_initializer_2','../pycparser/c_parser.py',1171), + ('initializer_list -> designation_opt initializer','initializer_list',2,'p_initializer_list','../pycparser/c_parser.py',1179), + ('initializer_list -> initializer_list COMMA designation_opt initializer','initializer_list',4,'p_initializer_list','../pycparser/c_parser.py',1180), + ('designation -> designator_list EQUALS','designation',2,'p_designation','../pycparser/c_parser.py',1191), + ('designator_list -> designator','designator_list',1,'p_designator_list','../pycparser/c_parser.py',1199), + ('designator_list -> designator_list designator','designator_list',2,'p_designator_list','../pycparser/c_parser.py',1200), + ('designator -> LBRACKET 
constant_expression RBRACKET','designator',3,'p_designator','../pycparser/c_parser.py',1205), + ('designator -> PERIOD identifier','designator',2,'p_designator','../pycparser/c_parser.py',1206), + ('type_name -> specifier_qualifier_list abstract_declarator_opt','type_name',2,'p_type_name','../pycparser/c_parser.py',1211), + ('abstract_declarator -> pointer','abstract_declarator',1,'p_abstract_declarator_1','../pycparser/c_parser.py',1228), + ('abstract_declarator -> pointer direct_abstract_declarator','abstract_declarator',2,'p_abstract_declarator_2','../pycparser/c_parser.py',1236), + ('abstract_declarator -> direct_abstract_declarator','abstract_declarator',1,'p_abstract_declarator_3','../pycparser/c_parser.py',1241), + ('direct_abstract_declarator -> LPAREN abstract_declarator RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_1','../pycparser/c_parser.py',1251), + ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_2','../pycparser/c_parser.py',1255), + ('direct_abstract_declarator -> LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_3','../pycparser/c_parser.py',1266), + ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET TIMES RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_4','../pycparser/c_parser.py',1275), + ('direct_abstract_declarator -> LBRACKET TIMES RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_5','../pycparser/c_parser.py',1286), + ('direct_abstract_declarator -> direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator_6','../pycparser/c_parser.py',1295), + ('direct_abstract_declarator -> LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_7','../pycparser/c_parser.py',1305), + ('block_item -> declaration','block_item',1,'p_block_item','../pycparser/c_parser.py',1316), + ('block_item -> statement','block_item',1,'p_block_item','../pycparser/c_parser.py',1317), + ('block_item_list -> block_item','block_item_list',1,'p_block_item_list','../pycparser/c_parser.py',1324), + ('block_item_list -> block_item_list block_item','block_item_list',2,'p_block_item_list','../pycparser/c_parser.py',1325), + ('compound_statement -> brace_open block_item_list_opt brace_close','compound_statement',3,'p_compound_statement_1','../pycparser/c_parser.py',1331), + ('labeled_statement -> ID COLON statement','labeled_statement',3,'p_labeled_statement_1','../pycparser/c_parser.py',1337), + ('labeled_statement -> CASE constant_expression COLON statement','labeled_statement',4,'p_labeled_statement_2','../pycparser/c_parser.py',1341), + ('labeled_statement -> DEFAULT COLON statement','labeled_statement',3,'p_labeled_statement_3','../pycparser/c_parser.py',1345), + ('selection_statement -> IF LPAREN expression RPAREN statement','selection_statement',5,'p_selection_statement_1','../pycparser/c_parser.py',1349), + ('selection_statement -> IF LPAREN expression RPAREN statement ELSE statement','selection_statement',7,'p_selection_statement_2','../pycparser/c_parser.py',1353), + ('selection_statement -> SWITCH LPAREN expression RPAREN statement','selection_statement',5,'p_selection_statement_3','../pycparser/c_parser.py',1357), + ('iteration_statement -> WHILE LPAREN expression RPAREN 
statement','iteration_statement',5,'p_iteration_statement_1','../pycparser/c_parser.py',1362), + ('iteration_statement -> DO statement WHILE LPAREN expression RPAREN SEMI','iteration_statement',7,'p_iteration_statement_2','../pycparser/c_parser.py',1366), + ('iteration_statement -> FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement','iteration_statement',9,'p_iteration_statement_3','../pycparser/c_parser.py',1370), + ('iteration_statement -> FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement','iteration_statement',8,'p_iteration_statement_4','../pycparser/c_parser.py',1374), + ('jump_statement -> GOTO ID SEMI','jump_statement',3,'p_jump_statement_1','../pycparser/c_parser.py',1379), + ('jump_statement -> BREAK SEMI','jump_statement',2,'p_jump_statement_2','../pycparser/c_parser.py',1383), + ('jump_statement -> CONTINUE SEMI','jump_statement',2,'p_jump_statement_3','../pycparser/c_parser.py',1387), + ('jump_statement -> RETURN expression SEMI','jump_statement',3,'p_jump_statement_4','../pycparser/c_parser.py',1391), + ('jump_statement -> RETURN SEMI','jump_statement',2,'p_jump_statement_4','../pycparser/c_parser.py',1392), + ('expression_statement -> expression_opt SEMI','expression_statement',2,'p_expression_statement','../pycparser/c_parser.py',1397), + ('expression -> assignment_expression','expression',1,'p_expression','../pycparser/c_parser.py',1404), + ('expression -> expression COMMA assignment_expression','expression',3,'p_expression','../pycparser/c_parser.py',1405), + ('typedef_name -> TYPEID','typedef_name',1,'p_typedef_name','../pycparser/c_parser.py',1417), + ('assignment_expression -> conditional_expression','assignment_expression',1,'p_assignment_expression','../pycparser/c_parser.py',1421), + ('assignment_expression -> unary_expression assignment_operator assignment_expression','assignment_expression',3,'p_assignment_expression','../pycparser/c_parser.py',1422), + ('assignment_operator -> EQUALS','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1435), + ('assignment_operator -> XOREQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1436), + ('assignment_operator -> TIMESEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1437), + ('assignment_operator -> DIVEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1438), + ('assignment_operator -> MODEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1439), + ('assignment_operator -> PLUSEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1440), + ('assignment_operator -> MINUSEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1441), + ('assignment_operator -> LSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1442), + ('assignment_operator -> RSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1443), + ('assignment_operator -> ANDEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1444), + ('assignment_operator -> OREQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1445), + ('constant_expression -> conditional_expression','constant_expression',1,'p_constant_expression','../pycparser/c_parser.py',1450), + ('conditional_expression -> binary_expression','conditional_expression',1,'p_conditional_expression','../pycparser/c_parser.py',1454), + 
('conditional_expression -> binary_expression CONDOP expression COLON conditional_expression','conditional_expression',5,'p_conditional_expression','../pycparser/c_parser.py',1455), + ('binary_expression -> cast_expression','binary_expression',1,'p_binary_expression','../pycparser/c_parser.py',1463), + ('binary_expression -> binary_expression TIMES binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1464), + ('binary_expression -> binary_expression DIVIDE binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1465), + ('binary_expression -> binary_expression MOD binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1466), + ('binary_expression -> binary_expression PLUS binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1467), + ('binary_expression -> binary_expression MINUS binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1468), + ('binary_expression -> binary_expression RSHIFT binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1469), + ('binary_expression -> binary_expression LSHIFT binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1470), + ('binary_expression -> binary_expression LT binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1471), + ('binary_expression -> binary_expression LE binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1472), + ('binary_expression -> binary_expression GE binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1473), + ('binary_expression -> binary_expression GT binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1474), + ('binary_expression -> binary_expression EQ binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1475), + ('binary_expression -> binary_expression NE binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1476), + ('binary_expression -> binary_expression AND binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1477), + ('binary_expression -> binary_expression OR binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1478), + ('binary_expression -> binary_expression XOR binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1479), + ('binary_expression -> binary_expression LAND binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1480), + ('binary_expression -> binary_expression LOR binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1481), + ('cast_expression -> unary_expression','cast_expression',1,'p_cast_expression_1','../pycparser/c_parser.py',1489), + ('cast_expression -> LPAREN type_name RPAREN cast_expression','cast_expression',4,'p_cast_expression_2','../pycparser/c_parser.py',1493), + ('unary_expression -> postfix_expression','unary_expression',1,'p_unary_expression_1','../pycparser/c_parser.py',1497), + ('unary_expression -> PLUSPLUS unary_expression','unary_expression',2,'p_unary_expression_2','../pycparser/c_parser.py',1501), + ('unary_expression -> MINUSMINUS unary_expression','unary_expression',2,'p_unary_expression_2','../pycparser/c_parser.py',1502), + 
('unary_expression -> unary_operator cast_expression','unary_expression',2,'p_unary_expression_2','../pycparser/c_parser.py',1503), + ('unary_expression -> SIZEOF unary_expression','unary_expression',2,'p_unary_expression_3','../pycparser/c_parser.py',1508), + ('unary_expression -> SIZEOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','../pycparser/c_parser.py',1509), + ('unary_operator -> AND','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1517), + ('unary_operator -> TIMES','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1518), + ('unary_operator -> PLUS','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1519), + ('unary_operator -> MINUS','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1520), + ('unary_operator -> NOT','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1521), + ('unary_operator -> LNOT','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1522), + ('postfix_expression -> primary_expression','postfix_expression',1,'p_postfix_expression_1','../pycparser/c_parser.py',1527), + ('postfix_expression -> postfix_expression LBRACKET expression RBRACKET','postfix_expression',4,'p_postfix_expression_2','../pycparser/c_parser.py',1531), + ('postfix_expression -> postfix_expression LPAREN argument_expression_list RPAREN','postfix_expression',4,'p_postfix_expression_3','../pycparser/c_parser.py',1535), + ('postfix_expression -> postfix_expression LPAREN RPAREN','postfix_expression',3,'p_postfix_expression_3','../pycparser/c_parser.py',1536), + ('postfix_expression -> postfix_expression PERIOD ID','postfix_expression',3,'p_postfix_expression_4','../pycparser/c_parser.py',1541), + ('postfix_expression -> postfix_expression PERIOD TYPEID','postfix_expression',3,'p_postfix_expression_4','../pycparser/c_parser.py',1542), + ('postfix_expression -> postfix_expression ARROW ID','postfix_expression',3,'p_postfix_expression_4','../pycparser/c_parser.py',1543), + ('postfix_expression -> postfix_expression ARROW TYPEID','postfix_expression',3,'p_postfix_expression_4','../pycparser/c_parser.py',1544), + ('postfix_expression -> postfix_expression PLUSPLUS','postfix_expression',2,'p_postfix_expression_5','../pycparser/c_parser.py',1550), + ('postfix_expression -> postfix_expression MINUSMINUS','postfix_expression',2,'p_postfix_expression_5','../pycparser/c_parser.py',1551), + ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list brace_close','postfix_expression',6,'p_postfix_expression_6','../pycparser/c_parser.py',1556), + ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close','postfix_expression',7,'p_postfix_expression_6','../pycparser/c_parser.py',1557), + ('primary_expression -> identifier','primary_expression',1,'p_primary_expression_1','../pycparser/c_parser.py',1562), + ('primary_expression -> constant','primary_expression',1,'p_primary_expression_2','../pycparser/c_parser.py',1566), + ('primary_expression -> unified_string_literal','primary_expression',1,'p_primary_expression_3','../pycparser/c_parser.py',1570), + ('primary_expression -> unified_wstring_literal','primary_expression',1,'p_primary_expression_3','../pycparser/c_parser.py',1571), + ('primary_expression -> LPAREN expression RPAREN','primary_expression',3,'p_primary_expression_4','../pycparser/c_parser.py',1576), + ('primary_expression -> OFFSETOF LPAREN type_name COMMA identifier 
RPAREN','primary_expression',6,'p_primary_expression_5','../pycparser/c_parser.py',1580), + ('argument_expression_list -> assignment_expression','argument_expression_list',1,'p_argument_expression_list','../pycparser/c_parser.py',1588), + ('argument_expression_list -> argument_expression_list COMMA assignment_expression','argument_expression_list',3,'p_argument_expression_list','../pycparser/c_parser.py',1589), + ('identifier -> ID','identifier',1,'p_identifier','../pycparser/c_parser.py',1598), + ('constant -> INT_CONST_DEC','constant',1,'p_constant_1','../pycparser/c_parser.py',1602), + ('constant -> INT_CONST_OCT','constant',1,'p_constant_1','../pycparser/c_parser.py',1603), + ('constant -> INT_CONST_HEX','constant',1,'p_constant_1','../pycparser/c_parser.py',1604), + ('constant -> INT_CONST_BIN','constant',1,'p_constant_1','../pycparser/c_parser.py',1605), + ('constant -> FLOAT_CONST','constant',1,'p_constant_2','../pycparser/c_parser.py',1611), + ('constant -> HEX_FLOAT_CONST','constant',1,'p_constant_2','../pycparser/c_parser.py',1612), + ('constant -> CHAR_CONST','constant',1,'p_constant_3','../pycparser/c_parser.py',1618), + ('constant -> WCHAR_CONST','constant',1,'p_constant_3','../pycparser/c_parser.py',1619), + ('unified_string_literal -> STRING_LITERAL','unified_string_literal',1,'p_unified_string_literal','../pycparser/c_parser.py',1630), + ('unified_string_literal -> unified_string_literal STRING_LITERAL','unified_string_literal',2,'p_unified_string_literal','../pycparser/c_parser.py',1631), + ('unified_wstring_literal -> WSTRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','../pycparser/c_parser.py',1641), + ('unified_wstring_literal -> unified_wstring_literal WSTRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','../pycparser/c_parser.py',1642), + ('brace_open -> LBRACE','brace_open',1,'p_brace_open','../pycparser/c_parser.py',1652), + ('brace_close -> RBRACE','brace_close',1,'p_brace_close','../pycparser/c_parser.py',1657), + ('empty -> <empty>','empty',0,'p_empty','../pycparser/c_parser.py',1662), +] diff --git a/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..6f95919 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,2038 @@ +=============================== +Installing and Using Setuptools +=============================== + +.. contents:: **Table of Contents** + + +------------------------- +Installation Instructions +------------------------- + +The recommended way to bootstrap setuptools on any system is to download +`ez_setup.py`_ and run it using the target Python environment. Different +operating systems have different recommended techniques to accomplish this +basic routine, so below are some examples to get you started. + +Setuptools requires Python 2.6 or later. To install setuptools +on Python 2.4 or Python 2.5, use the `bootstrap script for Setuptools 1.x +<https://bitbucket.org/pypa/setuptools/raw/bootstrap-py24/ez_setup.py>`_. + +The link provided to ez_setup.py is a bookmark to bootstrap script for the +latest known stable release. + +.. _ez_setup.py: https://bootstrap.pypa.io/ez_setup.py + +Windows 8 (Powershell) +====================== + +For best results, uninstall previous versions FIRST (see `Uninstalling`_). + +Using Windows 8 or later, it's possible to install with one simple Powershell +command. 
Start up Powershell and paste this command:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - + +You must start Powershell with Administrative privileges, or you may choose +to install a user-local installation:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - --user + +If you have Python 3.3 or later, you can use the ``py`` command to install to +different Python versions. For example, to install to Python 3.3 if you have +Python 2.7 installed:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | py -3 - + +The recommended way to install setuptools on Windows is to download +`ez_setup.py`_ and run it. The script will download the appropriate .egg +file and install it for you. + +Once installation is complete, you will find an ``easy_install`` program in +your Python ``Scripts`` subdirectory. For simple invocation and best results, +add this directory to your ``PATH`` environment variable, if it is not already +present. If you did a user-local install, the ``Scripts`` subdirectory is +``$env:APPDATA\Python\Scripts``. + + +Windows 7 (or graphical install) +================================ + +For Windows 7 and earlier, download `ez_setup.py`_ using your favorite web +browser or other technique and "run" that file. + + +Unix (wget) +=========== + +Most Linux distributions come with wget. + +Download `ez_setup.py`_ and run it using the target Python version. The script +will download the appropriate version and install it for you:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | python + +Note that you may need to invoke the command with superuser privileges to +install to the system Python:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | sudo python + +Alternatively, Setuptools may be installed to a user-local path:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - --user + +Unix including Mac OS X (curl) +============================== + +If your system has curl installed, follow the ``wget`` instructions but +replace ``wget`` with ``curl`` and ``-O`` with ``-o``. For example:: + + > curl https://bootstrap.pypa.io/ez_setup.py -o - | python + + +Advanced Installation +===================== + +For more advanced installation options, such as installing to custom +locations or prefixes, download and extract the source +tarball from `Setuptools on PyPI <https://pypi.python.org/pypi/setuptools>`_ +and run setup.py with any supported distutils and Setuptools options. +For example:: + + setuptools-x.x$ python setup.py install --prefix=/opt/setuptools + +Use ``--help`` to get a full options list, but we recommend consulting +the `EasyInstall manual`_ for detailed instructions, especially `the section +on custom installation locations`_. + +.. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall +.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations + + +Downloads +========= + +All setuptools downloads can be found at `the project's home page in the Python +Package Index`_. Scroll to the very bottom of the page to find the links. + +.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools + +In addition to the PyPI downloads, the development version of ``setuptools`` +is available from the `Bitbucket repo`_, and in-development versions of the +`0.6 branch`_ are available as well. + +..
_Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev +.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 + +Uninstalling +============ + +On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` +installer, simply use the uninstall feature of "Add/Remove Programs" in the +Control Panel. + +Otherwise, to uninstall Setuptools or Distribute, regardless of the Python +version, delete all ``setuptools*`` and ``distribute*`` files and +directories from your system's ``site-packages`` directory +(and any other ``sys.path`` directories) FIRST. + +If you are upgrading or otherwise plan to re-install Setuptools or Distribute, +nothing further needs to be done. If you want to completely remove Setuptools, +you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts +and associated executables installed to the Python scripts directory. + +-------------------------------- +Using Setuptools and EasyInstall +-------------------------------- + +Here are some of the available manuals, tutorials, and other resources for +learning about Setuptools, Python Eggs, and EasyInstall: + +* `The EasyInstall user's guide and reference manual`_ +* `The setuptools Developer's Guide`_ +* `The pkg_resources API reference`_ +* `Package Compatibility Notes`_ (user-maintained) +* `The Internal Structure of Python Eggs`_ + +Questions, comments, and bug reports should be directed to the `distutils-sig +mailing list`_. If you have written (or know of) any tutorials, documentation, +plug-ins, or other resources for setuptools users, please let us know about +them there, so this reference list can be updated. If you have working, +*tested* patches to correct problems or add features, you may submit them to +the `setuptools bug tracker`_. + +.. _setuptools bug tracker: https://bitbucket.org/pypa/setuptools/issues +.. _Package Compatibility Notes: https://pythonhosted.org/setuptools/PackageNotes +.. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html +.. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html +.. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html +.. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html +.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ + + +------- +Credits +------- + +* The original design for the ``.egg`` format and the ``pkg_resources`` API was + co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first + version of ``pkg_resources``, and supplied the OS X operating system version + compatibility algorithm. + +* Ian Bicking implemented many early "creature comfort" features of + easy_install, including support for downloading via Sourceforge and + Subversion repositories. Ian's comments on the Web-SIG about WSGI + application deployment also inspired the concept of "entry points" in eggs, + and he has given talks at PyCon and elsewhere to inform and educate the + community about eggs and setuptools. + +* Jim Fulton contributed time and effort to build automated tests of various + aspects of ``easy_install``, and supplied the doctests for the command-line + ``.exe`` wrappers on Windows. + +* Phillip J. Eby is the seminal author of setuptools, and + first proposed the idea of an importable binary distribution format for + Python application plug-ins. 
+ +* Significant parts of the implementation of setuptools were funded by the Open + Source Applications Foundation, to provide a plug-in infrastructure for the + Chandler PIM application. In addition, many OSAF staffers (such as Mike + "Code Bear" Taylor) contributed their time and stress as guinea pigs for the + use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) + +* Tarek Ziadé is the principal author of the Distribute fork, which + re-invigorated the community on the project, encouraged renewed innovation, + and addressed many defects. + +* Since the merge with Distribute, Jason R. Coombs is the + maintainer of setuptools. The project is maintained in coordination with + the Python Packaging Authority (PyPA) and the larger Python community. + +.. _files: + +======= +CHANGES +======= + +----- +5.5.1 +----- + +* `Issue #239 <https://bitbucket.org/pypa/setuptools/issue/239>`_: Fix typo in 5.5 such that fix did not take. + +--- +5.5 +--- + +* `Issue #239 <https://bitbucket.org/pypa/setuptools/issue/239>`_: Setuptools now includes the setup_requires directive on + Distribution objects and validates the syntax just like install_requires + and tests_require directives. + +----- +5.4.2 +----- + +* `Issue #236 <https://bitbucket.org/pypa/setuptools/issue/236>`_: Corrected regression in execfile implementation for Python 2.6. + +----- +5.4.1 +----- + +* `Python #7776 <http://bugs.python.org/issue7776>`_: (ssl_support) Correct usage of host for validation when + tunneling for HTTPS. + +--- +5.4 +--- + +* `Issue #154 <https://bitbucket.org/pypa/setuptools/issue/154>`_: ``pkg_resources`` will now cache the zip manifests rather than + re-processing the same file from disk multiple times, but only if the + environment variable ``PKG_RESOURCES_CACHE_ZIP_MANIFESTS`` is set. Clients + that package many modules in the same zip file will see some improvement + in startup time by enabling this feature. This feature is not enabled by + default because it causes a substantial increase in memory usage. (A brief + usage sketch appears below, after the 5.0 notes.) + +--- +5.3 +--- + +* `Issue #185 <https://bitbucket.org/pypa/setuptools/issue/185>`_: Make svn tagging work on the new style SVN metadata. + Thanks cazabon! +* Prune revision control directories (e.g. .svn) from base path + as well as sub-directories. + +--- +5.2 +--- + +* Added a `Developer Guide + <https://pythonhosted.org/setuptools/developer-guide.html>`_ to the official + documentation. +* Some code refactoring and cleanup was done with no intended behavioral + changes. +* During install_egg_info, the generated lines for namespace package .pth + files are now processed even during a dry run. + +--- +5.1 +--- + +* `Issue #202 <https://bitbucket.org/pypa/setuptools/issue/202>`_: Implemented more robust cache invalidation for the ZipImporter, + building on the work in `Issue #168 <https://bitbucket.org/pypa/setuptools/issue/168>`_. Special thanks to Jurko Gospodnetic and + PJE. + +----- +5.0.2 +----- + +* `Issue #220 <https://bitbucket.org/pypa/setuptools/issue/220>`_: Restored script templates. + +----- +5.0.1 +----- + +* Renamed script templates to end with .tmpl now that they no longer need + to be processed by 2to3. Fixes spurious syntax errors during build/install. + +--- +5.0 +--- + +* `Issue #218 <https://bitbucket.org/pypa/setuptools/issue/218>`_: Re-release of 3.8.1 to signal that it supersedes 4.x. +* Incidentally, script templates were updated not to include the triple-quote + escaping.
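+
+A minimal sketch of the zip-manifest cache opt-in described in the 5.4 note
+above. The environment variable name comes from that note; the surrounding
+code is illustrative only and assumes the variable must be set before
+``pkg_resources`` is first imported::
+
+    import os
+
+    # Any non-empty value opts in. The cache trades higher memory usage
+    # for faster startup when many modules ship in the same zip file
+    # (assumed behavior; see the 5.4 note above).
+    os.environ["PKG_RESOURCES_CACHE_ZIP_MANIFESTS"] = "1"
+
+    import pkg_resources  # zip manifests will now be cached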
+ +------------------------- +3.7.1 and 3.8.1 and 4.0.1 +------------------------- + +* `Issue #213 <https://bitbucket.org/pypa/setuptools/issue/213>`_: Use legacy StringIO behavior for compatibility under pbr. +* `Issue #218 <https://bitbucket.org/pypa/setuptools/issue/218>`_: Setuptools 3.8.1 superseded 4.0.1, and 4.x was removed + from the available versions to install. + +--- +4.0 +--- + +* `Issue #210 <https://bitbucket.org/pypa/setuptools/issue/210>`_: ``setup.py develop`` now copies scripts in binary mode rather + than text mode, matching the behavior of the ``install`` command. + +--- +3.8 +--- + +* Extend `Issue #197 <https://bitbucket.org/pypa/setuptools/issue/197>`_ workaround to include all Python 3 versions prior to + 3.2.2. + +--- +3.7 +--- + +* `Issue #193 <https://bitbucket.org/pypa/setuptools/issue/193>`_: Improved handling of Unicode filenames when building manifests. + +--- +3.6 +--- + +* `Issue #203 <https://bitbucket.org/pypa/setuptools/issue/203>`_: Honor proxy settings for Powershell downloader in the bootstrap + routine. + +----- +3.5.2 +----- + +* `Issue #168 <https://bitbucket.org/pypa/setuptools/issue/168>`_: More robust handling of replaced zip files and stale caches. + Fixes ZipImportError complaining about a 'bad local header'. + +----- +3.5.1 +----- + +* `Issue #199 <https://bitbucket.org/pypa/setuptools/issue/199>`_: Restored ``install._install`` for compatibility with earlier + NumPy versions. + +--- +3.5 +--- + +* `Issue #195 <https://bitbucket.org/pypa/setuptools/issue/195>`_: Follow symbolic links in find_packages (restoring behavior + broken in 3.4). +* `Issue #197 <https://bitbucket.org/pypa/setuptools/issue/197>`_: On Python 3.1, PKG-INFO is now saved in a UTF-8 encoding instead + of ``sys.getpreferredencoding`` to match the behavior on Python 2.6-3.4. +* `Issue #192 <https://bitbucket.org/pypa/setuptools/issue/192>`_: Preferred bootstrap location is now + https://bootstrap.pypa.io/ez_setup.py (mirrored from former location). + +----- +3.4.4 +----- + +* `Issue #184 <https://bitbucket.org/pypa/setuptools/issue/184>`_: Correct failure where find_package over-matched packages + when directory traversal isn't short-circuited. + +----- +3.4.3 +----- + +* `Issue #183 <https://bitbucket.org/pypa/setuptools/issue/183>`_: Really fix test command with Python 3.1. + +----- +3.4.2 +----- + +* `Issue #183 <https://bitbucket.org/pypa/setuptools/issue/183>`_: Fix additional regression in test command on Python 3.1. + +----- +3.4.1 +----- + +* `Issue #180 <https://bitbucket.org/pypa/setuptools/issue/180>`_: Fix regression in test command not caught by py.test-run tests. + +--- +3.4 +--- + +* `Issue #176 <https://bitbucket.org/pypa/setuptools/issue/176>`_: Add parameter to the test command to support a custom test + runner: --test-runner or -r. +* `Issue #177 <https://bitbucket.org/pypa/setuptools/issue/177>`_: Now assume most common invocation to install command on + platforms/environments without stack support (issuing a warning). Setuptools + now installs naturally on IronPython. Behavior on CPython should be + unchanged. + +--- +3.3 +--- + +* Add ``include`` parameter to ``setuptools.find_packages()``. + +--- +3.2 +--- + +* `Pull Request #39 <https://bitbucket.org/pypa/setuptools/pull-request/39>`_: Add support for C++ targets from Cython ``.pyx`` files. +* `Issue #162 <https://bitbucket.org/pypa/setuptools/issue/162>`_: Update dependency on certifi to 1.0.1. 
+* `Issue #164 <https://bitbucket.org/pypa/setuptools/issue/164>`_: Update dependency on wincertstore to 0.2. + +--- +3.1 +--- + +* `Issue #161 <https://bitbucket.org/pypa/setuptools/issue/161>`_: Restore Features functionality to allow backward compatibility + (for Features) until the uses of that functionality are sufficiently removed. + +----- +3.0.2 +----- + +* Correct typo in previous bugfix. + +----- +3.0.1 +----- + +* `Issue #157 <https://bitbucket.org/pypa/setuptools/issue/157>`_: Restore support for Python 2.6 in bootstrap script where + ``zipfile.ZipFile`` does not yet have support for context managers. + +--- +3.0 +--- + +* `Issue #125 <https://bitbucket.org/pypa/setuptools/issue/125>`_: Prevent Subversion support from creating a ~/.subversion + directory just for checking the presence of a Subversion repository. +* `Issue #12 <https://bitbucket.org/pypa/setuptools/issue/12>`_: Namespace packages are now imported lazily. That is, the mere + declaration of a namespace package in an egg on ``sys.path`` no longer + causes it to be imported when ``pkg_resources`` is imported. Note that this + change means that all of a namespace package's ``__init__.py`` files must + include a ``declare_namespace()`` call in order to ensure that they will be + handled properly at runtime. In 2.x it was possible to get away without + including the declaration, but only at the cost of forcing namespace + packages to be imported early, which 3.0 no longer does. +* `Issue #148 <https://bitbucket.org/pypa/setuptools/issue/148>`_: When building (bdist_egg), setuptools no longer adds + ``__init__.py`` files to namespace packages. Any packages that rely on this + behavior will need to create ``__init__.py`` files and include the + ``declare_namespace()``. +* `Issue #7 <https://bitbucket.org/pypa/setuptools/issue/7>`_: Setuptools itself is now distributed as a zip archive in addition to a + tar archive. ez_setup.py now uses the zip archive. This approach avoids the potential + security vulnerabilities presented by use of tar archives in ez_setup.py. + It also leverages the security features added to ZipFile.extract in Python 2.7.4. +* `Issue #65 <https://bitbucket.org/pypa/setuptools/issue/65>`_: Removed deprecated Features functionality. +* `Pull Request #28 <https://bitbucket.org/pypa/setuptools/pull-request/28>`_: Remove backport of ``_bytecode_filenames`` which is + available in Python 2.6 and later, but also has better compatibility with + Python 3 environments. +* `Issue #156 <https://bitbucket.org/pypa/setuptools/issue/156>`_: Fix spelling of __PYVENV_LAUNCHER__ variable. + +--- +2.2 +--- + +* `Issue #141 <https://bitbucket.org/pypa/setuptools/issue/141>`_: Restored fix for allowing setup_requires dependencies to + override installed dependencies during setup. +* `Issue #128 <https://bitbucket.org/pypa/setuptools/issue/128>`_: Fixed issue where only the first dependency link was honored + in a distribution where multiple dependency links were supplied. + +----- +2.1.2 +----- + +* `Issue #144 <https://bitbucket.org/pypa/setuptools/issue/144>`_: Read long_description using codecs module to avoid errors + installing on systems where LANG=C. + +----- +2.1.1 +----- + +* `Issue #139 <https://bitbucket.org/pypa/setuptools/issue/139>`_: Fix regression in re_finder for CVS repos (and maybe Git repos + as well). + +--- +2.1 +--- + +* `Issue #129 <https://bitbucket.org/pypa/setuptools/issue/129>`_: Suppress inspection of ``*.whl`` files when searching for files + in a zip-imported file.
+* `Issue #131 <https://bitbucket.org/pypa/setuptools/issue/131>`_: Fix RuntimeError when constructing an egg fetcher. + +----- +2.0.2 +----- + +* Fix NameError during installation with Python implementations (e.g. Jython) + not containing the parser module. +* Fix NameError in ``sdist:re_finder``. + +----- +2.0.1 +----- + +* `Issue #124 <https://bitbucket.org/pypa/setuptools/issue/124>`_: Fixed error in list detection in upload_docs. + +--- +2.0 +--- + +* `Issue #121 <https://bitbucket.org/pypa/setuptools/issue/121>`_: Exempt lib2to3 pickled grammars from DirectorySandbox. +* `Issue #41 <https://bitbucket.org/pypa/setuptools/issue/41>`_: Dropped support for Python 2.4 and Python 2.5. Clients requiring + setuptools for those versions of Python should use setuptools 1.x. +* Removed ``setuptools.command.easy_install.HAS_USER_SITE``. Clients + expecting this boolean variable should use ``site.ENABLE_USER_SITE`` + instead. +* Removed ``pkg_resources.ImpWrapper``. Clients that expected this class + should use ``pkgutil.ImpImporter`` instead. + +----- +1.4.2 +----- + +* `Issue #116 <https://bitbucket.org/pypa/setuptools/issue/116>`_: Correct TypeError when reading a local package index on Python + 3. + +----- +1.4.1 +----- + +* `Issue #114 <https://bitbucket.org/pypa/setuptools/issue/114>`_: Use ``sys.getfilesystemencoding`` for decoding config in + ``bdist_wininst`` distributions. + +* `Issue #105 <https://bitbucket.org/pypa/setuptools/issue/105>`_ and `Issue #113 <https://bitbucket.org/pypa/setuptools/issue/113>`_: Establish a more robust technique for + determining the terminal encoding:: + + 1. Try ``getpreferredencoding`` + 2. If that returns US_ASCII or None, try the encoding from + ``getdefaultlocale``. If that encoding was a "fallback" because Python + could not figure it out from the environment or OS, encoding remains + unresolved. + 3. If the encoding is resolved, then make sure Python actually implements + the encoding. + 4. In the event of an error or unknown codec, revert to fallbacks + (UTF-8 on Darwin, ASCII on everything else). + 5. If the encoding is 'mac-roman' on Darwin, use UTF-8, as 'mac-roman' was + a bug on older Python releases. + + On a side note, it would seem that the encoding only matters when SVN + does not yet support ``--xml`` and when getting repository and svn version + numbers. The ``--xml`` technique should yield UTF-8 according to some + messages on the SVN mailing lists. So if the version numbers are always + 7-bit ASCII clean, it may be best to only support the file parsing methods + for legacy SVN releases, and support for SVN without the subprocess command + would simply go away as support for the older SVNs does. + +--- +1.4 +--- + +* `Issue #27 <https://bitbucket.org/pypa/setuptools/issue/27>`_: ``easy_install`` will now use credentials from .pypirc if + present for connecting to the package index. +* `Pull Request #21 <https://bitbucket.org/pypa/setuptools/pull-request/21>`_: Omit unwanted newlines in ``package_index._encode_auth`` + when the username/password pair length indicates wrapping. + +----- +1.3.2 +----- + +* `Issue #99 <https://bitbucket.org/pypa/setuptools/issue/99>`_: Fix filename encoding issues in SVN support. + +----- +1.3.1 +----- + +* Remove exuberant warning in SVN support when SVN is not used. + +--- +1.3 +--- + +* Address security vulnerability in SSL match_hostname check as reported in + `Python #17997 <http://bugs.python.org/issue17997>`_.
+* Prefer `backports.ssl_match_hostname + <https://pypi.python.org/pypi/backports.ssl_match_hostname>`_ for backport + implementation if present. +* Correct NameError in ``ssl_support`` module (``socket.error``). + +--- +1.2 +--- + +* `Issue #26 <https://bitbucket.org/pypa/setuptools/issue/26>`_: Add support for SVN 1.7. Special thanks to Philip Thiem for the + contribution. +* `Issue #93 <https://bitbucket.org/pypa/setuptools/issue/93>`_: Wheels are now distributed with every release. Note that as + reported in `Issue #108 <https://bitbucket.org/pypa/setuptools/issue/108>`_, as of Pip 1.4, scripts aren't installed properly + from wheels. Therefore, if using Pip to install setuptools from a wheel, + the ``easy_install`` command will not be available. +* Setuptools "natural" launcher support, introduced in 1.0, is now officially + supported. + +----- +1.1.7 +----- + +* Fixed behavior of NameError handling in 'script template (dev).py' (script + launcher for 'develop' installs). +* ``ez_setup.py`` now ensures partial downloads are cleaned up following + a failed download. +* `Distribute #363 <https://bitbucket.org/tarek/distribute/issue/363>`_ and `Issue #55 <https://bitbucket.org/pypa/setuptools/issue/55>`_: Skip an sdist test that fails on locales + other than UTF-8. + +----- +1.1.6 +----- + +* `Distribute #349 <https://bitbucket.org/tarek/distribute/issue/349>`_: ``sandbox.execfile`` now opens the target file in binary + mode, thus honoring a BOM in the file when compiled. + +----- +1.1.5 +----- + +* `Issue #69 <https://bitbucket.org/pypa/setuptools/issue/69>`_: Second attempt at fix (logic was reversed). + +----- +1.1.4 +----- + +* `Issue #77 <https://bitbucket.org/pypa/setuptools/issue/77>`_: Fix error in upload command (Python 2.4). + +----- +1.1.3 +----- + +* Fix NameError in previous patch. + +----- +1.1.2 +----- + +* `Issue #69 <https://bitbucket.org/pypa/setuptools/issue/69>`_: Correct issue where 404 errors are returned for URLs with + fragments in them (such as #egg=). + +----- +1.1.1 +----- + +* `Issue #75 <https://bitbucket.org/pypa/setuptools/issue/75>`_: Add ``--insecure`` option to ez_setup.py to accommodate + environments where a trusted SSL connection cannot be validated. +* `Issue #76 <https://bitbucket.org/pypa/setuptools/issue/76>`_: Fix AttributeError in upload command with Python 2.4. + +--- +1.1 +--- + +* `Issue #71 <https://bitbucket.org/pypa/setuptools/issue/71>`_ (`Distribute #333 <https://bitbucket.org/tarek/distribute/issue/333>`_): EasyInstall now puts less emphasis on the + condition when a host is blocked via ``--allow-hosts``. +* `Issue #72 <https://bitbucket.org/pypa/setuptools/issue/72>`_: Restored Python 2.4 compatibility in ``ez_setup.py``. + +--- +1.0 +--- + +* `Issue #60 <https://bitbucket.org/pypa/setuptools/issue/60>`_: On Windows, Setuptools supports deferring to another launcher, + such as Vinay Sajip's `pylauncher <https://bitbucket.org/pypa/pylauncher>`_ + (included with Python 3.3) to launch console and GUI scripts and not install + its own launcher executables. This experimental functionality is currently + only enabled if the ``SETUPTOOLS_LAUNCHER`` environment variable is set to + "natural". In the future, this behavior may become default, but only after + it has matured and seen substantial adoption. The ``SETUPTOOLS_LAUNCHER`` + also accepts "executable" to force the default behavior of creating launcher + executables. 
+* `Issue #63 <https://bitbucket.org/pypa/setuptools/issue/63>`_: Bootstrap script (ez_setup.py) now prefers Powershell, curl, or + wget for retrieving the Setuptools tarball for improved security of the + install. The script will still fall back to a simple ``urlopen`` on + platforms that do not have these tools. +* `Issue #65 <https://bitbucket.org/pypa/setuptools/issue/65>`_: Deprecated the ``Features`` functionality. +* `Issue #52 <https://bitbucket.org/pypa/setuptools/issue/52>`_: In ``VerifyingHTTPSConn``, handle a tunnelled (proxied) + connection. + +Backward-Incompatible Changes +============================= + +This release includes a couple of backward-incompatible changes, but most if +not all users will find 1.0 a drop-in replacement for 0.9. + +* `Issue #50 <https://bitbucket.org/pypa/setuptools/issue/50>`_: Normalized API of environment marker support. Specifically, + removed line number and filename from SyntaxErrors when returned from + `pkg_resources.invalid_marker`. Any clients depending on the specific + string representation of exceptions returned by that function may need to + be updated to account for this change. +* `Issue #50 <https://bitbucket.org/pypa/setuptools/issue/50>`_: SyntaxErrors generated by `pkg_resources.invalid_marker` are + normalized for cross-implementation consistency. +* Removed ``--ignore-conflicts-at-my-risk`` and ``--delete-conflicting`` + options to easy_install. These options have been deprecated since 0.6a11. + +----- +0.9.8 +----- + +* `Issue #53 <https://bitbucket.org/pypa/setuptools/issue/53>`_: Fix NameErrors in `_vcs_split_rev_from_url`. + +----- +0.9.7 +----- + +* `Issue #49 <https://bitbucket.org/pypa/setuptools/issue/49>`_: Correct AttributeError on PyPy where a hashlib.HASH object does + not have a `.name` attribute. +* `Issue #34 <https://bitbucket.org/pypa/setuptools/issue/34>`_: Documentation now refers to bootstrap script in code repository + referenced by bookmark. +* Add underscore-separated keys to environment markers (markerlib). + +----- +0.9.6 +----- + +* `Issue #44 <https://bitbucket.org/pypa/setuptools/issue/44>`_: Test failure on Python 2.4 when MD5 hash doesn't have a `.name` + attribute. + +----- +0.9.5 +----- + +* `Python #17980 <http://bugs.python.org/issue17980>`_: Fix security vulnerability in SSL certificate validation. + +----- +0.9.4 +----- + +* `Issue #43 <https://bitbucket.org/pypa/setuptools/issue/43>`_: Fix issue (introduced in 0.9.1) with version resolution when + upgrading over other releases of Setuptools. + +----- +0.9.3 +----- + +* `Issue #42 <https://bitbucket.org/pypa/setuptools/issue/42>`_: Fix new ``AttributeError`` introduced in last fix. + +----- +0.9.2 +----- + +* `Issue #42 <https://bitbucket.org/pypa/setuptools/issue/42>`_: Fix regression where blank checksums would trigger an + ``AttributeError``. + +----- +0.9.1 +----- + +* `Distribute #386 <https://bitbucket.org/tarek/distribute/issue/386>`_: Allow other positional and keyword arguments to os.open. +* Corrected dependency on certifi mis-referenced in 0.9. + +--- +0.9 +--- + +* `package_index` now validates hashes other than MD5 in download links. + +--- +0.8 +--- + +* Code base now runs on Python 2.4 - Python 3.3 without Python 2to3 + conversion. + +----- +0.7.8 +----- + +* `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_: Yet another fix for yet another regression. + +----- +0.7.7 +----- + +* `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_: Repair AttributeError created in last release (redo). 
+* `Issue #30 <https://bitbucket.org/pypa/setuptools/issue/30>`_: Added test for get_cache_path. + +----- +0.7.6 +----- + +* `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_: Repair AttributeError created in last release. + +----- +0.7.5 +----- + +* `Issue #21 <https://bitbucket.org/pypa/setuptools/issue/21>`_: Restore Python 2.4 compatibility in ``test_easy_install``. +* `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_: Merged additional warning from Distribute 0.6.46. +* Now honor the environment variable + ``SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT`` in addition to the now + deprecated ``DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT``. + +----- +0.7.4 +----- + +* `Issue #20 <https://bitbucket.org/pypa/setuptools/issue/20>`_: Fix comparison of parsed SVN version on Python 3. + +----- +0.7.3 +----- + +* `Issue #1 <https://bitbucket.org/pypa/setuptools/issue/1>`_: Disable installation of Windows-specific files on non-Windows systems. +* Use new sysconfig module with Python 2.7 or >=3.2. + +----- +0.7.2 +----- + +* `Issue #14 <https://bitbucket.org/pypa/setuptools/issue/14>`_: Use markerlib when the `parser` module is not available. +* `Issue #10 <https://bitbucket.org/pypa/setuptools/issue/10>`_: ``ez_setup.py`` now uses HTTPS to download setuptools from PyPI. + +----- +0.7.1 +----- + +* Fix NameError (`Issue #3 <https://bitbucket.org/pypa/setuptools/issue/3>`_) again - broken in bad merge. + +--- +0.7 +--- + +* Merged Setuptools and Distribute. See docs/merge.txt for details. + +Added several features that were slated for setuptools 0.6c12: + +* Index URL now defaults to HTTPS. +* Added experimental environment marker support. Now clients may designate a + PEP-426 environment marker for "extra" dependencies. Setuptools uses this + feature in ``setup.py`` for optional SSL and certificate validation support + on older platforms. Based on Distutils-SIG discussions, the syntax is + somewhat tentative. There should probably be a PEP with a firmer spec before + the feature should be considered suitable for use. +* Added support for SSL certificate validation when installing packages from + an HTTPS service. + +----- +0.7b4 +----- + +* `Issue #3 <https://bitbucket.org/pypa/setuptools/issue/3>`_: Fixed NameError in SSL support. + +------ +0.6.49 +------ + +* Move warning check in ``get_cache_path`` to follow the directory creation + to avoid errors when the cache path does not yet exist. Fixes the error + reported in `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_. + +------ +0.6.48 +------ + +* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in + 0.6.46 (redo). + +------ +0.6.47 +------ + +* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in + 0.6.46. + +------ +0.6.46 +------ + +* `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_: Issue a warning if the PYTHON_EGG_CACHE or otherwise + customized egg cache location specifies a directory that's group- or + world-writable. + +------ +0.6.45 +------ + +* `Distribute #379 <https://bitbucket.org/tarek/distribute/issue/379>`_: ``distribute_setup.py`` now traps VersionConflict as well, + restoring ability to upgrade from an older setuptools version. + +------ +0.6.44 +------ + +* ``distribute_setup.py`` has been updated to allow Setuptools 0.7 to + satisfy use_setuptools. 
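+
+The ``use_setuptools()`` bootstrap referenced in the 0.6.44 entry above is
+normally called at the top of a project's ``setup.py``. A minimal sketch,
+assuming ``ez_setup.py`` (or ``distribute_setup.py``) ships alongside
+``setup.py``; the package name and version are invented placeholders::
+
+    # setup.py -- bootstrap Setuptools before importing it
+    from ez_setup import use_setuptools
+    use_setuptools()  # downloads/installs a suitable Setuptools if needed
+
+    from setuptools import setup
+
+    setup(
+        name="example",   # placeholder
+        version="0.1",    # placeholder
+    )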
+ +------ +0.6.43 +------ + +* `Distribute #378 <https://bitbucket.org/tarek/distribute/issue/378>`_: Restore support for Python 2.4 Syntax (regression in 0.6.42). + +------ +0.6.42 +------ + +* External links finder no longer yields duplicate links. +* `Distribute #337 <https://bitbucket.org/tarek/distribute/issue/337>`_: Moved site.py to setuptools/site-patch.py (graft of very old + patch from setuptools trunk which inspired PR `#31 <https://bitbucket.org/pypa/setuptools/issue/31>`_). + +------ +0.6.41 +------ + +* `Distribute #27 <https://bitbucket.org/tarek/distribute/issue/27>`_: Use public api for loading resources from zip files rather than + the private method `_zip_directory_cache`. +* Added a new function ``easy_install.get_win_launcher`` which may be used by + third-party libraries such as buildout to get a suitable script launcher. + +------ +0.6.40 +------ + +* `Distribute #376 <https://bitbucket.org/tarek/distribute/issue/376>`_: brought back cli.exe and gui.exe that were deleted in the + previous release. + +------ +0.6.39 +------ + +* Add support for console launchers on ARM platforms. +* Fix possible issue in GUI launchers where the subsystem was not supplied to + the linker. +* Launcher build script now refactored for robustness. +* `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_: Resources extracted from a zip egg to the file system now also + check the contents of the file against the zip contents during each + invocation of get_resource_filename. + +------ +0.6.38 +------ + +* `Distribute #371 <https://bitbucket.org/tarek/distribute/issue/371>`_: The launcher manifest file is now installed properly. + +------ +0.6.37 +------ + +* `Distribute #143 <https://bitbucket.org/tarek/distribute/issue/143>`_: Launcher scripts, including easy_install itself, are now + accompanied by a manifest on 32-bit Windows environments to avoid the + Installer Detection Technology and thus undesirable UAC elevation described + in `this Microsoft article + <http://technet.microsoft.com/en-us/library/cc709628%28WS.10%29.aspx>`_. + +------ +0.6.36 +------ + +* `Pull Request #35 <https://bitbucket.org/pypa/setuptools/pull-request/35>`_: In `Buildout #64 <https://github.com/buildout/buildout/issues/64>`_, it was reported that + under Python 3, installation of distutils scripts could attempt to copy + the ``__pycache__`` directory as a file, causing an error, apparently only + under Windows. Easy_install now skips all directories when processing + metadata scripts. + +------ +0.6.35 +------ + + +Note this release is backward-incompatible with distribute 0.6.23-0.6.34 in +how it parses version numbers. + +* `Distribute #278 <https://bitbucket.org/tarek/distribute/issue/278>`_: Restored compatibility with distribute 0.6.22 and setuptools + 0.6. Updated the documentation to match more closely with the version + parsing as intended in setuptools 0.6. + +------ +0.6.34 +------ + +* `Distribute #341 <https://bitbucket.org/tarek/distribute/issue/341>`_: 0.6.33 fails to build under Python 2.4. + +------ +0.6.33 +------ + +* Fix 2 errors with Jython 2.5. +* Fix 1 failure with Jython 2.5 and 2.7. +* Disable workaround for Jython scripts on Linux systems. +* `Distribute #336 <https://bitbucket.org/tarek/distribute/issue/336>`_: `setup.py` no longer masks failure exit code when tests fail. +* Fix issue in pkg_resources where try/except around a platform-dependent + import would trigger hook load failures on Mercurial. See pull request 32 + for details. 
+* `Distribute #341 <https://bitbucket.org/tarek/distribute/issue/341>`_: Fix a ResourceWarning.
+
+------
+0.6.32
+------
+
+* Fix test suite with Python 2.6.
+* Fix some DeprecationWarnings and ResourceWarnings.
+* `Distribute #335 <https://bitbucket.org/tarek/distribute/issue/335>`_: Backed out `setup_requires` superseding installed requirements
+  until regression can be addressed.
+
+------
+0.6.31
+------
+
+* `Distribute #303 <https://bitbucket.org/tarek/distribute/issue/303>`_: Make sure the manifest only ever contains UTF-8 in Python 3.
+* `Distribute #329 <https://bitbucket.org/tarek/distribute/issue/329>`_: Properly close files created by tests for compatibility with
+  Jython.
+* Work around `Jython #1980 <http://bugs.jython.org/issue1980>`_ and `Jython #1981 <http://bugs.jython.org/issue1981>`_.
+* `Distribute #334 <https://bitbucket.org/tarek/distribute/issue/334>`_: Provide workaround for packages that reference `sys.__stdout__`
+  such as numpy does. This change should address
+  `virtualenv #359 <https://github.com/pypa/virtualenv/issues/359>`_ as long
+  as the system encoding is UTF-8 or the IO encoding is specified in the
+  environment, i.e.::
+
+      PYTHONIOENCODING=utf8 pip install numpy
+
+* Fix for encoding issue when installing from Windows executable on Python 3.
+* `Distribute #323 <https://bitbucket.org/tarek/distribute/issue/323>`_: Allow `setup_requires` requirements to supersede installed
+  requirements. Added some new keyword arguments to existing pkg_resources
+  methods. Also had to update how __path__ is handled for namespace packages
+  to ensure that when a new egg distribution containing a namespace package is
+  placed on sys.path, the entries in __path__ are found in the same order they
+  would have been in had that egg been on the path when pkg_resources was
+  first imported.
+
+------
+0.6.30
+------
+
+* `Distribute #328 <https://bitbucket.org/tarek/distribute/issue/328>`_: Clean up temporary directories in distribute_setup.py.
+* Fix fatal bug in distribute_setup.py.
+
+------
+0.6.29
+------
+
+* `Pull Request #14 <https://bitbucket.org/pypa/setuptools/pull-request/14>`_: Honor file permissions in zip files.
+* `Distribute #327 <https://bitbucket.org/tarek/distribute/issue/327>`_: Merged pull request `#24 <https://bitbucket.org/pypa/setuptools/issue/24>`_ to fix a dependency problem with pip.
+* Merged pull request `#23 <https://bitbucket.org/pypa/setuptools/issue/23>`_ to fix https://github.com/pypa/virtualenv/issues/301.
+* If Sphinx is installed, the `upload_docs` command now runs `build_sphinx`
+  to produce uploadable documentation.
+* `Distribute #326 <https://bitbucket.org/tarek/distribute/issue/326>`_: `upload_docs` provided mangled auth credentials under Python 3.
+* `Distribute #320 <https://bitbucket.org/tarek/distribute/issue/320>`_: Fix check for "createable" in distribute_setup.py.
+* `Distribute #305 <https://bitbucket.org/tarek/distribute/issue/305>`_: Remove a warning that was triggered during normal operations.
+* `Distribute #311 <https://bitbucket.org/tarek/distribute/issue/311>`_: Print metadata in UTF-8 independent of platform.
+* `Distribute #303 <https://bitbucket.org/tarek/distribute/issue/303>`_: Read manifest file with UTF-8 encoding under Python 3.
+* `Distribute #301 <https://bitbucket.org/tarek/distribute/issue/301>`_: Allow running tests of namespace packages when using 2to3.
+* `Distribute #304 <https://bitbucket.org/tarek/distribute/issue/304>`_: Prevent import loop in site.py under Python 3.3.
+* `Distribute #283 <https://bitbucket.org/tarek/distribute/issue/283>`_: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3.
+* `Distribute #299 <https://bitbucket.org/tarek/distribute/issue/299>`_: The develop command didn't work on Python 3, when using 2to3,
+  as the egg link would go to the Python 2 source. Linking to the 2to3'd code
+  in build/lib makes it work, although you will have to rebuild the module
+  before testing it.
+* `Distribute #306 <https://bitbucket.org/tarek/distribute/issue/306>`_: Even if 2to3 is used, we build in-place under Python 2.
+* `Distribute #307 <https://bitbucket.org/tarek/distribute/issue/307>`_: Print the full path when .svn/entries is broken.
+* `Distribute #313 <https://bitbucket.org/tarek/distribute/issue/313>`_: Support for sdist subcommands (Python 2.7).
+* `Distribute #314 <https://bitbucket.org/tarek/distribute/issue/314>`_: test_local_index() would fail on OS X.
+* `Distribute #310 <https://bitbucket.org/tarek/distribute/issue/310>`_: Non-ASCII characters in a namespace __init__.py cause errors.
+* `Distribute #218 <https://bitbucket.org/tarek/distribute/issue/218>`_: Improved documentation on behavior of `package_data` and
+  `include_package_data`. Files indicated by `package_data` are now included
+  in the manifest.
+* `distribute_setup.py` now allows a `--download-base` argument for retrieving
+  distribute from a specified location.
+
+------
+0.6.28
+------
+
+* `Distribute #294 <https://bitbucket.org/tarek/distribute/issue/294>`_: setup.py can now be invoked from any directory.
+* Scripts are now installed honoring the umask.
+* Added support for .dist-info directories.
+* `Distribute #283 <https://bitbucket.org/tarek/distribute/issue/283>`_: Fix and disable scanning of `*.pyc` / `*.pyo` files on
+  Python 3.3.
+
+------
+0.6.27
+------
+
+* Support current snapshots of CPython 3.3.
+* Distribute now recognizes README.rst as a standard, default readme file.
+* Exclude 'encodings' modules when removing modules from sys.modules.
+  Workaround for `#285 <https://bitbucket.org/pypa/setuptools/issue/285>`_.
+* `Distribute #231 <https://bitbucket.org/tarek/distribute/issue/231>`_: Don't fiddle with the system Python when used with buildout
+  (bootstrap.py).
+
+------
+0.6.26
+------
+
+* `Distribute #183 <https://bitbucket.org/tarek/distribute/issue/183>`_: Symlinked files are now extracted from source distributions.
+* `Distribute #227 <https://bitbucket.org/tarek/distribute/issue/227>`_: Easy_install fetch parameters are now passed during the
+  installation of a source distribution; now fulfillment of setup_requires
+  dependencies will honor the parameters passed to easy_install.
+
+------
+0.6.25
+------
+
+* `Distribute #258 <https://bitbucket.org/tarek/distribute/issue/258>`_: Workaround a cache issue.
+* `Distribute #260 <https://bitbucket.org/tarek/distribute/issue/260>`_: distribute_setup.py now accepts the --user parameter for
+  Python 2.6 and later.
+* `Distribute #262 <https://bitbucket.org/tarek/distribute/issue/262>`_: package_index.open_with_auth no longer throws LookupError
+  on Python 3.
+* `Distribute #269 <https://bitbucket.org/tarek/distribute/issue/269>`_: Fixed an AttributeError when an exception occurs while reading
+  Manifest.in on recent releases of Python.
+* `Distribute #272 <https://bitbucket.org/tarek/distribute/issue/272>`_: Prevent TypeError when namespace package names are unicode
+  and single-install-externally-managed is used. Also fixes PIP issue
+  449.
+* `Distribute #273 <https://bitbucket.org/tarek/distribute/issue/273>`_: Legacy script launchers now install with Python 2/3 support.
+
+------
+0.6.24
+------
+
+* `Distribute #249 <https://bitbucket.org/tarek/distribute/issue/249>`_: Added options to exclude 2to3 fixers.
+
+------
+0.6.23
+------
+
+* `Distribute #244 <https://bitbucket.org/tarek/distribute/issue/244>`_: Fixed a test.
+* `Distribute #243 <https://bitbucket.org/tarek/distribute/issue/243>`_: Fixed a test.
+* `Distribute #239 <https://bitbucket.org/tarek/distribute/issue/239>`_: Fixed a test.
+* `Distribute #240 <https://bitbucket.org/tarek/distribute/issue/240>`_: Fixed a test.
+* `Distribute #241 <https://bitbucket.org/tarek/distribute/issue/241>`_: Fixed a test.
+* `Distribute #237 <https://bitbucket.org/tarek/distribute/issue/237>`_: Fixed a test.
+* `Distribute #238 <https://bitbucket.org/tarek/distribute/issue/238>`_: easy_install now uses 64-bit executable wrappers on 64-bit Python.
+* `Distribute #208 <https://bitbucket.org/tarek/distribute/issue/208>`_: Fixed parsed_versions; it now honors post-releases as noted in the documentation.
+* `Distribute #207 <https://bitbucket.org/tarek/distribute/issue/207>`_: Windows cli and gui wrappers pass CTRL-C to the child Python process.
+* `Distribute #227 <https://bitbucket.org/tarek/distribute/issue/227>`_: easy_install now passes its arguments to setup.py bdist_egg.
+* `Distribute #225 <https://bitbucket.org/tarek/distribute/issue/225>`_: Fixed a NameError on Python 2.5 and 2.4.
+
+------
+0.6.21
+------
+
+* `Distribute #225 <https://bitbucket.org/tarek/distribute/issue/225>`_: Fixed a regression on Python 2.4.
+
+------
+0.6.20
+------
+
+* `Distribute #135 <https://bitbucket.org/tarek/distribute/issue/135>`_: Include URL in warning when processing URLs in package_index.
+* `Distribute #212 <https://bitbucket.org/tarek/distribute/issue/212>`_: Fix issue where easy_install fails on Python 3 with the Windows
+  installer.
+* `Distribute #213 <https://bitbucket.org/tarek/distribute/issue/213>`_: Fix typo in documentation.
+
+------
+0.6.19
+------
+
+* `Distribute #206 <https://bitbucket.org/tarek/distribute/issue/206>`_: Fixed ``AttributeError: 'HTTPMessage' object has no attribute 'getheaders'``.
+
+------
+0.6.18
+------
+
+* `Distribute #210 <https://bitbucket.org/tarek/distribute/issue/210>`_: Fixed a regression introduced by the `Distribute #204 <https://bitbucket.org/tarek/distribute/issue/204>`_ fix.
+
+------
+0.6.17
+------
+
+* Support the 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment
+  variable to allow disabling installation of the easy_install-${version}
+  script.
+* Support Python >=3.1.4 and >=3.2.1.
+* `Distribute #204 <https://bitbucket.org/tarek/distribute/issue/204>`_: Don't try to import the parent of a namespace package in
+  declare_namespace.
+* `Distribute #196 <https://bitbucket.org/tarek/distribute/issue/196>`_: Tolerate responses with multiple Content-Length headers.
+* `Distribute #205 <https://bitbucket.org/tarek/distribute/issue/205>`_: Sandboxing didn't preserve the working_set, leading to
+  setup_requires problems.
+
+------
+0.6.16
+------
+
+* Builds sdist gztar even on Windows (avoiding `Distribute #193 <https://bitbucket.org/tarek/distribute/issue/193>`_).
+* `Distribute #192 <https://bitbucket.org/tarek/distribute/issue/192>`_: Fixed metadata omitted on Windows when package_dir is
+  specified with a forward slash.
+* `Distribute #195 <https://bitbucket.org/tarek/distribute/issue/195>`_: Cython build support.
+* `Distribute #200 <https://bitbucket.org/tarek/distribute/issue/200>`_: Fixed issues with recognizing 64-bit packages on Windows.
+
+------
+0.6.15
+------
+
+* Fixed typo in bdist_egg.
+* Several issues under Python 3 have been resolved.
+* `Distribute #146 <https://bitbucket.org/tarek/distribute/issue/146>`_: Fixed missing DLL files after easy_install of a Windows .exe package.
+
+------
+0.6.14
+------
+
+* `Distribute #170 <https://bitbucket.org/tarek/distribute/issue/170>`_: Fixed unittest failure. Thanks to Toshio.
+* `Distribute #171 <https://bitbucket.org/tarek/distribute/issue/171>`_: Fixed race condition in unit tests causing deadlocks in the test suite.
+* `Distribute #143 <https://bitbucket.org/tarek/distribute/issue/143>`_: Fixed a lookup issue with easy_install.
+  Thanks to David and Zooko.
+* `Distribute #174 <https://bitbucket.org/tarek/distribute/issue/174>`_: Fixed the edit mode when it's used with setuptools itself.
+
+------
+0.6.13
+------
+
+* `Distribute #160 <https://bitbucket.org/tarek/distribute/issue/160>`_: Fixed ValueError("Invalid IPv6 URL") on Python 2.7.
+* `Distribute #150 <https://bitbucket.org/tarek/distribute/issue/150>`_: Fixed using ~/.local even in a --no-site-packages virtualenv.
+* `Distribute #163 <https://bitbucket.org/tarek/distribute/issue/163>`_: Scan index links before external links, and don't use the MD5 hash
+  when comparing two distributions.
+
+------
+0.6.12
+------
+
+* `Distribute #149 <https://bitbucket.org/tarek/distribute/issue/149>`_: Fixed various failures on Python 2.3/2.4.
+
+------
+0.6.11
+------
+
+* Fixed another case of SandboxViolation.
+* `Distribute #15 <https://bitbucket.org/tarek/distribute/issue/15>`_ and `Distribute #48 <https://bitbucket.org/tarek/distribute/issue/48>`_: Introduced a socket timeout of 15 seconds on URL openings.
+* Added indexsidebar.html to MANIFEST.in.
+* `Distribute #108 <https://bitbucket.org/tarek/distribute/issue/108>`_: Fixed TypeError with Python 3.1.
+* `Distribute #121 <https://bitbucket.org/tarek/distribute/issue/121>`_: Fixed --help install command trying to actually install.
+* `Distribute #112 <https://bitbucket.org/tarek/distribute/issue/112>`_: Added an os.makedirs so that Tarek's solution will work.
+* `Distribute #133 <https://bitbucket.org/tarek/distribute/issue/133>`_: Added --no-find-links to easy_install.
+* Added easy_install --user.
+* `Distribute #100 <https://bitbucket.org/tarek/distribute/issue/100>`_: Fixed develop --user not taking '.' in PYTHONPATH into account.
+* `Distribute #134 <https://bitbucket.org/tarek/distribute/issue/134>`_: Removed spurious UserWarnings. Patch by VanLindberg.
+* `Distribute #138 <https://bitbucket.org/tarek/distribute/issue/138>`_: Fixed cant_write_to_target error when setup_requires is used.
+* `Distribute #147 <https://bitbucket.org/tarek/distribute/issue/147>`_: Respect the sys.dont_write_bytecode flag.
+
+------
+0.6.10
+------
+
+* Reverted change made for the DistributionNotFound exception because
+  zc.buildout uses the exception message to get the name of the
+  distribution.
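+
+A short sketch of why that exception message is load-bearing: zc.buildout-era
+tools parsed the project name back out of the exception text (the requirement
+name below is hypothetical)::
+
+    import pkg_resources
+
+    try:
+        pkg_resources.require("no-such-project")
+    except pkg_resources.DistributionNotFound as exc:
+        # zc.buildout extracts the distribution name from the message text,
+        # which is why the message format had to be reverted.
+        print(str(exc))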
+
+-----
+0.6.9
+-----
+
+* `Distribute #90 <https://bitbucket.org/tarek/distribute/issue/90>`_: An unknown setuptools version can now be added to the working set.
+* `Distribute #87 <https://bitbucket.org/tarek/distribute/issue/87>`_: setup.py no longer tries to convert distribute_setup.py.
+  Initial patch by arfrever.
+* `Distribute #89 <https://bitbucket.org/tarek/distribute/issue/89>`_: Added a sidebar with a download link to the docs.
+* `Distribute #86 <https://bitbucket.org/tarek/distribute/issue/86>`_: Fixed a missing sentence in the pkg_resources docs.
+* Added a nicer error message when a DistributionNotFound is raised.
+* `Distribute #80 <https://bitbucket.org/tarek/distribute/issue/80>`_: test_develop now works with Python 3.1.
+* `Distribute #93 <https://bitbucket.org/tarek/distribute/issue/93>`_: upload_docs now works if there is an empty sub-directory.
+* `Distribute #70 <https://bitbucket.org/tarek/distribute/issue/70>`_: Fixed the exec bit being set on non-executable files.
+* `Distribute #99 <https://bitbucket.org/tarek/distribute/issue/99>`_: The standalone easy_install command no longer uses a
+  "setup.cfg" if one exists in the working directory. It will use it
+  only if triggered by ``install_requires`` from a setup.py call
+  (install, develop, etc.).
+* `Distribute #101 <https://bitbucket.org/tarek/distribute/issue/101>`_: Allow ``os.devnull`` in the sandbox.
+* `Distribute #92 <https://bitbucket.org/tarek/distribute/issue/92>`_: Fixed the "no eggs found" error with MacPorts
+  (platform.mac_ver() fails).
+* `Distribute #103 <https://bitbucket.org/tarek/distribute/issue/103>`_: test_get_script_header_jython_workaround is no longer run
+  under Python 3 with the C or POSIX locale. Contributed by Arfrever.
+* `Distribute #104 <https://bitbucket.org/tarek/distribute/issue/104>`_: Removed the assertion when the installation fails,
+  with a nicer message for the end user.
+* `Distribute #100 <https://bitbucket.org/tarek/distribute/issue/100>`_: Make sure there's no SandboxViolation when
+  the setup script patches setuptools.
+
+-----
+0.6.8
+-----
+
+* Added "check_packages" in dist. (added in Setuptools 0.6c11)
+* Fixed the DONT_PATCH_SETUPTOOLS state.
+
+-----
+0.6.7
+-----
+
+* `Distribute #58 <https://bitbucket.org/tarek/distribute/issue/58>`_: Added --user support to the develop command.
+* `Distribute #11 <https://bitbucket.org/tarek/distribute/issue/11>`_: Generated scripts now wrap their call to the script entry point
+  in the standard ``if __name__ == '__main__'`` block.
+* Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv
+  can drive an installation that doesn't patch a global setuptools.
+* Reviewed unladen-swallow specific change from
+  http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719
+  and determined that it no longer applies. Distribute should work fine with
+  Unladen Swallow 2009Q3.
+* `Distribute #21 <https://bitbucket.org/tarek/distribute/issue/21>`_: Allow PackageIndex.open_url to gracefully handle all cases of a
+  httplib.HTTPException instead of just InvalidURL and BadStatusLine.
+* Removed virtual-python.py from this distribution and updated documentation
+  to point to the actively maintained virtualenv instead.
+* `Distribute #64 <https://bitbucket.org/tarek/distribute/issue/64>`_: use_setuptools no longer rebuilds the distribute egg every
+  time it is run.
+* use_setuptools now properly respects the requested version.
+* use_setuptools will no longer try to import a distribute egg for the
+  wrong Python version.
+* `Distribute #74 <https://bitbucket.org/tarek/distribute/issue/74>`_: no_fake should be True by default.
+* `Distribute #72 <https://bitbucket.org/tarek/distribute/issue/72>`_: Avoid a bootstrapping issue with easy_install -U.
+
+-----
+0.6.6
+-----
+
+* Unified the bootstrap file so it works on both Python 2.x and Python 3
+  without 2to3 (patch by Holger Krekel).
+
+-----
+0.6.5
+-----
+
+* `Distribute #65 <https://bitbucket.org/tarek/distribute/issue/65>`_: cli.exe and gui.exe are now generated at build time,
+  depending on the platform in use.
+
+* `Distribute #67 <https://bitbucket.org/tarek/distribute/issue/67>`_: Fixed doc typo (PEP 381/382).
+
+* Distribute no longer shadows setuptools if we require a 0.7-series
+  setuptools, and an error is raised when installing a 0.7 setuptools with
+  distribute.
+
+* When run from within buildout, no attempt is made to modify an existing
+  setuptools egg, whether in a shared egg directory or a system setuptools.
+
+* Fixed a hole in sandboxing allowing builtin file to write outside of
+  the sandbox.
+
+-----
+0.6.4
+-----
+
+* Added the generation of `distribute_setup_3k.py` during the release.
+  This closes `Distribute #52 <https://bitbucket.org/tarek/distribute/issue/52>`_.
+
+* Added an upload_docs command to easily upload project documentation to
+  PyPI's https://pythonhosted.org. This closes issue `Distribute #56 <https://bitbucket.org/tarek/distribute/issue/56>`_.
+
+* Fixed a bootstrap bug in the use_setuptools() API.
+
+-----
+0.6.3
+-----
+
+setuptools
+==========
+
+* Fixed a bunch of calls to file() that caused crashes on Python 3.
+
+bootstrapping
+=============
+
+* Fixed a bug in sorting that caused bootstrap to fail on Python 3.
+
+-----
+0.6.2
+-----
+
+setuptools
+==========
+
+* Added Python 3 support; see docs/python3.txt.
+  This closes `Old Setuptools #39 <http://bugs.python.org/setuptools/issue39>`_.
+
+* Added option to run 2to3 automatically when installing on Python 3.
+  This closes issue `Distribute #31 <https://bitbucket.org/tarek/distribute/issue/31>`_.
+
+* Fixed invalid usage of requirement.parse, which broke develop -d.
+  This closes `Old Setuptools #44 <http://bugs.python.org/setuptools/issue44>`_.
+
+* Fixed script launcher for 64-bit Windows.
+  This closes `Old Setuptools #2 <http://bugs.python.org/setuptools/issue2>`_.
+
+* Fixed a KeyError when compiling extensions.
+  This closes `Old Setuptools #41 <http://bugs.python.org/setuptools/issue41>`_.
+
+bootstrapping
+=============
+
+* Fixed bootstrap not working on Windows. This closes issue `Distribute #49 <https://bitbucket.org/tarek/distribute/issue/49>`_.
+
+* Fixed 2.6 dependencies. This closes issue `Distribute #50 <https://bitbucket.org/tarek/distribute/issue/50>`_.
+
+* Make sure setuptools is patched when running through easy_install.
+  This closes `Old Setuptools #40 <http://bugs.python.org/setuptools/issue40>`_.
+
+-----
+0.6.1
+-----
+
+setuptools
+==========
+
+* package_index.urlopen now catches BadStatusLine and malformed URL errors.
+  This closes `Distribute #16 <https://bitbucket.org/tarek/distribute/issue/16>`_ and `Distribute #18 <https://bitbucket.org/tarek/distribute/issue/18>`_.
+
+* zip_ok is now False by default.
This closes `Old Setuptools #33 <http://bugs.python.org/setuptools/issue33>`_.
+
+* Fixed invalid URL error catching. `Old Setuptools #20 <http://bugs.python.org/setuptools/issue20>`_.
+
+* Fixed invalid bootstrapping with easy_install installation (`Distribute #40 <https://bitbucket.org/tarek/distribute/issue/40>`_).
+  Thanks to Florian Schulze for the help.
+
+* Removed buildout/bootstrap.py. A new repository will create a specific
+  bootstrap.py script.
+
+
+bootstrapping
+=============
+
+* The bootstrap process leaves setuptools alone if it is detected in the
+  system and --root or --prefix is provided, but is not in the same location.
+  This closes `Distribute #10 <https://bitbucket.org/tarek/distribute/issue/10>`_.
+
+---
+0.6
+---
+
+setuptools
+==========
+
+* Packages required at build time were not fully present at install time.
+  This closes `Distribute #12 <https://bitbucket.org/tarek/distribute/issue/12>`_.
+
+* Protected against failures in tarfile extraction. This closes `Distribute #10 <https://bitbucket.org/tarek/distribute/issue/10>`_.
+
+* Made Jython api_tests.txt doctest compatible. This closes `Distribute #7 <https://bitbucket.org/tarek/distribute/issue/7>`_.
+
+* sandbox.py replaced builtin type file with builtin function open. This
+  closes `Distribute #6 <https://bitbucket.org/tarek/distribute/issue/6>`_.
+
+* Immediately close all file handles. This closes `Distribute #3 <https://bitbucket.org/tarek/distribute/issue/3>`_.
+
+* Added compatibility with Subversion 1.6. This references `Distribute #1 <https://bitbucket.org/tarek/distribute/issue/1>`_.
+
+pkg_resources
+=============
+
+* Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API
+  instead. Based on a patch from ronaldoussoren. This closes issue `#5 <https://bitbucket.org/pypa/setuptools/issue/5>`_.
+
+* Fixed a SandboxViolation for mkdir that could occur in certain cases.
+  This closes `Distribute #13 <https://bitbucket.org/tarek/distribute/issue/13>`_.
+
+* Allow find_on_path to fail gracefully on systems with tight permissions.
+  This closes `Distribute #9 <https://bitbucket.org/tarek/distribute/issue/9>`_.
+
+* Corrected inconsistency between documentation and code of add_entry.
+  This closes `Distribute #8 <https://bitbucket.org/tarek/distribute/issue/8>`_.
+
+* Immediately close all file handles. This closes `Distribute #3 <https://bitbucket.org/tarek/distribute/issue/3>`_.
+
+easy_install
+============
+
+* Immediately close all file handles. This closes `Distribute #3 <https://bitbucket.org/tarek/distribute/issue/3>`_.
+
+-----
+0.6c9
+-----
+
+ * Fixed a missing files problem when using Windows source distributions on
+   non-Windows platforms, due to distutils not handling manifest file line
+   endings correctly.
+
+ * Updated Pyrex support to work with Pyrex 0.9.6 and higher.
+
+ * Minor changes for Jython compatibility, including skipping tests that can't
+   work on Jython.
+
+ * Fixed not installing eggs in ``install_requires`` if they were also used for
+   ``setup_requires`` or ``tests_require``.
+
+ * Fixed not fetching eggs in ``install_requires`` when running tests.
+
+ * Allow ``ez_setup.use_setuptools()`` to upgrade existing setuptools
+   installations when called from a standalone ``setup.py``.
+
+ * Added a warning if a namespace package is declared, but its parent package
+   is not also declared as a namespace.
+ + * Support Subversion 1.5 + + * Removed use of deprecated ``md5`` module if ``hashlib`` is available + + * Fixed ``bdist_wininst upload`` trying to upload the ``.exe`` twice + + * Fixed ``bdist_egg`` putting a ``native_libs.txt`` in the source package's + ``.egg-info``, when it should only be in the built egg's ``EGG-INFO``. + + * Ensure that _full_name is set on all shared libs before extensions are + checked for shared lib usage. (Fixes a bug in the experimental shared + library build support.) + + * Fix to allow unpacked eggs containing native libraries to fail more + gracefully under Google App Engine (with an ``ImportError`` loading the + C-based module, instead of getting a ``NameError``). + +----- +0.6c7 +----- + + * Fixed ``distutils.filelist.findall()`` crashing on broken symlinks, and + ``egg_info`` command failing on new, uncommitted SVN directories. + + * Fix import problems with nested namespace packages installed via + ``--root`` or ``--single-version-externally-managed``, due to the + parent package not having the child package as an attribute. + +----- +0.6c6 +----- + + * Added ``--egg-path`` option to ``develop`` command, allowing you to force + ``.egg-link`` files to use relative paths (allowing them to be shared across + platforms on a networked drive). + + * Fix not building binary RPMs correctly. + + * Fix "eggsecutables" (such as setuptools' own egg) only being runnable with + bash-compatible shells. + + * Fix ``#!`` parsing problems in Windows ``.exe`` script wrappers, when there + was whitespace inside a quoted argument or at the end of the ``#!`` line + (a regression introduced in 0.6c4). + + * Fix ``test`` command possibly failing if an older version of the project + being tested was installed on ``sys.path`` ahead of the test source + directory. + + * Fix ``find_packages()`` treating ``ez_setup`` and directories with ``.`` in + their names as packages. + +----- +0.6c5 +----- + + * Fix uploaded ``bdist_rpm`` packages being described as ``bdist_egg`` + packages under Python versions less than 2.5. + + * Fix uploaded ``bdist_wininst`` packages being described as suitable for + "any" version by Python 2.5, even if a ``--target-version`` was specified. + +----- +0.6c4 +----- + + * Overhauled Windows script wrapping to support ``bdist_wininst`` better. + Scripts installed with ``bdist_wininst`` will always use ``#!python.exe`` or + ``#!pythonw.exe`` as the executable name (even when built on non-Windows + platforms!), and the wrappers will look for the executable in the script's + parent directory (which should find the right version of Python). + + * Fix ``upload`` command not uploading files built by ``bdist_rpm`` or + ``bdist_wininst`` under Python 2.3 and 2.4. + + * Add support for "eggsecutable" headers: a ``#!/bin/sh`` script that is + prepended to an ``.egg`` file to allow it to be run as a script on Unix-ish + platforms. (This is mainly so that setuptools itself can have a single-file + installer on Unix, without doing multiple downloads, dealing with firewalls, + etc.) + + * Fix problem with empty revision numbers in Subversion 1.4 ``entries`` files + + * Use cross-platform relative paths in ``easy-install.pth`` when doing + ``develop`` and the source directory is a subdirectory of the installation + target directory. 
+
+ * Fix a problem installing eggs with a system packaging tool if the project
+   contained an implicit namespace package; for example if the ``setup()``
+   listed a namespace package ``foo.bar`` without explicitly listing ``foo``
+   as a namespace package.
+
+-----
+0.6c3
+-----
+
+ * Fixed breakages caused by Subversion 1.4's new "working copy" format.
+
+-----
+0.6c2
+-----
+
+ * The ``ez_setup`` module displays the conflicting version of setuptools (and
+   its installation location) when a script requests a version that's not
+   available.
+
+ * Running ``setup.py develop`` on a setuptools-using project will now install
+   setuptools if needed, instead of only downloading the egg.
+
+-----
+0.6c1
+-----
+
+ * Fixed ``AttributeError`` when trying to download a ``setup_requires``
+   dependency when a distribution lacks a ``dependency_links`` setting.
+
+ * Made ``zip-safe`` and ``not-zip-safe`` flag files contain a single byte, so
+   as to play better with packaging tools that complain about zero-length
+   files.
+
+ * Made ``setup.py develop`` respect the ``--no-deps`` option, which it
+   previously was ignoring.
+
+ * Support ``extra_path`` option to ``setup()`` when ``install`` is run in
+   backward-compatibility mode.
+
+ * Source distributions now always include a ``setup.cfg`` file that explicitly
+   sets ``egg_info`` options such that they produce an identical version number
+   to the source distribution's version number. (Previously, the default
+   version number could be different due to the use of ``--tag-date``, or if
+   the version was overridden on the command line that built the source
+   distribution.)
+
+-----
+0.6b4
+-----
+
+ * Fix ``register`` not obeying name/version set by ``egg_info`` command, if
+   ``egg_info`` wasn't explicitly run first on the same command line.
+
+ * Added ``--no-date`` and ``--no-svn-revision`` options to ``egg_info``
+   command, to allow suppressing tags configured in ``setup.cfg``.
+
+ * Fixed redundant warnings about missing ``README`` file(s); the warning
+   should now appear only when building an actual source distribution.
+
+-----
+0.6b3
+-----
+
+ * Fix ``bdist_egg`` not including files in subdirectories of ``.egg-info``.
+
+ * Allow ``.py`` files found by the ``include_package_data`` option to be
+   automatically included. Remove duplicate data file matches if both
+   ``include_package_data`` and ``package_data`` are used to refer to the same
+   files.
+
+-----
+0.6b1
+-----
+
+ * Strip ``module`` from the end of compiled extension modules when computing
+   the name of a ``.py`` loader/wrapper. (Python's import machinery ignores
+   this suffix when searching for an extension module.)
+
+------
+0.6a11
+------
+
+ * Added ``test_loader`` keyword to support custom test loaders.
+
+ * Added ``setuptools.file_finders`` entry point group to allow implementing
+   revision control plugins.
+
+ * Added ``--identity`` option to ``upload`` command.
+
+ * Added ``dependency_links`` to allow specifying URLs for ``--find-links``.
+
+ * Enhanced test loader to scan packages as well as modules, and call
+   ``additional_tests()`` if present to get non-unittest tests.
+
+ * Support namespace packages in conjunction with system packagers, by omitting
+   the installation of any ``__init__.py`` files for namespace packages, and
+   adding a special ``.pth`` file to create a working package in
+   ``sys.modules``.
+
+ * Made ``--single-version-externally-managed`` automatic when ``--root`` is
+   used, so that most system packagers won't require special support for
+   setuptools.
+ + * Fixed ``setup_requires``, ``tests_require``, etc. not using ``setup.cfg`` or + other configuration files for their option defaults when installing, and + also made the install use ``--multi-version`` mode so that the project + directory doesn't need to support .pth files. + + * ``MANIFEST.in`` is now forcibly closed when any errors occur while reading + it. Previously, the file could be left open and the actual error would be + masked by problems trying to remove the open file on Windows systems. + +------ +0.6a10 +------ + + * Fixed the ``develop`` command ignoring ``--find-links``. + +----- +0.6a9 +----- + + * The ``sdist`` command no longer uses the traditional ``MANIFEST`` file to + create source distributions. ``MANIFEST.in`` is still read and processed, + as are the standard defaults and pruning. But the manifest is built inside + the project's ``.egg-info`` directory as ``SOURCES.txt``, and it is rebuilt + every time the ``egg_info`` command is run. + + * Added the ``include_package_data`` keyword to ``setup()``, allowing you to + automatically include any package data listed in revision control or + ``MANIFEST.in`` + + * Added the ``exclude_package_data`` keyword to ``setup()``, allowing you to + trim back files included via the ``package_data`` and + ``include_package_data`` options. + + * Fixed ``--tag-svn-revision`` not working when run from a source + distribution. + + * Added warning for namespace packages with missing ``declare_namespace()`` + + * Added ``tests_require`` keyword to ``setup()``, so that e.g. packages + requiring ``nose`` to run unit tests can make this dependency optional + unless the ``test`` command is run. + + * Made all commands that use ``easy_install`` respect its configuration + options, as this was causing some problems with ``setup.py install``. + + * Added an ``unpack_directory()`` driver to ``setuptools.archive_util``, so + that you can process a directory tree through a processing filter as if it + were a zipfile or tarfile. + + * Added an internal ``install_egg_info`` command to use as part of old-style + ``install`` operations, that installs an ``.egg-info`` directory with the + package. + + * Added a ``--single-version-externally-managed`` option to the ``install`` + command so that you can more easily wrap a "flat" egg in a system package. + + * Enhanced ``bdist_rpm`` so that it installs single-version eggs that + don't rely on a ``.pth`` file. The ``--no-egg`` option has been removed, + since all RPMs are now built in a more backwards-compatible format. + + * Support full roundtrip translation of eggs to and from ``bdist_wininst`` + format. Running ``bdist_wininst`` on a setuptools-based package wraps the + egg in an .exe that will safely install it as an egg (i.e., with metadata + and entry-point wrapper scripts), and ``easy_install`` can turn the .exe + back into an ``.egg`` file or directory and install it as such. + + +----- +0.6a8 +----- + + * Fixed some problems building extensions when Pyrex was installed, especially + with Python 2.4 and/or packages using SWIG. + + * Made ``develop`` command accept all the same options as ``easy_install``, + and use the ``easy_install`` command's configuration settings as defaults. + + * Made ``egg_info --tag-svn-revision`` fall back to extracting the revision + number from ``PKG-INFO`` in case it is being run on a source distribution of + a snapshot taken from a Subversion-based project. 
+ + * Automatically detect ``.dll``, ``.so`` and ``.dylib`` files that are being + installed as data, adding them to ``native_libs.txt`` automatically. + + * Fixed some problems with fresh checkouts of projects that don't include + ``.egg-info/PKG-INFO`` under revision control and put the project's source + code directly in the project directory. If such a package had any + requirements that get processed before the ``egg_info`` command can be run, + the setup scripts would fail with a "Missing 'Version:' header and/or + PKG-INFO file" error, because the egg runtime interpreted the unbuilt + metadata in a directory on ``sys.path`` (i.e. the current directory) as + being a corrupted egg. Setuptools now monkeypatches the distribution + metadata cache to pretend that the egg has valid version information, until + it has a chance to make it actually be so (via the ``egg_info`` command). + +----- +0.6a5 +----- + + * Fixed missing gui/cli .exe files in distribution. Fixed bugs in tests. + +----- +0.6a3 +----- + + * Added ``gui_scripts`` entry point group to allow installing GUI scripts + on Windows and other platforms. (The special handling is only for Windows; + other platforms are treated the same as for ``console_scripts``.) + +----- +0.6a2 +----- + + * Added ``console_scripts`` entry point group to allow installing scripts + without the need to create separate script files. On Windows, console + scripts get an ``.exe`` wrapper so you can just type their name. On other + platforms, the scripts are written without a file extension. + +----- +0.6a1 +----- + + * Added support for building "old-style" RPMs that don't install an egg for + the target package, using a ``--no-egg`` option. + + * The ``build_ext`` command now works better when using the ``--inplace`` + option and multiple Python versions. It now makes sure that all extensions + match the current Python version, even if newer copies were built for a + different Python version. + + * The ``upload`` command no longer attaches an extra ``.zip`` when uploading + eggs, as PyPI now supports egg uploads without trickery. + + * The ``ez_setup`` script/module now displays a warning before downloading + the setuptools egg, and attempts to check the downloaded egg against an + internal MD5 checksum table. + + * Fixed the ``--tag-svn-revision`` option of ``egg_info`` not finding the + latest revision number; it was using the revision number of the directory + containing ``setup.py``, not the highest revision number in the project. + + * Added ``eager_resources`` setup argument + + * The ``sdist`` command now recognizes Subversion "deleted file" entries and + does not include them in source distributions. + + * ``setuptools`` now embeds itself more thoroughly into the distutils, so that + other distutils extensions (e.g. py2exe, py2app) will subclass setuptools' + versions of things, rather than the native distutils ones. + + * Added ``entry_points`` and ``setup_requires`` arguments to ``setup()``; + ``setup_requires`` allows you to automatically find and download packages + that are needed in order to *build* your project (as opposed to running it). + + * ``setuptools`` now finds its commands, ``setup()`` argument validators, and + metadata writers using entry points, so that they can be extended by + third-party packages. See `Creating distutils Extensions + <http://pythonhosted.org/setuptools/setuptools.html#creating-distutils-extensions>`_ + for more details. + + * The vestigial ``depends`` command has been removed. 
It was never finished + or documented, and never would have worked without EasyInstall - which it + pre-dated and was never compatible with. + +------ +0.5a12 +------ + + * The zip-safety scanner now checks for modules that might be used with + ``python -m``, and marks them as unsafe for zipping, since Python 2.4 can't + handle ``-m`` on zipped modules. + +------ +0.5a11 +------ + + * Fix breakage of the "develop" command that was caused by the addition of + ``--always-unzip`` to the ``easy_install`` command. + +----- +0.5a9 +----- + + * Include ``svn:externals`` directories in source distributions as well as + normal subversion-controlled files and directories. + + * Added ``exclude=patternlist`` option to ``setuptools.find_packages()`` + + * Changed --tag-svn-revision to include an "r" in front of the revision number + for better readability. + + * Added ability to build eggs without including source files (except for any + scripts, of course), using the ``--exclude-source-files`` option to + ``bdist_egg``. + + * ``setup.py install`` now automatically detects when an "unmanaged" package + or module is going to be on ``sys.path`` ahead of a package being installed, + thereby preventing the newer version from being imported. If this occurs, + a warning message is output to ``sys.stderr``, but installation proceeds + anyway. The warning message informs the user what files or directories + need deleting, and advises them they can also use EasyInstall (with the + ``--delete-conflicting`` option) to do it automatically. + + * The ``egg_info`` command now adds a ``top_level.txt`` file to the metadata + directory that lists all top-level modules and packages in the distribution. + This is used by the ``easy_install`` command to find possibly-conflicting + "unmanaged" packages when installing the distribution. + + * Added ``zip_safe`` and ``namespace_packages`` arguments to ``setup()``. + Added package analysis to determine zip-safety if the ``zip_safe`` flag + is not given, and advise the author regarding what code might need changing. + + * Fixed the swapped ``-d`` and ``-b`` options of ``bdist_egg``. + +----- +0.5a8 +----- + + * The "egg_info" command now always sets the distribution metadata to "safe" + forms of the distribution name and version, so that distribution files will + be generated with parseable names (i.e., ones that don't include '-' in the + name or version). Also, this means that if you use the various ``--tag`` + options of "egg_info", any distributions generated will use the tags in the + version, not just egg distributions. + + * Added support for defining command aliases in distutils configuration files, + under the "[aliases]" section. To prevent recursion and to allow aliases to + call the command of the same name, a given alias can be expanded only once + per command-line invocation. You can define new aliases with the "alias" + command, either for the local, global, or per-user configuration. + + * Added "rotate" command to delete old distribution files, given a set of + patterns to match and the number of files to keep. (Keeps the most + recently-modified distribution files matching each pattern.) + + * Added "saveopts" command that saves all command-line options for the current + invocation to the local, global, or per-user configuration file. Useful for + setting defaults without having to hand-edit a configuration file. + + * Added a "setopt" command that sets a single option in a specified distutils + configuration file. 
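+
+A sketch of what the "[aliases]" support described above looks like in a
+distutils configuration file; the alias name and its expansion are
+illustrative, not from this changelog::
+
+    [aliases]
+    release = egg_info sdist rotate --keep=3 --match=.tar.gz
+
+With that section in place, ``setup.py release`` expands to the listed
+commands, and a given alias is expanded only once per invocation to prevent
+recursion.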
+
+-----
+0.5a7
+-----
+
+ * Added "upload" support for egg and source distributions, including a bug
+   fix for "upload" and a temporary workaround for lack of .egg support in
+   PyPI.
+
+-----
+0.5a6
+-----
+
+ * Beefed up the "sdist" command so that if you don't have a MANIFEST.in, it
+   will include all files under revision control (CVS or Subversion) in the
+   current directory, and it will regenerate the list every time you create a
+   source distribution, not just when you tell it to. This should make the
+   default behavior "do what you mean" more often than the distutils' default
+   behavior did, while still retaining the old behavior in the presence of
+   MANIFEST.in.
+
+ * Fixed the "develop" command always updating .pth files, even if you
+   specified ``-n`` or ``--dry-run``.
+
+ * Slightly changed the format of the generated version when you use
+   ``--tag-build`` on the "egg_info" command, so that you can make tagged
+   revisions compare *lower* than the version specified in setup.py (e.g. by
+   using ``--tag-build=dev``).
+
+-----
+0.5a5
+-----
+
+ * Added ``develop`` command to ``setuptools``-based packages. This command
+   installs an ``.egg-link`` pointing to the package's source directory, and
+   script wrappers that ``execfile()`` the source versions of the package's
+   scripts. This lets you put your development checkout(s) on sys.path without
+   having to actually install them. (To uninstall the link, use
+   ``setup.py develop --uninstall``.)
+
+ * Added ``egg_info`` command to ``setuptools``-based packages. This command
+   just creates or updates the "projectname.egg-info" directory, without
+   building an egg. (It's used by the ``bdist_egg``, ``test``, and ``develop``
+   commands.)
+
+ * Enhanced the ``test`` command so that it doesn't install the package, but
+   instead builds any C extensions in-place, updates the ``.egg-info``
+   metadata, adds the source directory to ``sys.path``, and runs the tests
+   directly on the source. This avoids an "unmanaged" installation of the
+   package to ``site-packages`` or elsewhere.
+
+ * Made ``easy_install`` a standard ``setuptools`` command, moving it from
+   the ``easy_install`` module to ``setuptools.command.easy_install``. Note
+   that if you were importing or extending it, you must now change your imports
+   accordingly. ``easy_install.py`` is still installed as a script, but not as
+   a module.
+
+-----
+0.5a4
+-----
+
+ * Setup scripts using setuptools can now list their dependencies directly in
+   the setup.py file, without having to manually create a ``depends.txt`` file.
+   The ``install_requires`` and ``extras_require`` arguments to ``setup()``
+   are used to create a dependencies file automatically. If you are manually
+   creating ``depends.txt`` right now, please switch to using these setup
+   arguments as soon as practical, because ``depends.txt`` support will be
+   removed in the 0.6 release cycle. For documentation on the new arguments,
+   see the ``setuptools.dist.Distribution`` class.
+
+ * Setup scripts using setuptools now always install using ``easy_install``
+   internally, for ease of uninstallation and upgrading.
+
+-----
+0.5a1
+-----
+
+ * Added support for "self-installation" bootstrapping. Packages can now
+   include ``ez_setup.py`` in their source distribution, and add the following
+   to their ``setup.py``, in order to automatically bootstrap installation of
+   setuptools as part of their setup process::
+
+       from ez_setup import use_setuptools
+       use_setuptools()
+
+       from setuptools import setup
+       # etc...
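+
+A slightly fuller sketch combining the bootstrap above with the
+``install_requires`` and ``extras_require`` arguments introduced in 0.5a4;
+the project and dependency names are illustrative, not from this changelog::
+
+    from ez_setup import use_setuptools
+    use_setuptools()
+
+    from setuptools import setup
+    setup(
+        name="example-project",
+        version="0.1",
+        install_requires=["somedep>=1.0"],         # always installed
+        extras_require={"ssl": ["someotherdep"]},  # only for the [ssl] extra
+    )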
+ +----- +0.4a2 +----- + + * Added ``ez_setup.py`` installer/bootstrap script to make initial setuptools + installation easier, and to allow distributions using setuptools to avoid + having to include setuptools in their source distribution. + + * All downloads are now managed by the ``PackageIndex`` class (which is now + subclassable and replaceable), so that embedders can more easily override + download logic, give download progress reports, etc. The class has also + been moved to the new ``setuptools.package_index`` module. + + * The ``Installer`` class no longer handles downloading, manages a temporary + directory, or tracks the ``zip_ok`` option. Downloading is now handled + by ``PackageIndex``, and ``Installer`` has become an ``easy_install`` + command class based on ``setuptools.Command``. + + * There is a new ``setuptools.sandbox.run_setup()`` API to invoke a setup + script in a directory sandbox, and a new ``setuptools.archive_util`` module + with an ``unpack_archive()`` API. These were split out of EasyInstall to + allow reuse by other tools and applications. + + * ``setuptools.Command`` now supports reinitializing commands using keyword + arguments to set/reset options. Also, ``Command`` subclasses can now set + their ``command_consumes_arguments`` attribute to ``True`` in order to + receive an ``args`` option containing the rest of the command line. + +----- +0.3a2 +----- + + * Added new options to ``bdist_egg`` to allow tagging the egg's version number + with a subversion revision number, the current date, or an explicit tag + value. Run ``setup.py bdist_egg --help`` to get more information. + + * Misc. bug fixes + +----- +0.3a1 +----- + + * Initial release. + + diff --git a/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/METADATA b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/METADATA new file mode 100644 index 0000000..33f1de0 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/METADATA @@ -0,0 +1,2069 @@ +Metadata-Version: 2.0 +Name: setuptools +Version: 5.5.1 +Summary: Easily download, build, install, upgrade, and uninstall Python packages +Home-page: https://bitbucket.org/pypa/setuptools +Author: Python Packaging Authority +Author-email: distutils-sig@python.org +License: PSF or ZPL +Keywords: CPAN PyPI distutils eggs package management +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.1 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Archiving :: Packaging +Classifier: Topic :: System :: Systems Administration +Classifier: Topic :: Utilities +Provides-Extra: certs +Requires-Dist: certifi (==1.0.1); extra == 'certs' +Provides-Extra: ssl +Requires-Dist: wincertstore (==0.2); sys_platform=='win32' and extra == 'ssl' + +=============================== +Installing and Using Setuptools +=============================== + +.. 
contents:: **Table of Contents**
+
+
+-------------------------
+Installation Instructions
+-------------------------
+
+The recommended way to bootstrap setuptools on any system is to download
+`ez_setup.py`_ and run it using the target Python environment. Different
+operating systems have different recommended techniques to accomplish this
+basic routine, so below are some examples to get you started.
+
+Setuptools requires Python 2.6 or later. To install setuptools
+on Python 2.4 or Python 2.5, use the `bootstrap script for Setuptools 1.x
+<https://bitbucket.org/pypa/setuptools/raw/bootstrap-py24/ez_setup.py>`_.
+
+The link provided to ez_setup.py is a bookmark to the bootstrap script for
+the latest known stable release.
+
+.. _ez_setup.py: https://bootstrap.pypa.io/ez_setup.py
+
+Windows 8 (Powershell)
+======================
+
+For best results, uninstall previous versions FIRST (see `Uninstalling`_).
+
+Using Windows 8 or later, it's possible to install with one simple Powershell
+command. Start up Powershell and paste this command::
+
+    > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python -
+
+You must start Powershell with Administrative privileges, or you may instead
+choose a user-local installation::
+
+    > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - --user
+
+If you have Python 3.3 or later, you can use the ``py`` command to install to
+different Python versions. For example, to install to Python 3.3 if you have
+Python 2.7 installed::
+
+    > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | py -3 -
+
+The recommended way to install setuptools on Windows is to download
+`ez_setup.py`_ and run it. The script will download the appropriate .egg
+file and install it for you.
+
+Once installation is complete, you will find an ``easy_install`` program in
+your Python ``Scripts`` subdirectory. For simple invocation and best results,
+add this directory to your ``PATH`` environment variable, if it is not already
+present. If you did a user-local install, the ``Scripts`` subdirectory is
+``$env:APPDATA\Python\Scripts``.
+
+
+Windows 7 (or graphical install)
+================================
+
+For Windows 7 and earlier, download `ez_setup.py`_ using your favorite web
+browser or other technique and "run" that file.
+
+
+Unix (wget)
+===========
+
+Most Linux distributions come with wget.
+
+Download `ez_setup.py`_ and run it using the target Python version. The script
+will download the appropriate version and install it for you::
+
+    > wget https://bootstrap.pypa.io/ez_setup.py -O - | python
+
+Note that you may need to invoke the command with superuser privileges to
+install to the system Python::
+
+    > wget https://bootstrap.pypa.io/ez_setup.py -O - | sudo python
+
+Alternatively, Setuptools may be installed to a user-local path::
+
+    > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - --user
+
+Unix including Mac OS X (curl)
+==============================
+
+If your system has curl installed, follow the ``wget`` instructions but
+replace ``wget`` with ``curl`` and ``-O`` with ``-o``. For example::
+
+    > curl https://bootstrap.pypa.io/ez_setup.py -o - | python
+
+
+Advanced Installation
+=====================
+
+For more advanced installation options, such as installing to custom
+locations or prefixes, download and extract the source
+tarball from `Setuptools on PyPI <https://pypi.python.org/pypi/setuptools>`_
+and run setup.py with any supported distutils and Setuptools options.
+For example:: + + setuptools-x.x$ python setup.py install --prefix=/opt/setuptools + +Use ``--help`` to get a full options list, but we recommend consulting +the `EasyInstall manual`_ for detailed instructions, especially `the section +on custom installation locations`_. + +.. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall +.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations + + +Downloads +========= + +All setuptools downloads can be found at `the project's home page in the Python +Package Index`_. Scroll to the very bottom of the page to find the links. + +.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools + +In addition to the PyPI downloads, the development version of ``setuptools`` +is available from the `Bitbucket repo`_, and in-development versions of the +`0.6 branch`_ are available as well. + +.. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev +.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 + +Uninstalling +============ + +On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` +installer, simply use the uninstall feature of "Add/Remove Programs" in the +Control Panel. + +Otherwise, to uninstall Setuptools or Distribute, regardless of the Python +version, delete all ``setuptools*`` and ``distribute*`` files and +directories from your system's ``site-packages`` directory +(and any other ``sys.path`` directories) FIRST. + +If you are upgrading or otherwise plan to re-install Setuptools or Distribute, +nothing further needs to be done. If you want to completely remove Setuptools, +you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts +and associated executables installed to the Python scripts directory. + +-------------------------------- +Using Setuptools and EasyInstall +-------------------------------- + +Here are some of the available manuals, tutorials, and other resources for +learning about Setuptools, Python Eggs, and EasyInstall: + +* `The EasyInstall user's guide and reference manual`_ +* `The setuptools Developer's Guide`_ +* `The pkg_resources API reference`_ +* `Package Compatibility Notes`_ (user-maintained) +* `The Internal Structure of Python Eggs`_ + +Questions, comments, and bug reports should be directed to the `distutils-sig +mailing list`_. If you have written (or know of) any tutorials, documentation, +plug-ins, or other resources for setuptools users, please let us know about +them there, so this reference list can be updated. If you have working, +*tested* patches to correct problems or add features, you may submit them to +the `setuptools bug tracker`_. + +.. _setuptools bug tracker: https://bitbucket.org/pypa/setuptools/issues +.. _Package Compatibility Notes: https://pythonhosted.org/setuptools/PackageNotes +.. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html +.. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html +.. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html +.. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html +.. 
_distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
+
+
+-------
+Credits
+-------
+
+* The original design for the ``.egg`` format and the ``pkg_resources`` API was
+  co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first
+  version of ``pkg_resources``, and supplied the OS X operating system version
+  compatibility algorithm.
+
+* Ian Bicking implemented many early "creature comfort" features of
+  easy_install, including support for downloading via Sourceforge and
+  Subversion repositories. Ian's comments on the Web-SIG about WSGI
+  application deployment also inspired the concept of "entry points" in eggs,
+  and he has given talks at PyCon and elsewhere to inform and educate the
+  community about eggs and setuptools.
+
+* Jim Fulton contributed time and effort to build automated tests of various
+  aspects of ``easy_install``, and supplied the doctests for the command-line
+  ``.exe`` wrappers on Windows.
+
+* Phillip J. Eby is the seminal author of setuptools, and
+  first proposed the idea of an importable binary distribution format for
+  Python application plug-ins.
+
+* Significant parts of the implementation of setuptools were funded by the Open
+  Source Applications Foundation, to provide a plug-in infrastructure for the
+  Chandler PIM application. In addition, many OSAF staffers (such as Mike
+  "Code Bear" Taylor) contributed their time and stress as guinea pigs for the
+  use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!)
+
+* Tarek Ziadé is the principal author of the Distribute fork, which
+  re-invigorated the community on the project, encouraged renewed innovation,
+  and addressed many defects.
+
+* Since the merge with Distribute, Jason R. Coombs is the
+  maintainer of setuptools. The project is maintained in coordination with
+  the Python Packaging Authority (PyPA) and the larger Python community.
+
+.. _files:
+
+=======
+CHANGES
+=======
+
+-----
+5.5.1
+-----
+
+* `Issue #239 <https://bitbucket.org/pypa/setuptools/issue/239>`_: Fix typo in 5.5 such that the fix did not take effect.
+
+---
+5.5
+---
+
+* `Issue #239 <https://bitbucket.org/pypa/setuptools/issue/239>`_: Setuptools now includes the setup_requires directive on
+  Distribution objects and validates the syntax just like install_requires
+  and tests_require directives.
+
+-----
+5.4.2
+-----
+
+* `Issue #236 <https://bitbucket.org/pypa/setuptools/issue/236>`_: Corrected regression in execfile implementation for Python 2.6.
+
+-----
+5.4.1
+-----
+
+* `Python #7776 <http://bugs.python.org/issue7776>`_: (ssl_support) Correct usage of host for validation when
+  tunneling for HTTPS.
+
+---
+5.4
+---
+
+* `Issue #154 <https://bitbucket.org/pypa/setuptools/issue/154>`_: ``pkg_resources`` will now cache the zip manifests rather than
+  re-processing the same file from disk multiple times, but only if the
+  environment variable ``PKG_RESOURCES_CACHE_ZIP_MANIFESTS`` is set. Clients
+  that package many modules in the same zip file will see some improvement
+  in startup time by enabling this feature. This feature is not enabled by
+  default because it causes a substantial increase in memory usage.
+
+---
+5.3
+---
+
+* `Issue #185 <https://bitbucket.org/pypa/setuptools/issue/185>`_: Make svn tagging work on the new style SVN metadata.
+  Thanks cazabon!
+* Prune revision control directories (e.g. .svn) from base path
+  as well as sub-directories.
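+
+As a usage note for the zip-manifest caching described under 5.4 above, a
+minimal sketch of enabling it (the variable merely needs to be set; setting
+it before ``pkg_resources`` is imported is the safe assumption)::
+
+    > PKG_RESOURCES_CACHE_ZIP_MANIFESTS=1 python -c "import pkg_resources"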
+
+---
+5.2
+---
+
+* Added a `Developer Guide
+  <https://pythonhosted.org/setuptools/developer-guide.html>`_ to the official
+  documentation.
+* Some code refactoring and cleanup was done with no intended behavioral
+  changes.
+* During install_egg_info, the generated lines for namespace package .pth
+  files are now processed even during a dry run.
+
+---
+5.1
+---
+
+* `Issue #202 <https://bitbucket.org/pypa/setuptools/issue/202>`_: Implemented more robust cache invalidation for the ZipImporter,
+  building on the work in `Issue #168 <https://bitbucket.org/pypa/setuptools/issue/168>`_. Special thanks to Jurko Gospodnetic and
+  PJE.
+
+-----
+5.0.2
+-----
+
+* `Issue #220 <https://bitbucket.org/pypa/setuptools/issue/220>`_: Restored script templates.
+
+-----
+5.0.1
+-----
+
+* Renamed script templates to end with .tmpl now that they no longer need
+  to be processed by 2to3. Fixes spurious syntax errors during build/install.
+
+---
+5.0
+---
+
+* `Issue #218 <https://bitbucket.org/pypa/setuptools/issue/218>`_: Re-release of 3.8.1 to signal that it supersedes 4.x.
+* Incidentally, script templates were updated not to include the triple-quote
+  escaping.
+
+-------------------------
+3.7.1 and 3.8.1 and 4.0.1
+-------------------------
+
+* `Issue #213 <https://bitbucket.org/pypa/setuptools/issue/213>`_: Use legacy StringIO behavior for compatibility under pbr.
+* `Issue #218 <https://bitbucket.org/pypa/setuptools/issue/218>`_: Setuptools 3.8.1 superseded 4.0.1, and 4.x was removed
+  from the available versions to install.
+
+---
+4.0
+---
+
+* `Issue #210 <https://bitbucket.org/pypa/setuptools/issue/210>`_: ``setup.py develop`` now copies scripts in binary mode rather
+  than text mode, matching the behavior of the ``install`` command.
+
+---
+3.8
+---
+
+* Extend `Issue #197 <https://bitbucket.org/pypa/setuptools/issue/197>`_ workaround to include all Python 3 versions prior to
+  3.2.2.
+
+---
+3.7
+---
+
+* `Issue #193 <https://bitbucket.org/pypa/setuptools/issue/193>`_: Improved handling of Unicode filenames when building manifests.
+
+---
+3.6
+---
+
+* `Issue #203 <https://bitbucket.org/pypa/setuptools/issue/203>`_: Honor proxy settings for Powershell downloader in the bootstrap
+  routine.
+
+-----
+3.5.2
+-----
+
+* `Issue #168 <https://bitbucket.org/pypa/setuptools/issue/168>`_: More robust handling of replaced zip files and stale caches.
+  Fixes ZipImportError complaining about a 'bad local header'.
+
+-----
+3.5.1
+-----
+
+* `Issue #199 <https://bitbucket.org/pypa/setuptools/issue/199>`_: Restored ``install._install`` for compatibility with earlier
+  NumPy versions.
+
+---
+3.5
+---
+
+* `Issue #195 <https://bitbucket.org/pypa/setuptools/issue/195>`_: Follow symbolic links in find_packages (restoring behavior
+  broken in 3.4).
+* `Issue #197 <https://bitbucket.org/pypa/setuptools/issue/197>`_: On Python 3.1, PKG-INFO is now saved in a UTF-8 encoding instead
+  of ``sys.getpreferredencoding`` to match the behavior on Python 2.6-3.4.
+* `Issue #192 <https://bitbucket.org/pypa/setuptools/issue/192>`_: Preferred bootstrap location is now
+  https://bootstrap.pypa.io/ez_setup.py (mirrored from former location).
+
+-----
+3.4.4
+-----
+
+* `Issue #184 <https://bitbucket.org/pypa/setuptools/issue/184>`_: Correct failure where find_packages over-matched packages
+  when directory traversal isn't short-circuited.
+
+-----
+3.4.3
+-----
+
+* `Issue #183 <https://bitbucket.org/pypa/setuptools/issue/183>`_: Really fix test command with Python 3.1.
+
+-----
+3.4.2
+-----
+
+* `Issue #183 <https://bitbucket.org/pypa/setuptools/issue/183>`_: Fix additional regression in test command on Python 3.1.
+
+-----
+3.4.1
+-----
+
+* `Issue #180 <https://bitbucket.org/pypa/setuptools/issue/180>`_: Fix regression in test command not caught by py.test-run tests.
+
+---
+3.4
+---
+
+* `Issue #176 <https://bitbucket.org/pypa/setuptools/issue/176>`_: Add parameter to the test command to support a custom test
+  runner: --test-runner or -r.
+* `Issue #177 <https://bitbucket.org/pypa/setuptools/issue/177>`_: Now assume the most common invocation of the install command on
+  platforms/environments without stack support (issuing a warning). Setuptools
+  now installs naturally on IronPython. Behavior on CPython should be
+  unchanged.
+
+---
+3.3
+---
+
+* Add ``include`` parameter to ``setuptools.find_packages()``.
+
+---
+3.2
+---
+
+* `Pull Request #39 <https://bitbucket.org/pypa/setuptools/pull-request/39>`_: Add support for C++ targets from Cython ``.pyx`` files.
+* `Issue #162 <https://bitbucket.org/pypa/setuptools/issue/162>`_: Update dependency on certifi to 1.0.1.
+* `Issue #164 <https://bitbucket.org/pypa/setuptools/issue/164>`_: Update dependency on wincertstore to 0.2.
+
+---
+3.1
+---
+
+* `Issue #161 <https://bitbucket.org/pypa/setuptools/issue/161>`_: Restore Features functionality to allow backward compatibility
+  (for Features) until the use of that functionality is sufficiently removed.
+
+-----
+3.0.2
+-----
+
+* Correct typo in previous bugfix.
+
+-----
+3.0.1
+-----
+
+* `Issue #157 <https://bitbucket.org/pypa/setuptools/issue/157>`_: Restore support for Python 2.6 in bootstrap script where
+  ``zipfile.ZipFile`` does not yet have support for context managers.
+
+---
+3.0
+---
+
+* `Issue #125 <https://bitbucket.org/pypa/setuptools/issue/125>`_: Prevent Subversion support from creating a ~/.subversion
+  directory just for checking the presence of a Subversion repository.
+* `Issue #12 <https://bitbucket.org/pypa/setuptools/issue/12>`_: Namespace packages are now imported lazily. That is, the mere
+  declaration of a namespace package in an egg on ``sys.path`` no longer
+  causes it to be imported when ``pkg_resources`` is imported. Note that this
+  change means that all of a namespace package's ``__init__.py`` files must
+  include a ``declare_namespace()`` call in order to ensure that they will be
+  handled properly at runtime. In 2.x it was possible to get away without
+  including the declaration, but only at the cost of forcing namespace
+  packages to be imported early, which 3.0 no longer does.
+* `Issue #148 <https://bitbucket.org/pypa/setuptools/issue/148>`_: When building (bdist_egg), setuptools no longer adds
+  ``__init__.py`` files to namespace packages. Any packages that rely on this
+  behavior will need to create ``__init__.py`` files and include the
+  ``declare_namespace()`` call.
+* `Issue #7 <https://bitbucket.org/pypa/setuptools/issue/7>`_: Setuptools itself is now distributed as a zip archive in addition to
+  a tar archive. ez_setup.py now uses the zip archive. This approach avoids the potential
+  security vulnerabilities presented by use of tar archives in ez_setup.py.
+  It also leverages the security features added to ZipFile.extract in Python 2.7.4.
+* `Issue #65 <https://bitbucket.org/pypa/setuptools/issue/65>`_: Removed deprecated Features functionality.
+* `Pull Request #28 <https://bitbucket.org/pypa/setuptools/pull-request/28>`_: Remove backport of ``_bytecode_filenames`` which is
+  available in Python 2.6 and later, but also has better compatibility with
+  Python 3 environments.
+* `Issue #156 <https://bitbucket.org/pypa/setuptools/issue/156>`_: Fix spelling of __PYVENV_LAUNCHER__ variable.
+
+---
+2.2
+---
+
+* `Issue #141 <https://bitbucket.org/pypa/setuptools/issue/141>`_: Restored fix for allowing setup_requires dependencies to
+  override installed dependencies during setup.
+* `Issue #128 <https://bitbucket.org/pypa/setuptools/issue/128>`_: Fixed issue where only the first dependency link was honored
+  in a distribution where multiple dependency links were supplied.
+
+-----
+2.1.2
+-----
+
+* `Issue #144 <https://bitbucket.org/pypa/setuptools/issue/144>`_: Read long_description using codecs module to avoid errors
+  installing on systems where LANG=C.
+
+-----
+2.1.1
+-----
+
+* `Issue #139 <https://bitbucket.org/pypa/setuptools/issue/139>`_: Fix regression in re_finder for CVS repos (and maybe Git repos
+  as well).
+
+---
+2.1
+---
+
+* `Issue #129 <https://bitbucket.org/pypa/setuptools/issue/129>`_: Suppress inspection of ``*.whl`` files when searching for files
+  in a zip-imported file.
+* `Issue #131 <https://bitbucket.org/pypa/setuptools/issue/131>`_: Fix RuntimeError when constructing an egg fetcher.
+
+-----
+2.0.2
+-----
+
+* Fix NameError during installation with Python implementations (e.g. Jython)
+  not containing parser module.
+* Fix NameError in ``sdist:re_finder``.
+
+-----
+2.0.1
+-----
+
+* `Issue #124 <https://bitbucket.org/pypa/setuptools/issue/124>`_: Fixed error in list detection in upload_docs.
+
+---
+2.0
+---
+
+* `Issue #121 <https://bitbucket.org/pypa/setuptools/issue/121>`_: Exempt lib2to3 pickled grammars from DirectorySandbox.
+* `Issue #41 <https://bitbucket.org/pypa/setuptools/issue/41>`_: Dropped support for Python 2.4 and Python 2.5. Clients requiring
+  setuptools for those versions of Python should use setuptools 1.x.
+* Removed ``setuptools.command.easy_install.HAS_USER_SITE``. Clients
+  expecting this boolean variable should use ``site.ENABLE_USER_SITE``
+  instead.
+* Removed ``pkg_resources.ImpWrapper``. Clients that expected this class
+  should use ``pkgutil.ImpImporter`` instead.
+
+-----
+1.4.2
+-----
+
+* `Issue #116 <https://bitbucket.org/pypa/setuptools/issue/116>`_: Correct TypeError when reading a local package index on Python
+  3.
+
+-----
+1.4.1
+-----
+
+* `Issue #114 <https://bitbucket.org/pypa/setuptools/issue/114>`_: Use ``sys.getfilesystemencoding`` for decoding config in
+  ``bdist_wininst`` distributions.
+
+* `Issue #105 <https://bitbucket.org/pypa/setuptools/issue/105>`_ and `Issue #113 <https://bitbucket.org/pypa/setuptools/issue/113>`_: Establish a more robust technique for
+  determining the terminal encoding::
+
+    1. Try ``getpreferredencoding``
+    2. If that returns US_ASCII or None, try the encoding from
+       ``getdefaultlocale``. If that encoding was a "fallback" because Python
+       could not figure it out from the environment or OS, encoding remains
+       unresolved.
+    3. If the encoding is resolved, then make sure Python actually implements
+       the encoding.
+    4. In the event of an error or unknown codec, revert to fallbacks
+       (UTF-8 on Darwin, ASCII on everything else).
+    5. If the encoding is 'mac-roman' on Darwin, use UTF-8, as 'mac-roman' was
+       a bug on older Python releases.
+
+  On a side note, it would seem that the encoding only matters when SVN
+  does not yet support ``--xml`` and when getting repository and svn version
+  numbers. The ``--xml`` technique should yield UTF-8 according to some
+  messages on the SVN mailing lists. So if the version numbers are always
+  7-bit ASCII clean, it may be best to only support the file parsing methods
+  for legacy SVN releases; support for SVN without the subprocess command
+  would then simply go away as support for the older SVNs does.
+
+---
+1.4
+---
+
+* `Issue #27 <https://bitbucket.org/pypa/setuptools/issue/27>`_: ``easy_install`` will now use credentials from .pypirc if
+  present for connecting to the package index.
+* `Pull Request #21 <https://bitbucket.org/pypa/setuptools/pull-request/21>`_: Omit unwanted newlines in ``package_index._encode_auth``
+  when the username/password pair length indicates wrapping.
+
+-----
+1.3.2
+-----
+
+* `Issue #99 <https://bitbucket.org/pypa/setuptools/issue/99>`_: Fix filename encoding issues in SVN support.
+
+-----
+1.3.1
+-----
+
+* Remove exuberant warning in SVN support when SVN is not used.
+
+---
+1.3
+---
+
+* Address security vulnerability in SSL match_hostname check as reported in
+  `Python #17997 <http://bugs.python.org/issue17997>`_.
+* Prefer `backports.ssl_match_hostname
+  <https://pypi.python.org/pypi/backports.ssl_match_hostname>`_ for backport
+  implementation if present.
+* Correct NameError in ``ssl_support`` module (``socket.error``).
+
+---
+1.2
+---
+
+* `Issue #26 <https://bitbucket.org/pypa/setuptools/issue/26>`_: Add support for SVN 1.7. Special thanks to Philip Thiem for the
+  contribution.
+* `Issue #93 <https://bitbucket.org/pypa/setuptools/issue/93>`_: Wheels are now distributed with every release. Note that as
+  reported in `Issue #108 <https://bitbucket.org/pypa/setuptools/issue/108>`_, as of Pip 1.4, scripts aren't installed properly
+  from wheels. Therefore, if using Pip to install setuptools from a wheel,
+  the ``easy_install`` command will not be available.
+* Setuptools "natural" launcher support, introduced in 1.0, is now officially
+  supported.
+
+-----
+1.1.7
+-----
+
+* Fixed behavior of NameError handling in 'script template (dev).py' (script
+  launcher for 'develop' installs).
+* ``ez_setup.py`` now ensures partial downloads are cleaned up following
+  a failed download.
+* `Distribute #363 <https://bitbucket.org/tarek/distribute/issue/363>`_ and `Issue #55 <https://bitbucket.org/pypa/setuptools/issue/55>`_: Skip an sdist test that fails on locales
+  other than UTF-8.
+
+-----
+1.1.6
+-----
+
+* `Distribute #349 <https://bitbucket.org/tarek/distribute/issue/349>`_: ``sandbox.execfile`` now opens the target file in binary
+  mode, thus honoring a BOM in the file when compiled.
+
+-----
+1.1.5
+-----
+
+* `Issue #69 <https://bitbucket.org/pypa/setuptools/issue/69>`_: Second attempt at fix (logic was reversed).
+
+-----
+1.1.4
+-----
+
+* `Issue #77 <https://bitbucket.org/pypa/setuptools/issue/77>`_: Fix error in upload command (Python 2.4).
+
+-----
+1.1.3
+-----
+
+* Fix NameError in previous patch.
+
+-----
+1.1.2
+-----
+
+* `Issue #69 <https://bitbucket.org/pypa/setuptools/issue/69>`_: Correct issue where 404 errors are returned for URLs with
+  fragments in them (such as #egg=).
+
+-----
+1.1.1
+-----
+
+* `Issue #75 <https://bitbucket.org/pypa/setuptools/issue/75>`_: Add ``--insecure`` option to ez_setup.py to accommodate
+  environments where a trusted SSL connection cannot be validated.
+* `Issue #76 <https://bitbucket.org/pypa/setuptools/issue/76>`_: Fix AttributeError in upload command with Python 2.4. + +--- +1.1 +--- + +* `Issue #71 <https://bitbucket.org/pypa/setuptools/issue/71>`_ (`Distribute #333 <https://bitbucket.org/tarek/distribute/issue/333>`_): EasyInstall now puts less emphasis on the + condition when a host is blocked via ``--allow-hosts``. +* `Issue #72 <https://bitbucket.org/pypa/setuptools/issue/72>`_: Restored Python 2.4 compatibility in ``ez_setup.py``. + +--- +1.0 +--- + +* `Issue #60 <https://bitbucket.org/pypa/setuptools/issue/60>`_: On Windows, Setuptools supports deferring to another launcher, + such as Vinay Sajip's `pylauncher <https://bitbucket.org/pypa/pylauncher>`_ + (included with Python 3.3) to launch console and GUI scripts and not install + its own launcher executables. This experimental functionality is currently + only enabled if the ``SETUPTOOLS_LAUNCHER`` environment variable is set to + "natural". In the future, this behavior may become default, but only after + it has matured and seen substantial adoption. The ``SETUPTOOLS_LAUNCHER`` + also accepts "executable" to force the default behavior of creating launcher + executables. +* `Issue #63 <https://bitbucket.org/pypa/setuptools/issue/63>`_: Bootstrap script (ez_setup.py) now prefers Powershell, curl, or + wget for retrieving the Setuptools tarball for improved security of the + install. The script will still fall back to a simple ``urlopen`` on + platforms that do not have these tools. +* `Issue #65 <https://bitbucket.org/pypa/setuptools/issue/65>`_: Deprecated the ``Features`` functionality. +* `Issue #52 <https://bitbucket.org/pypa/setuptools/issue/52>`_: In ``VerifyingHTTPSConn``, handle a tunnelled (proxied) + connection. + +Backward-Incompatible Changes +============================= + +This release includes a couple of backward-incompatible changes, but most if +not all users will find 1.0 a drop-in replacement for 0.9. + +* `Issue #50 <https://bitbucket.org/pypa/setuptools/issue/50>`_: Normalized API of environment marker support. Specifically, + removed line number and filename from SyntaxErrors when returned from + `pkg_resources.invalid_marker`. Any clients depending on the specific + string representation of exceptions returned by that function may need to + be updated to account for this change. +* `Issue #50 <https://bitbucket.org/pypa/setuptools/issue/50>`_: SyntaxErrors generated by `pkg_resources.invalid_marker` are + normalized for cross-implementation consistency. +* Removed ``--ignore-conflicts-at-my-risk`` and ``--delete-conflicting`` + options to easy_install. These options have been deprecated since 0.6a11. + +----- +0.9.8 +----- + +* `Issue #53 <https://bitbucket.org/pypa/setuptools/issue/53>`_: Fix NameErrors in `_vcs_split_rev_from_url`. + +----- +0.9.7 +----- + +* `Issue #49 <https://bitbucket.org/pypa/setuptools/issue/49>`_: Correct AttributeError on PyPy where a hashlib.HASH object does + not have a `.name` attribute. +* `Issue #34 <https://bitbucket.org/pypa/setuptools/issue/34>`_: Documentation now refers to bootstrap script in code repository + referenced by bookmark. +* Add underscore-separated keys to environment markers (markerlib). + +----- +0.9.6 +----- + +* `Issue #44 <https://bitbucket.org/pypa/setuptools/issue/44>`_: Test failure on Python 2.4 when MD5 hash doesn't have a `.name` + attribute. + +----- +0.9.5 +----- + +* `Python #17980 <http://bugs.python.org/issue17980>`_: Fix security vulnerability in SSL certificate validation. 
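+
+As a usage note for the "natural" launcher support described under 1.0 above,
+a sketch of opting in (assuming a Windows command prompt; ``SomePackage`` is
+a stand-in for a real distribution name)::
+
+    > set SETUPTOOLS_LAUNCHER=natural
+    > easy_install SomePackage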
+ +----- +0.9.4 +----- + +* `Issue #43 <https://bitbucket.org/pypa/setuptools/issue/43>`_: Fix issue (introduced in 0.9.1) with version resolution when + upgrading over other releases of Setuptools. + +----- +0.9.3 +----- + +* `Issue #42 <https://bitbucket.org/pypa/setuptools/issue/42>`_: Fix new ``AttributeError`` introduced in last fix. + +----- +0.9.2 +----- + +* `Issue #42 <https://bitbucket.org/pypa/setuptools/issue/42>`_: Fix regression where blank checksums would trigger an + ``AttributeError``. + +----- +0.9.1 +----- + +* `Distribute #386 <https://bitbucket.org/tarek/distribute/issue/386>`_: Allow other positional and keyword arguments to os.open. +* Corrected dependency on certifi mis-referenced in 0.9. + +--- +0.9 +--- + +* `package_index` now validates hashes other than MD5 in download links. + +--- +0.8 +--- + +* Code base now runs on Python 2.4 - Python 3.3 without Python 2to3 + conversion. + +----- +0.7.8 +----- + +* `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_: Yet another fix for yet another regression. + +----- +0.7.7 +----- + +* `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_: Repair AttributeError created in last release (redo). +* `Issue #30 <https://bitbucket.org/pypa/setuptools/issue/30>`_: Added test for get_cache_path. + +----- +0.7.6 +----- + +* `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_: Repair AttributeError created in last release. + +----- +0.7.5 +----- + +* `Issue #21 <https://bitbucket.org/pypa/setuptools/issue/21>`_: Restore Python 2.4 compatibility in ``test_easy_install``. +* `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_: Merged additional warning from Distribute 0.6.46. +* Now honor the environment variable + ``SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT`` in addition to the now + deprecated ``DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT``. + +----- +0.7.4 +----- + +* `Issue #20 <https://bitbucket.org/pypa/setuptools/issue/20>`_: Fix comparison of parsed SVN version on Python 3. + +----- +0.7.3 +----- + +* `Issue #1 <https://bitbucket.org/pypa/setuptools/issue/1>`_: Disable installation of Windows-specific files on non-Windows systems. +* Use new sysconfig module with Python 2.7 or >=3.2. + +----- +0.7.2 +----- + +* `Issue #14 <https://bitbucket.org/pypa/setuptools/issue/14>`_: Use markerlib when the `parser` module is not available. +* `Issue #10 <https://bitbucket.org/pypa/setuptools/issue/10>`_: ``ez_setup.py`` now uses HTTPS to download setuptools from PyPI. + +----- +0.7.1 +----- + +* Fix NameError (`Issue #3 <https://bitbucket.org/pypa/setuptools/issue/3>`_) again - broken in bad merge. + +--- +0.7 +--- + +* Merged Setuptools and Distribute. See docs/merge.txt for details. + +Added several features that were slated for setuptools 0.6c12: + +* Index URL now defaults to HTTPS. +* Added experimental environment marker support. Now clients may designate a + PEP-426 environment marker for "extra" dependencies. Setuptools uses this + feature in ``setup.py`` for optional SSL and certificate validation support + on older platforms. Based on Distutils-SIG discussions, the syntax is + somewhat tentative. There should probably be a PEP with a firmer spec before + the feature should be considered suitable for use. +* Added support for SSL certificate validation when installing packages from + an HTTPS service. + +----- +0.7b4 +----- + +* `Issue #3 <https://bitbucket.org/pypa/setuptools/issue/3>`_: Fixed NameError in SSL support. 
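+
+The experimental environment marker support described under 0.7 above can be
+sketched roughly as follows (hypothetical project name; the syntax was still
+tentative at the time)::
+
+    from setuptools import setup
+
+    setup(
+        name='example',
+        # the "ssl" extra pulls in its dependency only on Windows
+        extras_require={
+            'ssl:sys_platform=="win32"': ['wincertstore'],
+        },
+    )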
+ +------ +0.6.49 +------ + +* Move warning check in ``get_cache_path`` to follow the directory creation + to avoid errors when the cache path does not yet exist. Fixes the error + reported in `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_. + +------ +0.6.48 +------ + +* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in + 0.6.46 (redo). + +------ +0.6.47 +------ + +* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in + 0.6.46. + +------ +0.6.46 +------ + +* `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_: Issue a warning if the PYTHON_EGG_CACHE or otherwise + customized egg cache location specifies a directory that's group- or + world-writable. + +------ +0.6.45 +------ + +* `Distribute #379 <https://bitbucket.org/tarek/distribute/issue/379>`_: ``distribute_setup.py`` now traps VersionConflict as well, + restoring ability to upgrade from an older setuptools version. + +------ +0.6.44 +------ + +* ``distribute_setup.py`` has been updated to allow Setuptools 0.7 to + satisfy use_setuptools. + +------ +0.6.43 +------ + +* `Distribute #378 <https://bitbucket.org/tarek/distribute/issue/378>`_: Restore support for Python 2.4 Syntax (regression in 0.6.42). + +------ +0.6.42 +------ + +* External links finder no longer yields duplicate links. +* `Distribute #337 <https://bitbucket.org/tarek/distribute/issue/337>`_: Moved site.py to setuptools/site-patch.py (graft of very old + patch from setuptools trunk which inspired PR `#31 <https://bitbucket.org/pypa/setuptools/issue/31>`_). + +------ +0.6.41 +------ + +* `Distribute #27 <https://bitbucket.org/tarek/distribute/issue/27>`_: Use public api for loading resources from zip files rather than + the private method `_zip_directory_cache`. +* Added a new function ``easy_install.get_win_launcher`` which may be used by + third-party libraries such as buildout to get a suitable script launcher. + +------ +0.6.40 +------ + +* `Distribute #376 <https://bitbucket.org/tarek/distribute/issue/376>`_: brought back cli.exe and gui.exe that were deleted in the + previous release. + +------ +0.6.39 +------ + +* Add support for console launchers on ARM platforms. +* Fix possible issue in GUI launchers where the subsystem was not supplied to + the linker. +* Launcher build script now refactored for robustness. +* `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_: Resources extracted from a zip egg to the file system now also + check the contents of the file against the zip contents during each + invocation of get_resource_filename. + +------ +0.6.38 +------ + +* `Distribute #371 <https://bitbucket.org/tarek/distribute/issue/371>`_: The launcher manifest file is now installed properly. + +------ +0.6.37 +------ + +* `Distribute #143 <https://bitbucket.org/tarek/distribute/issue/143>`_: Launcher scripts, including easy_install itself, are now + accompanied by a manifest on 32-bit Windows environments to avoid the + Installer Detection Technology and thus undesirable UAC elevation described + in `this Microsoft article + <http://technet.microsoft.com/en-us/library/cc709628%28WS.10%29.aspx>`_. + +------ +0.6.36 +------ + +* `Pull Request #35 <https://bitbucket.org/pypa/setuptools/pull-request/35>`_: In `Buildout #64 <https://github.com/buildout/buildout/issues/64>`_, it was reported that + under Python 3, installation of distutils scripts could attempt to copy + the ``__pycache__`` directory as a file, causing an error, apparently only + under Windows. 
Easy_install now skips all directories when processing
+  metadata scripts.
+
+------
+0.6.35
+------
+
+
+Note this release is backward-incompatible with distribute 0.6.23-0.6.34 in
+how it parses version numbers.
+
+* `Distribute #278 <https://bitbucket.org/tarek/distribute/issue/278>`_: Restored compatibility with distribute 0.6.22 and setuptools
+  0.6. Updated the documentation to match more closely with the version
+  parsing as intended in setuptools 0.6.
+
+------
+0.6.34
+------
+
+* `Distribute #341 <https://bitbucket.org/tarek/distribute/issue/341>`_: 0.6.33 fails to build under Python 2.4.
+
+------
+0.6.33
+------
+
+* Fix 2 errors with Jython 2.5.
+* Fix 1 failure with Jython 2.5 and 2.7.
+* Disable workaround for Jython scripts on Linux systems.
+* `Distribute #336 <https://bitbucket.org/tarek/distribute/issue/336>`_: `setup.py` no longer masks failure exit code when tests fail.
+* Fix issue in pkg_resources where try/except around a platform-dependent
+  import would trigger hook load failures on Mercurial. See pull request 32
+  for details.
+* `Distribute #341 <https://bitbucket.org/tarek/distribute/issue/341>`_: Fix a ResourceWarning.
+
+------
+0.6.32
+------
+
+* Fix test suite with Python 2.6.
+* Fix some DeprecationWarnings and ResourceWarnings.
+* `Distribute #335 <https://bitbucket.org/tarek/distribute/issue/335>`_: Backed out `setup_requires` superseding installed requirements
+  until regression can be addressed.
+
+------
+0.6.31
+------
+
+* `Distribute #303 <https://bitbucket.org/tarek/distribute/issue/303>`_: Make sure the manifest only ever contains UTF-8 in Python 3.
+* `Distribute #329 <https://bitbucket.org/tarek/distribute/issue/329>`_: Properly close files created by tests for compatibility with
+  Jython.
+* Work around `Jython #1980 <http://bugs.jython.org/issue1980>`_ and `Jython #1981 <http://bugs.jython.org/issue1981>`_.
+* `Distribute #334 <https://bitbucket.org/tarek/distribute/issue/334>`_: Provide workaround for packages that reference `sys.__stdout__`
+  such as numpy does. This change should address
+  `virtualenv #359 <https://github.com/pypa/virtualenv/issues/359>`_ as long
+  as the system encoding is UTF-8 or the IO encoding is specified in the
+  environment, i.e.::
+
+    PYTHONIOENCODING=utf8 pip install numpy
+
+* Fix for encoding issue when installing from Windows executable on Python 3.
+* `Distribute #323 <https://bitbucket.org/tarek/distribute/issue/323>`_: Allow `setup_requires` requirements to supersede installed
+  requirements. Added some new keyword arguments to existing pkg_resources
+  methods. Also had to update how __path__ is handled for namespace packages
+  to ensure that when a new egg distribution containing a namespace package is
+  placed on sys.path, the entries in __path__ are found in the same order they
+  would have been in had that egg been on the path when pkg_resources was
+  first imported.
+
+------
+0.6.30
+------
+
+* `Distribute #328 <https://bitbucket.org/tarek/distribute/issue/328>`_: Clean up temporary directories in distribute_setup.py.
+* Fix fatal bug in distribute_setup.py.
+
+------
+0.6.29
+------
+
+* `Pull Request #14 <https://bitbucket.org/pypa/setuptools/pull-request/14>`_: Honor file permissions in zip files.
+* `Distribute #327 <https://bitbucket.org/tarek/distribute/issue/327>`_: Merged pull request `#24 <https://bitbucket.org/pypa/setuptools/issue/24>`_ to fix a dependency problem with pip.
+* Merged pull request `#23 <https://bitbucket.org/pypa/setuptools/issue/23>`_ to fix https://github.com/pypa/virtualenv/issues/301.
+* If Sphinx is installed, the `upload_docs` command now runs `build_sphinx`
+  to produce uploadable documentation.
+* `Distribute #326 <https://bitbucket.org/tarek/distribute/issue/326>`_: `upload_docs` provided mangled auth credentials under Python 3.
+* `Distribute #320 <https://bitbucket.org/tarek/distribute/issue/320>`_: Fix check for "createable" in distribute_setup.py.
+* `Distribute #305 <https://bitbucket.org/tarek/distribute/issue/305>`_: Remove a warning that was triggered during normal operations.
+* `Distribute #311 <https://bitbucket.org/tarek/distribute/issue/311>`_: Print metadata in UTF-8 independent of platform.
+* `Distribute #303 <https://bitbucket.org/tarek/distribute/issue/303>`_: Read manifest file with UTF-8 encoding under Python 3.
+* `Distribute #301 <https://bitbucket.org/tarek/distribute/issue/301>`_: Allow running tests of namespace packages when using 2to3.
+* `Distribute #304 <https://bitbucket.org/tarek/distribute/issue/304>`_: Prevent import loop in site.py under Python 3.3.
+* `Distribute #283 <https://bitbucket.org/tarek/distribute/issue/283>`_: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3.
+* `Distribute #299 <https://bitbucket.org/tarek/distribute/issue/299>`_: The develop command didn't work on Python 3, when using 2to3,
+  as the egg link would go to the Python 2 source. Linking to the 2to3'd code
+  in build/lib makes it work, although you will have to rebuild the module
+  before testing it.
+* `Distribute #306 <https://bitbucket.org/tarek/distribute/issue/306>`_: Even if 2to3 is used, we build in-place under Python 2.
+* `Distribute #307 <https://bitbucket.org/tarek/distribute/issue/307>`_: Prints the full path when .svn/entries is broken.
+* `Distribute #313 <https://bitbucket.org/tarek/distribute/issue/313>`_: Support for sdist subcommands (Python 2.7).
+* `Distribute #314 <https://bitbucket.org/tarek/distribute/issue/314>`_: test_local_index() would fail on OS X.
+* `Distribute #310 <https://bitbucket.org/tarek/distribute/issue/310>`_: Non-ASCII characters in a namespace __init__.py cause errors.
+* `Distribute #218 <https://bitbucket.org/tarek/distribute/issue/218>`_: Improved documentation on behavior of `package_data` and
+  `include_package_data`. Files indicated by `package_data` are now included
+  in the manifest.
+* `distribute_setup.py` now allows a `--download-base` argument for retrieving
+  distribute from a specified location.
+
+------
+0.6.28
+------
+
+* `Distribute #294 <https://bitbucket.org/tarek/distribute/issue/294>`_: setup.py can now be invoked from any directory.
+* Scripts are now installed honoring the umask.
+* Added support for .dist-info directories.
+* `Distribute #283 <https://bitbucket.org/tarek/distribute/issue/283>`_: Fix and disable scanning of `*.pyc` / `*.pyo` files on
+  Python 3.3.
+
+------
+0.6.27
+------
+
+* Support current snapshots of CPython 3.3.
+* Distribute now recognizes README.rst as a standard, default readme file.
+* Exclude 'encodings' modules when removing modules from sys.modules.
+  Workaround for `#285 <https://bitbucket.org/pypa/setuptools/issue/285>`_.
+* `Distribute #231 <https://bitbucket.org/tarek/distribute/issue/231>`_: Don't fiddle with the system Python when used with buildout
+  (bootstrap.py).
+
+------
+0.6.26
+------
+
+* `Distribute #183 <https://bitbucket.org/tarek/distribute/issue/183>`_: Symlinked files are now extracted from source distributions.
+* `Distribute #227 <https://bitbucket.org/tarek/distribute/issue/227>`_: Easy_install fetch parameters are now passed during the
+  installation of a source distribution; now fulfillment of setup_requires
+  dependencies will honor the parameters passed to easy_install.
+
+------
+0.6.25
+------
+
+* `Distribute #258 <https://bitbucket.org/tarek/distribute/issue/258>`_: Work around a cache issue.
+* `Distribute #260 <https://bitbucket.org/tarek/distribute/issue/260>`_: distribute_setup.py now accepts the --user parameter for
+  Python 2.6 and later.
+* `Distribute #262 <https://bitbucket.org/tarek/distribute/issue/262>`_: package_index.open_with_auth no longer throws LookupError
+  on Python 3.
+* `Distribute #269 <https://bitbucket.org/tarek/distribute/issue/269>`_: Fix AttributeError when an exception occurs while reading
+  Manifest.in on recent releases of Python.
+* `Distribute #272 <https://bitbucket.org/tarek/distribute/issue/272>`_: Prevent TypeError when namespace package names are unicode
+  and single-install-externally-managed is used. Also fixes PIP issue
+  449.
+* `Distribute #273 <https://bitbucket.org/tarek/distribute/issue/273>`_: Legacy script launchers now install with Python2/3 support.
+
+------
+0.6.24
+------
+
+* `Distribute #249 <https://bitbucket.org/tarek/distribute/issue/249>`_: Added options to exclude 2to3 fixers.
+
+------
+0.6.23
+------
+
+* `Distribute #244 <https://bitbucket.org/tarek/distribute/issue/244>`_: Fixed a test.
+* `Distribute #243 <https://bitbucket.org/tarek/distribute/issue/243>`_: Fixed a test.
+* `Distribute #239 <https://bitbucket.org/tarek/distribute/issue/239>`_: Fixed a test.
+* `Distribute #240 <https://bitbucket.org/tarek/distribute/issue/240>`_: Fixed a test.
+* `Distribute #241 <https://bitbucket.org/tarek/distribute/issue/241>`_: Fixed a test.
+* `Distribute #237 <https://bitbucket.org/tarek/distribute/issue/237>`_: Fixed a test.
+* `Distribute #238 <https://bitbucket.org/tarek/distribute/issue/238>`_: easy_install now uses 64-bit executable wrappers on 64-bit Python.
+* `Distribute #208 <https://bitbucket.org/tarek/distribute/issue/208>`_: Fixed parsed_versions; it now honors post-releases as noted in the documentation.
+* `Distribute #207 <https://bitbucket.org/tarek/distribute/issue/207>`_: Windows cli and gui wrappers pass CTRL-C to the child Python process.
+* `Distribute #227 <https://bitbucket.org/tarek/distribute/issue/227>`_: easy_install now passes its arguments to setup.py bdist_egg.
+* `Distribute #225 <https://bitbucket.org/tarek/distribute/issue/225>`_: Fixed a NameError on Python 2.5 and 2.4.
+
+------
+0.6.21
+------
+
+* `Distribute #225 <https://bitbucket.org/tarek/distribute/issue/225>`_: Fixed a regression on py2.4.
+
+------
+0.6.20
+------
+
+* `Distribute #135 <https://bitbucket.org/tarek/distribute/issue/135>`_: Include the URL in the warning when processing URLs in package_index.
+* `Distribute #212 <https://bitbucket.org/tarek/distribute/issue/212>`_: Fix issue where easy_install fails on Python 3 with the Windows installer.
+* `Distribute #213 <https://bitbucket.org/tarek/distribute/issue/213>`_: Fix typo in documentation.
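+
+As a usage note for the ``--user`` parameter accepted by distribute_setup.py
+(0.6.25 above), a typical invocation installs to the per-user site-packages::
+
+    > python distribute_setup.py --user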
+
+------
+0.6.19
+------
+
+* `Distribute #206 <https://bitbucket.org/tarek/distribute/issue/206>`_: Fix AttributeError: 'HTTPMessage' object has no attribute 'getheaders'.
+
+------
+0.6.18
+------
+
+* `Distribute #210 <https://bitbucket.org/tarek/distribute/issue/210>`_: Fixed a regression introduced by the `Distribute #204 <https://bitbucket.org/tarek/distribute/issue/204>`_ fix.
+
+------
+0.6.17
+------
+
+* Support the 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment
+  variable to allow disabling installation of the easy_install-${version} script.
+* Support Python >=3.1.4 and >=3.2.1.
+* `Distribute #204 <https://bitbucket.org/tarek/distribute/issue/204>`_: Don't try to import the parent of a namespace package in
+  declare_namespace.
+* `Distribute #196 <https://bitbucket.org/tarek/distribute/issue/196>`_: Tolerate responses with multiple Content-Length headers.
+* `Distribute #205 <https://bitbucket.org/tarek/distribute/issue/205>`_: Sandboxing doesn't preserve working_set, which leads to
+  setup_requires problems.
+
+------
+0.6.16
+------
+
+* Builds sdist gztar even on Windows (avoiding `Distribute #193 <https://bitbucket.org/tarek/distribute/issue/193>`_).
+* `Distribute #192 <https://bitbucket.org/tarek/distribute/issue/192>`_: Fixed metadata omitted on Windows when package_dir is
+  specified with a forward slash.
+* `Distribute #195 <https://bitbucket.org/tarek/distribute/issue/195>`_: Cython build support.
+* `Distribute #200 <https://bitbucket.org/tarek/distribute/issue/200>`_: Fixed issues with recognizing 64-bit packages on Windows.
+
+------
+0.6.15
+------
+
+* Fixed typo in bdist_egg.
+* Several issues under Python 3 have been solved.
+* `Distribute #146 <https://bitbucket.org/tarek/distribute/issue/146>`_: Fixed missing DLL files after easy_install of a Windows exe package.
+
+------
+0.6.14
+------
+
+* `Distribute #170 <https://bitbucket.org/tarek/distribute/issue/170>`_: Fixed unittest failure. Thanks to Toshio.
+* `Distribute #171 <https://bitbucket.org/tarek/distribute/issue/171>`_: Fixed race condition in unittests causing deadlocks in the test suite.
+* `Distribute #143 <https://bitbucket.org/tarek/distribute/issue/143>`_: Fixed a lookup issue with easy_install.
+  Thanks to David and Zooko.
+* `Distribute #174 <https://bitbucket.org/tarek/distribute/issue/174>`_: Fixed the edit mode when it's used with setuptools itself.
+
+------
+0.6.13
+------
+
+* `Distribute #160 <https://bitbucket.org/tarek/distribute/issue/160>`_: 2.7 gives ValueError("Invalid IPv6 URL").
+* `Distribute #150 <https://bitbucket.org/tarek/distribute/issue/150>`_: Fixed using ~/.local even in a --no-site-packages virtualenv.
+* `Distribute #163 <https://bitbucket.org/tarek/distribute/issue/163>`_: Scan index links before external links, and don't use the MD5 when
+  comparing two distributions.
+
+------
+0.6.12
+------
+
+* `Distribute #149 <https://bitbucket.org/tarek/distribute/issue/149>`_: Fixed various failures on 2.3/2.4.
+
+------
+0.6.11
+------
+
+* Found another case of SandboxViolation - fixed.
+* `Distribute #15 <https://bitbucket.org/tarek/distribute/issue/15>`_ and `Distribute #48 <https://bitbucket.org/tarek/distribute/issue/48>`_: Introduced a socket timeout of 15 seconds on URL openings.
+* Added indexsidebar.html to MANIFEST.in.
+* `Distribute #108 <https://bitbucket.org/tarek/distribute/issue/108>`_: Fixed TypeError with Python 3.1.
+* `Distribute #121 <https://bitbucket.org/tarek/distribute/issue/121>`_: Fixed --help install command trying to actually install.
+* `Distribute #112 <https://bitbucket.org/tarek/distribute/issue/112>`_: Added an os.makedirs call so that Tarek's solution will work.
+* `Distribute #133 <https://bitbucket.org/tarek/distribute/issue/133>`_: Added --no-find-links to easy_install.
+* Added easy_install --user.
+* `Distribute #100 <https://bitbucket.org/tarek/distribute/issue/100>`_: Fixed develop --user not taking '.' in PYTHONPATH into account.
+* `Distribute #134 <https://bitbucket.org/tarek/distribute/issue/134>`_: Removed spurious UserWarnings. Patch by VanLindberg.
+* `Distribute #138 <https://bitbucket.org/tarek/distribute/issue/138>`_: Fixed cant_write_to_target error when setup_requires is used.
+* `Distribute #147 <https://bitbucket.org/tarek/distribute/issue/147>`_: Respect the sys.dont_write_bytecode flag.
+
+------
+0.6.10
+------
+
+* Reverted change made for the DistributionNotFound exception because
+  zc.buildout uses the exception message to get the name of the
+  distribution.
+
+-----
+0.6.9
+-----
+
+* `Distribute #90 <https://bitbucket.org/tarek/distribute/issue/90>`_: An unknown setuptools version can now be added to the working set.
+* `Distribute #87 <https://bitbucket.org/tarek/distribute/issue/87>`_: setup.py doesn't try to convert distribute_setup.py anymore.
+  Initial patch by arfrever.
+* `Distribute #89 <https://bitbucket.org/tarek/distribute/issue/89>`_: Added a side bar with a download link to the doc.
+* `Distribute #86 <https://bitbucket.org/tarek/distribute/issue/86>`_: Fixed missing sentence in pkg_resources doc.
+* Added a nicer error message when a DistributionNotFound is raised.
+* `Distribute #80 <https://bitbucket.org/tarek/distribute/issue/80>`_: test_develop now works with Python 3.1.
+* `Distribute #93 <https://bitbucket.org/tarek/distribute/issue/93>`_: upload_docs now works if there is an empty sub-directory.
+* `Distribute #70 <https://bitbucket.org/tarek/distribute/issue/70>`_: Fixed the exec bit on non-exec files.
+* `Distribute #99 <https://bitbucket.org/tarek/distribute/issue/99>`_: The standalone easy_install command no longer uses a
+  "setup.cfg" if one exists in the working directory. It will use it
+  only if triggered by ``install_requires`` from a setup.py call
+  (install, develop, etc.).
+* `Distribute #101 <https://bitbucket.org/tarek/distribute/issue/101>`_: Allow ``os.devnull`` in Sandbox.
+* `Distribute #92 <https://bitbucket.org/tarek/distribute/issue/92>`_: Fixed the "no eggs found" error with MacPort
+  (platform.mac_ver() fails).
+* `Distribute #103 <https://bitbucket.org/tarek/distribute/issue/103>`_: test_get_script_header_jython_workaround is not run
+  anymore under py3 with C or POSIX locale. Contributed by Arfrever.
+* `Distribute #104 <https://bitbucket.org/tarek/distribute/issue/104>`_: Removed the assertion when the installation fails,
+  with a nicer message for the end user.
+* `Distribute #100 <https://bitbucket.org/tarek/distribute/issue/100>`_: Make sure there's no SandboxViolation when
+  the setup script patches setuptools.
+
+-----
+0.6.8
+-----
+
+* Added "check_packages" in dist. (added in Setuptools 0.6c11)
+* Fixed the DONT_PATCH_SETUPTOOLS state.
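+
+For the ``easy_install --user`` support added in 0.6.11 above, a typical
+invocation (``SomePackage`` being a stand-in name) installs into the per-user
+site-packages directory::
+
+    > easy_install --user SomePackage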
+
+-----
+0.6.7
+-----
+
+* `Distribute #58 <https://bitbucket.org/tarek/distribute/issue/58>`_: Added --user support to the develop command.
+* `Distribute #11 <https://bitbucket.org/tarek/distribute/issue/11>`_: Generated scripts now wrap their call to the script entry point
+  in the standard "if __name__ == '__main__'" block.
+* Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv
+  can drive an installation that doesn't patch a global setuptools.
+* Reviewed unladen-swallow specific change from
+  http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719
+  and determined that it no longer applies. Distribute should work fine with
+  Unladen Swallow 2009Q3.
+* `Distribute #21 <https://bitbucket.org/tarek/distribute/issue/21>`_: Allow PackageIndex.open_url to gracefully handle all cases of an
+  httplib.HTTPException instead of just InvalidURL and BadStatusLine.
+* Removed virtual-python.py from this distribution and updated documentation
+  to point to the actively maintained virtualenv instead.
+* `Distribute #64 <https://bitbucket.org/tarek/distribute/issue/64>`_: use_setuptools no longer rebuilds the distribute egg every
+  time it is run.
+* use_setuptools now properly respects the requested version.
+* use_setuptools will no longer try to import a distribute egg for the
+  wrong Python version.
+* `Distribute #74 <https://bitbucket.org/tarek/distribute/issue/74>`_: no_fake should be True by default.
+* `Distribute #72 <https://bitbucket.org/tarek/distribute/issue/72>`_: Avoid a bootstrapping issue with easy_install -U.
+
+-----
+0.6.6
+-----
+
+* Unified the bootstrap file so it works on both py2.x and py3k without 2to3
+  (patch by Holger Krekel).
+
+-----
+0.6.5
+-----
+
+* `Distribute #65 <https://bitbucket.org/tarek/distribute/issue/65>`_: cli.exe and gui.exe are now generated at build time,
+  depending on the platform in use.
+
+* `Distribute #67 <https://bitbucket.org/tarek/distribute/issue/67>`_: Fixed doc typo (PEP 381/382).
+
+* Distribute no longer shadows setuptools if we require a 0.7-series
+  setuptools, and an error is raised when installing a 0.7 setuptools with
+  distribute.
+
+* When run from within buildout, no attempt is made to modify an existing
+  setuptools egg, whether in a shared egg directory or a system setuptools.
+
+* Fixed a hole in sandboxing allowing builtin file to write outside of
+  the sandbox.
+
+-----
+0.6.4
+-----
+
+* Added the generation of `distribute_setup_3k.py` during the release.
+  This closes `Distribute #52 <https://bitbucket.org/tarek/distribute/issue/52>`_.
+
+* Added an upload_docs command to easily upload project documentation to
+  PyPI's https://pythonhosted.org. This closes issue `Distribute #56 <https://bitbucket.org/tarek/distribute/issue/56>`_.
+
+* Fixed a bootstrap bug in the use_setuptools() API.
+
+-----
+0.6.3
+-----
+
+setuptools
+==========
+
+* Fixed a bunch of calls to file() that caused crashes on Python 3.
+
+bootstrapping
+=============
+
+* Fixed a bug in sorting that caused bootstrap to fail on Python 3.
+
+-----
+0.6.2
+-----
+
+setuptools
+==========
+
+* Added Python 3 support; see docs/python3.txt.
+  This closes `Old Setuptools #39 <http://bugs.python.org/setuptools/issue39>`_.
+
+* Added option to run 2to3 automatically when installing on Python 3.
+  This closes issue `Distribute #31 <https://bitbucket.org/tarek/distribute/issue/31>`_.
+
+* Fixed invalid usage of requirement.parse that broke develop -d.
+  This closes `Old Setuptools #44 <http://bugs.python.org/setuptools/issue44>`_.
+
+* Fixed script launcher for 64-bit Windows.
+  This closes `Old Setuptools #2 <http://bugs.python.org/setuptools/issue2>`_.
+
+* Fixed KeyError when compiling extensions.
+  This closes `Old Setuptools #41 <http://bugs.python.org/setuptools/issue41>`_.
+
+bootstrapping
+=============
+
+* Fixed bootstrap not working on Windows. This closes issue `Distribute #49 <https://bitbucket.org/tarek/distribute/issue/49>`_.
+
+* Fixed 2.6 dependencies. This closes issue `Distribute #50 <https://bitbucket.org/tarek/distribute/issue/50>`_.
+
+* Make sure setuptools is patched when running through easy_install.
+  This closes `Old Setuptools #40 <http://bugs.python.org/setuptools/issue40>`_.
+
+-----
+0.6.1
+-----
+
+setuptools
+==========
+
+* package_index.urlopen now catches BadStatusLine and malformed URL errors.
+  This closes `Distribute #16 <https://bitbucket.org/tarek/distribute/issue/16>`_ and `Distribute #18 <https://bitbucket.org/tarek/distribute/issue/18>`_.
+
+* zip_ok is now False by default. This closes `Old Setuptools #33 <http://bugs.python.org/setuptools/issue33>`_.
+
+* Fixed invalid URL error catching. `Old Setuptools #20 <http://bugs.python.org/setuptools/issue20>`_.
+
+* Fixed invalid bootstrapping with easy_install installation (`Distribute #40 <https://bitbucket.org/tarek/distribute/issue/40>`_).
+  Thanks to Florian Schulze for the help.
+
+* Removed buildout/bootstrap.py. A new repository will create a specific
+  bootstrap.py script.
+
+
+bootstrapping
+=============
+
+* The bootstrap process leaves setuptools alone if it is detected in the system
+  and --root or --prefix is provided, but is not in the same location.
+  This closes `Distribute #10 <https://bitbucket.org/tarek/distribute/issue/10>`_.
+
+---
+0.6
+---
+
+setuptools
+==========
+
+* Packages required at build time were not fully present at install time.
+  This closes `Distribute #12 <https://bitbucket.org/tarek/distribute/issue/12>`_.
+
+* Protected against failures in tarfile extraction. This closes `Distribute #10 <https://bitbucket.org/tarek/distribute/issue/10>`_.
+
+* Made Jython api_tests.txt doctest compatible. This closes `Distribute #7 <https://bitbucket.org/tarek/distribute/issue/7>`_.
+
+* sandbox.py replaced builtin type file with builtin function open. This
+  closes `Distribute #6 <https://bitbucket.org/tarek/distribute/issue/6>`_.
+
+* Immediately close all file handles. This closes `Distribute #3 <https://bitbucket.org/tarek/distribute/issue/3>`_.
+
+* Added compatibility with Subversion 1.6. This references `Distribute #1 <https://bitbucket.org/tarek/distribute/issue/1>`_.
+
+pkg_resources
+=============
+
+* Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API
+  instead. Based on a patch from ronaldoussoren. This closes issue `#5 <https://bitbucket.org/pypa/setuptools/issue/5>`_.
+
+* Fixed a SandboxViolation for mkdir that could occur in certain cases.
+  This closes `Distribute #13 <https://bitbucket.org/tarek/distribute/issue/13>`_.
+
+* Allow find_on_path on systems with tight permissions to fail gracefully.
+  This closes `Distribute #9 <https://bitbucket.org/tarek/distribute/issue/9>`_.
+
+* Corrected inconsistency between documentation and code of add_entry.
+  This closes `Distribute #8 <https://bitbucket.org/tarek/distribute/issue/8>`_.
+
+* Immediately close all file handles. This closes `Distribute #3 <https://bitbucket.org/tarek/distribute/issue/3>`_.
+
+easy_install
+============
+
+* Immediately close all file handles.
This closes `Distribute #3 <https://bitbucket.org/tarek/distribute/issue/3>`_. + +----- +0.6c9 +----- + + * Fixed a missing files problem when using Windows source distributions on + non-Windows platforms, due to distutils not handling manifest file line + endings correctly. + + * Updated Pyrex support to work with Pyrex 0.9.6 and higher. + + * Minor changes for Jython compatibility, including skipping tests that can't + work on Jython. + + * Fixed not installing eggs in ``install_requires`` if they were also used for + ``setup_requires`` or ``tests_require``. + + * Fixed not fetching eggs in ``install_requires`` when running tests. + + * Allow ``ez_setup.use_setuptools()`` to upgrade existing setuptools + installations when called from a standalone ``setup.py``. + + * Added a warning if a namespace package is declared, but its parent package + is not also declared as a namespace. + + * Support Subversion 1.5 + + * Removed use of deprecated ``md5`` module if ``hashlib`` is available + + * Fixed ``bdist_wininst upload`` trying to upload the ``.exe`` twice + + * Fixed ``bdist_egg`` putting a ``native_libs.txt`` in the source package's + ``.egg-info``, when it should only be in the built egg's ``EGG-INFO``. + + * Ensure that _full_name is set on all shared libs before extensions are + checked for shared lib usage. (Fixes a bug in the experimental shared + library build support.) + + * Fix to allow unpacked eggs containing native libraries to fail more + gracefully under Google App Engine (with an ``ImportError`` loading the + C-based module, instead of getting a ``NameError``). + +----- +0.6c7 +----- + + * Fixed ``distutils.filelist.findall()`` crashing on broken symlinks, and + ``egg_info`` command failing on new, uncommitted SVN directories. + + * Fix import problems with nested namespace packages installed via + ``--root`` or ``--single-version-externally-managed``, due to the + parent package not having the child package as an attribute. + +----- +0.6c6 +----- + + * Added ``--egg-path`` option to ``develop`` command, allowing you to force + ``.egg-link`` files to use relative paths (allowing them to be shared across + platforms on a networked drive). + + * Fix not building binary RPMs correctly. + + * Fix "eggsecutables" (such as setuptools' own egg) only being runnable with + bash-compatible shells. + + * Fix ``#!`` parsing problems in Windows ``.exe`` script wrappers, when there + was whitespace inside a quoted argument or at the end of the ``#!`` line + (a regression introduced in 0.6c4). + + * Fix ``test`` command possibly failing if an older version of the project + being tested was installed on ``sys.path`` ahead of the test source + directory. + + * Fix ``find_packages()`` treating ``ez_setup`` and directories with ``.`` in + their names as packages. + +----- +0.6c5 +----- + + * Fix uploaded ``bdist_rpm`` packages being described as ``bdist_egg`` + packages under Python versions less than 2.5. + + * Fix uploaded ``bdist_wininst`` packages being described as suitable for + "any" version by Python 2.5, even if a ``--target-version`` was specified. + +----- +0.6c4 +----- + + * Overhauled Windows script wrapping to support ``bdist_wininst`` better. + Scripts installed with ``bdist_wininst`` will always use ``#!python.exe`` or + ``#!pythonw.exe`` as the executable name (even when built on non-Windows + platforms!), and the wrappers will look for the executable in the script's + parent directory (which should find the right version of Python). 
+ + * Fix ``upload`` command not uploading files built by ``bdist_rpm`` or + ``bdist_wininst`` under Python 2.3 and 2.4. + + * Add support for "eggsecutable" headers: a ``#!/bin/sh`` script that is + prepended to an ``.egg`` file to allow it to be run as a script on Unix-ish + platforms. (This is mainly so that setuptools itself can have a single-file + installer on Unix, without doing multiple downloads, dealing with firewalls, + etc.) + + * Fix problem with empty revision numbers in Subversion 1.4 ``entries`` files + + * Use cross-platform relative paths in ``easy-install.pth`` when doing + ``develop`` and the source directory is a subdirectory of the installation + target directory. + + * Fix a problem installing eggs with a system packaging tool if the project + contained an implicit namespace package; for example if the ``setup()`` + listed a namespace package ``foo.bar`` without explicitly listing ``foo`` + as a namespace package. + +----- +0.6c3 +----- + + * Fixed breakages caused by Subversion 1.4's new "working copy" format + +----- +0.6c2 +----- + + * The ``ez_setup`` module displays the conflicting version of setuptools (and + its installation location) when a script requests a version that's not + available. + + * Running ``setup.py develop`` on a setuptools-using project will now install + setuptools if needed, instead of only downloading the egg. + +----- +0.6c1 +----- + + * Fixed ``AttributeError`` when trying to download a ``setup_requires`` + dependency when a distribution lacks a ``dependency_links`` setting. + + * Made ``zip-safe`` and ``not-zip-safe`` flag files contain a single byte, so + as to play better with packaging tools that complain about zero-length + files. + + * Made ``setup.py develop`` respect the ``--no-deps`` option, which it + previously was ignoring. + + * Support ``extra_path`` option to ``setup()`` when ``install`` is run in + backward-compatibility mode. + + * Source distributions now always include a ``setup.cfg`` file that explicitly + sets ``egg_info`` options such that they produce an identical version number + to the source distribution's version number. (Previously, the default + version number could be different due to the use of ``--tag-date``, or if + the version was overridden on the command line that built the source + distribution.) + +----- +0.6b4 +----- + + * Fix ``register`` not obeying name/version set by ``egg_info`` command, if + ``egg_info`` wasn't explicitly run first on the same command line. + + * Added ``--no-date`` and ``--no-svn-revision`` options to ``egg_info`` + command, to allow suppressing tags configured in ``setup.cfg``. + + * Fixed redundant warnings about missing ``README`` file(s); it should now + appear only if you are actually a source distribution. + +----- +0.6b3 +----- + + * Fix ``bdist_egg`` not including files in subdirectories of ``.egg-info``. + + * Allow ``.py`` files found by the ``include_package_data`` option to be + automatically included. Remove duplicate data file matches if both + ``include_package_data`` and ``package_data`` are used to refer to the same + files. + +----- +0.6b1 +----- + + * Strip ``module`` from the end of compiled extension modules when computing + the name of a ``.py`` loader/wrapper. (Python's import machinery ignores + this suffix when searching for an extension module.) + +------ +0.6a11 +------ + + * Added ``test_loader`` keyword to support custom test loaders + + * Added ``setuptools.file_finders`` entry point group to allow implementing + revision control plugins. 
+ + * Added ``--identity`` option to ``upload`` command. + + * Added ``dependency_links`` to allow specifying URLs for ``--find-links``. + + * Enhanced test loader to scan packages as well as modules, and call + ``additional_tests()`` if present to get non-unittest tests. + + * Support namespace packages in conjunction with system packagers, by omitting + the installation of any ``__init__.py`` files for namespace packages, and + adding a special ``.pth`` file to create a working package in + ``sys.modules``. + + * Made ``--single-version-externally-managed`` automatic when ``--root`` is + used, so that most system packagers won't require special support for + setuptools. + + * Fixed ``setup_requires``, ``tests_require``, etc. not using ``setup.cfg`` or + other configuration files for their option defaults when installing, and + also made the install use ``--multi-version`` mode so that the project + directory doesn't need to support .pth files. + + * ``MANIFEST.in`` is now forcibly closed when any errors occur while reading + it. Previously, the file could be left open and the actual error would be + masked by problems trying to remove the open file on Windows systems. + +------ +0.6a10 +------ + + * Fixed the ``develop`` command ignoring ``--find-links``. + +----- +0.6a9 +----- + + * The ``sdist`` command no longer uses the traditional ``MANIFEST`` file to + create source distributions. ``MANIFEST.in`` is still read and processed, + as are the standard defaults and pruning. But the manifest is built inside + the project's ``.egg-info`` directory as ``SOURCES.txt``, and it is rebuilt + every time the ``egg_info`` command is run. + + * Added the ``include_package_data`` keyword to ``setup()``, allowing you to + automatically include any package data listed in revision control or + ``MANIFEST.in`` + + * Added the ``exclude_package_data`` keyword to ``setup()``, allowing you to + trim back files included via the ``package_data`` and + ``include_package_data`` options. + + * Fixed ``--tag-svn-revision`` not working when run from a source + distribution. + + * Added warning for namespace packages with missing ``declare_namespace()`` + + * Added ``tests_require`` keyword to ``setup()``, so that e.g. packages + requiring ``nose`` to run unit tests can make this dependency optional + unless the ``test`` command is run. + + * Made all commands that use ``easy_install`` respect its configuration + options, as this was causing some problems with ``setup.py install``. + + * Added an ``unpack_directory()`` driver to ``setuptools.archive_util``, so + that you can process a directory tree through a processing filter as if it + were a zipfile or tarfile. + + * Added an internal ``install_egg_info`` command to use as part of old-style + ``install`` operations, that installs an ``.egg-info`` directory with the + package. + + * Added a ``--single-version-externally-managed`` option to the ``install`` + command so that you can more easily wrap a "flat" egg in a system package. + + * Enhanced ``bdist_rpm`` so that it installs single-version eggs that + don't rely on a ``.pth`` file. The ``--no-egg`` option has been removed, + since all RPMs are now built in a more backwards-compatible format. + + * Support full roundtrip translation of eggs to and from ``bdist_wininst`` + format. 
Running ``bdist_wininst`` on a setuptools-based package wraps the + egg in an .exe that will safely install it as an egg (i.e., with metadata + and entry-point wrapper scripts), and ``easy_install`` can turn the .exe + back into an ``.egg`` file or directory and install it as such. + + +----- +0.6a8 +----- + + * Fixed some problems building extensions when Pyrex was installed, especially + with Python 2.4 and/or packages using SWIG. + + * Made ``develop`` command accept all the same options as ``easy_install``, + and use the ``easy_install`` command's configuration settings as defaults. + + * Made ``egg_info --tag-svn-revision`` fall back to extracting the revision + number from ``PKG-INFO`` in case it is being run on a source distribution of + a snapshot taken from a Subversion-based project. + + * Automatically detect ``.dll``, ``.so`` and ``.dylib`` files that are being + installed as data, adding them to ``native_libs.txt`` automatically. + + * Fixed some problems with fresh checkouts of projects that don't include + ``.egg-info/PKG-INFO`` under revision control and put the project's source + code directly in the project directory. If such a package had any + requirements that get processed before the ``egg_info`` command can be run, + the setup scripts would fail with a "Missing 'Version:' header and/or + PKG-INFO file" error, because the egg runtime interpreted the unbuilt + metadata in a directory on ``sys.path`` (i.e. the current directory) as + being a corrupted egg. Setuptools now monkeypatches the distribution + metadata cache to pretend that the egg has valid version information, until + it has a chance to make it actually be so (via the ``egg_info`` command). + +----- +0.6a5 +----- + + * Fixed missing gui/cli .exe files in distribution. Fixed bugs in tests. + +----- +0.6a3 +----- + + * Added ``gui_scripts`` entry point group to allow installing GUI scripts + on Windows and other platforms. (The special handling is only for Windows; + other platforms are treated the same as for ``console_scripts``.) + +----- +0.6a2 +----- + + * Added ``console_scripts`` entry point group to allow installing scripts + without the need to create separate script files. On Windows, console + scripts get an ``.exe`` wrapper so you can just type their name. On other + platforms, the scripts are written without a file extension. + +----- +0.6a1 +----- + + * Added support for building "old-style" RPMs that don't install an egg for + the target package, using a ``--no-egg`` option. + + * The ``build_ext`` command now works better when using the ``--inplace`` + option and multiple Python versions. It now makes sure that all extensions + match the current Python version, even if newer copies were built for a + different Python version. + + * The ``upload`` command no longer attaches an extra ``.zip`` when uploading + eggs, as PyPI now supports egg uploads without trickery. + + * The ``ez_setup`` script/module now displays a warning before downloading + the setuptools egg, and attempts to check the downloaded egg against an + internal MD5 checksum table. + + * Fixed the ``--tag-svn-revision`` option of ``egg_info`` not finding the + latest revision number; it was using the revision number of the directory + containing ``setup.py``, not the highest revision number in the project. + + * Added ``eager_resources`` setup argument + + * The ``sdist`` command now recognizes Subversion "deleted file" entries and + does not include them in source distributions. 
+ + * ``setuptools`` now embeds itself more thoroughly into the distutils, so that + other distutils extensions (e.g. py2exe, py2app) will subclass setuptools' + versions of things, rather than the native distutils ones. + + * Added ``entry_points`` and ``setup_requires`` arguments to ``setup()``; + ``setup_requires`` allows you to automatically find and download packages + that are needed in order to *build* your project (as opposed to running it). + + * ``setuptools`` now finds its commands, ``setup()`` argument validators, and + metadata writers using entry points, so that they can be extended by + third-party packages. See `Creating distutils Extensions + <http://pythonhosted.org/setuptools/setuptools.html#creating-distutils-extensions>`_ + for more details. + + * The vestigial ``depends`` command has been removed. It was never finished + or documented, and never would have worked without EasyInstall - which it + pre-dated and was never compatible with. + +------ +0.5a12 +------ + + * The zip-safety scanner now checks for modules that might be used with + ``python -m``, and marks them as unsafe for zipping, since Python 2.4 can't + handle ``-m`` on zipped modules. + +------ +0.5a11 +------ + + * Fix breakage of the "develop" command that was caused by the addition of + ``--always-unzip`` to the ``easy_install`` command. + +----- +0.5a9 +----- + + * Include ``svn:externals`` directories in source distributions as well as + normal subversion-controlled files and directories. + + * Added ``exclude=patternlist`` option to ``setuptools.find_packages()`` + + * Changed --tag-svn-revision to include an "r" in front of the revision number + for better readability. + + * Added ability to build eggs without including source files (except for any + scripts, of course), using the ``--exclude-source-files`` option to + ``bdist_egg``. + + * ``setup.py install`` now automatically detects when an "unmanaged" package + or module is going to be on ``sys.path`` ahead of a package being installed, + thereby preventing the newer version from being imported. If this occurs, + a warning message is output to ``sys.stderr``, but installation proceeds + anyway. The warning message informs the user what files or directories + need deleting, and advises them they can also use EasyInstall (with the + ``--delete-conflicting`` option) to do it automatically. + + * The ``egg_info`` command now adds a ``top_level.txt`` file to the metadata + directory that lists all top-level modules and packages in the distribution. + This is used by the ``easy_install`` command to find possibly-conflicting + "unmanaged" packages when installing the distribution. + + * Added ``zip_safe`` and ``namespace_packages`` arguments to ``setup()``. + Added package analysis to determine zip-safety if the ``zip_safe`` flag + is not given, and advise the author regarding what code might need changing. + + * Fixed the swapped ``-d`` and ``-b`` options of ``bdist_egg``. + +----- +0.5a8 +----- + + * The "egg_info" command now always sets the distribution metadata to "safe" + forms of the distribution name and version, so that distribution files will + be generated with parseable names (i.e., ones that don't include '-' in the + name or version). Also, this means that if you use the various ``--tag`` + options of "egg_info", any distributions generated will use the tags in the + version, not just egg distributions. + + * Added support for defining command aliases in distutils configuration files, + under the "[aliases]" section. 
To prevent recursion and to allow aliases to
+   call the command of the same name, a given alias can be expanded only once
+   per command-line invocation. You can define new aliases with the "alias"
+   command, either for the local, global, or per-user configuration.
+
+ * Added "rotate" command to delete old distribution files, given a set of
+   patterns to match and the number of files to keep. (Keeps the most
+   recently-modified distribution files matching each pattern.)
+
+ * Added "saveopts" command that saves all command-line options for the current
+   invocation to the local, global, or per-user configuration file. Useful for
+   setting defaults without having to hand-edit a configuration file.
+
+ * Added a "setopt" command that sets a single option in a specified distutils
+   configuration file.
+
+-----
+0.5a7
+-----
+
+ * Added "upload" support for egg and source distributions, including a bug
+   fix for "upload" and a temporary workaround for lack of .egg support in
+   PyPI.
+
+-----
+0.5a6
+-----
+
+ * Beefed up the "sdist" command so that if you don't have a MANIFEST.in, it
+   will include all files under revision control (CVS or Subversion) in the
+   current directory, and it will regenerate the list every time you create a
+   source distribution, not just when you tell it to. This should make the
+   default behavior "do what you mean" more often than the distutils' default
+   did, while still retaining the old behavior in the presence of MANIFEST.in.
+
+ * Fixed the "develop" command always updating .pth files, even if you
+   specified ``-n`` or ``--dry-run``.
+
+ * Slightly changed the format of the generated version when you use
+   ``--tag-build`` on the "egg_info" command, so that you can make tagged
+   revisions compare *lower* than the version specified in setup.py (e.g. by
+   using ``--tag-build=dev``).
+
+-----
+0.5a5
+-----
+
+ * Added ``develop`` command to ``setuptools``-based packages. This command
+   installs an ``.egg-link`` pointing to the package's source directory, and
+   script wrappers that ``execfile()`` the source versions of the package's
+   scripts. This lets you put your development checkout(s) on sys.path without
+   having to actually install them. (To uninstall the link, use
+   ``setup.py develop --uninstall``.)
+
+ * Added ``egg_info`` command to ``setuptools``-based packages. This command
+   just creates or updates the "projectname.egg-info" directory, without
+   building an egg. (It's used by the ``bdist_egg``, ``test``, and ``develop``
+   commands.)
+
+ * Enhanced the ``test`` command so that it doesn't install the package, but
+   instead builds any C extensions in-place, updates the ``.egg-info``
+   metadata, adds the source directory to ``sys.path``, and runs the tests
+   directly on the source. This avoids an "unmanaged" installation of the
+   package to ``site-packages`` or elsewhere.
+
+ * Made ``easy_install`` a standard ``setuptools`` command, moving it from
+   the ``easy_install`` module to ``setuptools.command.easy_install``. Note
+   that if you were importing or extending it, you must now change your imports
+   accordingly. ``easy_install.py`` is still installed as a script, but not as
+   a module.
+
+-----
+0.5a4
+-----
+
+ * Setup scripts using setuptools can now list their dependencies directly in
+   the setup.py file, without having to manually create a ``depends.txt`` file.
+   The ``install_requires`` and ``extras_require`` arguments to ``setup()``
+   are used to create a dependencies file automatically.
If you are manually + creating ``depends.txt`` right now, please switch to using these setup + arguments as soon as practical, because ``depends.txt`` support will be + removed in the 0.6 release cycle. For documentation on the new arguments, + see the ``setuptools.dist.Distribution`` class. + + * Setup scripts using setuptools now always install using ``easy_install`` + internally, for ease of uninstallation and upgrading. + +----- +0.5a1 +----- + + * Added support for "self-installation" bootstrapping. Packages can now + include ``ez_setup.py`` in their source distribution, and add the following + to their ``setup.py``, in order to automatically bootstrap installation of + setuptools as part of their setup process:: + + from ez_setup import use_setuptools + use_setuptools() + + from setuptools import setup + # etc... + +----- +0.4a2 +----- + + * Added ``ez_setup.py`` installer/bootstrap script to make initial setuptools + installation easier, and to allow distributions using setuptools to avoid + having to include setuptools in their source distribution. + + * All downloads are now managed by the ``PackageIndex`` class (which is now + subclassable and replaceable), so that embedders can more easily override + download logic, give download progress reports, etc. The class has also + been moved to the new ``setuptools.package_index`` module. + + * The ``Installer`` class no longer handles downloading, manages a temporary + directory, or tracks the ``zip_ok`` option. Downloading is now handled + by ``PackageIndex``, and ``Installer`` has become an ``easy_install`` + command class based on ``setuptools.Command``. + + * There is a new ``setuptools.sandbox.run_setup()`` API to invoke a setup + script in a directory sandbox, and a new ``setuptools.archive_util`` module + with an ``unpack_archive()`` API. These were split out of EasyInstall to + allow reuse by other tools and applications. + + * ``setuptools.Command`` now supports reinitializing commands using keyword + arguments to set/reset options. Also, ``Command`` subclasses can now set + their ``command_consumes_arguments`` attribute to ``True`` in order to + receive an ``args`` option containing the rest of the command line. + +----- +0.3a2 +----- + + * Added new options to ``bdist_egg`` to allow tagging the egg's version number + with a subversion revision number, the current date, or an explicit tag + value. Run ``setup.py bdist_egg --help`` to get more information. + + * Misc. bug fixes + +----- +0.3a1 +----- + + * Initial release. 
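+
+   As a minimal illustration of the ``setuptools.Command`` keyword support
+   and ``command_consumes_arguments`` behavior described under 0.4a2 above
+   (the command name ``echo_args`` is hypothetical; the pattern mirrors the
+   ``alias`` command included later in this commit)::
+
+       from setuptools import Command
+
+       class echo_args(Command):
+           description = "echo the leftover command-line arguments"
+           user_options = []
+           command_consumes_arguments = True  # leftover argv arrives as 'args'
+
+           def initialize_options(self):
+               self.args = None
+
+           def finalize_options(self):
+               pass
+
+           def run(self):
+               print(self.args)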
+ + diff --git a/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/RECORD b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/RECORD new file mode 100644 index 0000000..c07e051 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/RECORD @@ -0,0 +1,139 @@ +pkg_resources.py,sha256=6yiyV1uJ8aEQki96lMSYCmR7BFMX5dXC8cks9HTpXbM,101340 +easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126 +setuptools/__init__.py,sha256=CliC3Pe-ej7j-iPQ1fu9Rkh2DdrfkBzRcSIJ3vF5dmg,5195 +setuptools/script (dev).tmpl,sha256=f7MR17dTkzaqkCMSVseyOCMVrPVSMdmTQsaB8cZzfuI,201 +setuptools/dist.py,sha256=tzUrozVQmFJxuvRVyisQqT_JURxR5OgJ3zHaLvFq2Is,33406 +setuptools/lib2to3_ex.py,sha256=6jPF9sJuHiz0cyg4cwIBLl2VMAxcl3GYSZwWAOuJplU,1998 +setuptools/ssl_support.py,sha256=FASqXlRCmXAi6LUWLUIo0u14MpJqHBgkOc5KPHSRrtI,8044 +setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138 +setuptools/py27compat.py,sha256=CGj-jZcFgHUkrEdLvArkxHj96tAaMbG2-yJtUVU7QVI,306 +setuptools/package_index.py,sha256=IOHE81H91eEcR80gMquyQuU9Rh9cv7_6JOLLB0iNbhs,38943 +setuptools/py26compat.py,sha256=ggKS8_aZWWHHS792vF3uXp5vmUkGNk3vjLreLTHr_-Q,431 +setuptools/unicode_utils.py,sha256=gvhAHRj1LELCz-1MP3rfXGi__O1CAm5aksO9Njd2lpU,981 +setuptools/depends.py,sha256=gMRnrqQSr_Yp_wf09O88vKSQah1YjjEi5PsDNezM2Hs,6370 +setuptools/py31compat.py,sha256=O3X_wdWrvXTifeSFbRaCMuc23cDhMHJn7QlITb5cQ8E,1637 +setuptools/archive_util.py,sha256=xr6Xl-PbXymPMuuq5TokDMs7SvKMVKsjUegw8mpj7_g,6556 +setuptools/sandbox.py,sha256=U5QwtrByF-ITHzGl1FyxhoRYdvNeic6-Ie-N6XW0Ybc,10430 +setuptools/site-patch.py,sha256=K-0-cAx36mX_PG-qPZwosG9ZLCliRjquKQ4nHiJvvzg,2389 +setuptools/extension.py,sha256=ovUmlg52d7t7LZHi5ywxFYG7EitWQbLBURv58hXPnoE,1731 +setuptools/version.py,sha256=qJXv3pmLi_JEZtT4o8yRqpzTlUe5fis2m5X-q9Ku-k8,22 +setuptools/compat.py,sha256=-Hl58PuLOEHUDM3-qsgmk50qNiyLLF3RgBgJ-eGGZG0,2094 +setuptools/svn_utils.py,sha256=wRy2bp7J3IRyYNWYKDQhH0yvvFXLQqo76Jk1IXdZXWs,18813 +setuptools/tests/__init__.py,sha256=6dOlIFbLaq185Y7B7wNUXwMGl1UCGGdM98pYTzMDmYQ,12531 +setuptools/tests/test_sandbox.py,sha256=bdUzddjT6dojVbFOHQPFqqYaLGAMcLKVlWI_ADcY9SE,2364 +setuptools/tests/test_svn.py,sha256=UI9rRTWQDw7NGRzSKmgcsnkDGX8M4QDj447kCXvf5D8,7806 +setuptools/tests/test_bdist_egg.py,sha256=yD2dP8ApUtyo4rpwzSEGgeLjxihU7xHVAK2O2bCL52U,1962 +setuptools/tests/test_packageindex.py,sha256=8vay-a5Dry_cg71WFA1rlofIWO5c9KI6D6PKN4WKKiI,7625 +setuptools/tests/script-with-bom.py,sha256=nWOGL62VEQBsH5GaZvCyRyYqobziynGaqQJisffifsc,46 +setuptools/tests/test_find_packages.py,sha256=McPOROBbIR7JK5a2tMWTA88S6LrbcGMnPAZ66Oq32AQ,6005 +setuptools/tests/test_integration.py,sha256=gXHi9iQ9LMqAjWcoGe4yvS-H3UczoOW3ij-x-afP0a4,2506 +setuptools/tests/test_upload_docs.py,sha256=N__IVGihRBRqA3PetRcIDmNFu1XOhR7ix0e50xMuq_M,2139 +setuptools/tests/environment.py,sha256=Sl9Pok7ZEakC6YXP8urDkh9-XxQpoJAzyzzZUqOGOXQ,4658 +setuptools/tests/test_easy_install.py,sha256=vjr2KNv78vgLg7otZeqz9yPXe0M7gOxiOE5MRvARblk,15704 +setuptools/tests/py26compat.py,sha256=i_JBukWMEat4AM1FtU8DAd06r0gjZR3uma_jb_gxEXU,267 +setuptools/tests/server.py,sha256=Fqk53860mwB_wDNOzixZLjCq_c8_1kaGiXBm5fHco2c,2651 +setuptools/tests/test_dist_info.py,sha256=ZgVLERe6WZpWUcwrLGk2b_cSOMZMASLV1hTV4hgQLG0,2615 +setuptools/tests/test_build_ext.py,sha256=mfWDSPPR2auCi3AbZILJp-175Dj0551d6xpvM0md4zE,650 +setuptools/tests/test_sdist.py,sha256=fafOcXHGriVAp1VwUEKEyzmyQ-9in7utziPWHC8iOLU,17986 +setuptools/tests/test_egg_info.py,sha256=mJ8BDZuIWe_4-2mo_OQGAdi0DG1mS3pm0ZAEPdS_2vc,6745 
+setuptools/tests/test_resources.py,sha256=-YyuIR1EGofI44SycRIhF59NCxP_r8MD8iZj7aBGXHg,23639 +setuptools/tests/test_markerlib.py,sha256=UYBTjaug56cWxIwlCubdSTGZ-s9bqB1co54636x0xfo,2506 +setuptools/tests/test_develop.py,sha256=JFvKRFbjzsExQBmg1kN-dWijhPY4uGO1TMQFDy9QFoc,3496 +setuptools/tests/test_test.py,sha256=pnN_pLgda5uEJU0-X8n04g90OaxQ5_CTsLTxy2SqR80,3697 +setuptools/command/__init__.py,sha256=gQMXoLa0TtUtmUZY0ptSouWWA5kcTArWyDQ6QwkjoVQ,554 +setuptools/command/rotate.py,sha256=Qm7SOa32L9XG5b_C7_SSYvKM5rqFXroeQ6w8GXIsY2o,2038 +setuptools/command/egg_info.py,sha256=CY_h53JLZXafopM-E8-WbxKZd_xlp4SUqRWPQsJAYA4,15204 +setuptools/command/develop.py,sha256=uyRwABU1JnhQhZO9rS8-nenkzLwKKJt2P7WPnsXrHd4,6610 +setuptools/command/install.py,sha256=QwaFiZRU3ytIHoPh8uJ9EqV3Fu9C4ca4B7UGAo95tws,4685 +setuptools/command/bdist_egg.py,sha256=vGysGAHsTGSbSUwEBtHZ-Mtz54nU_tSwcc8DlnHfM7A,17606 +setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-jW2-pm0,637 +setuptools/command/easy_install.py,sha256=QDgivpvBem2Y092MMwXo9hpcytuMPDUhpLLee8o239M,85008 +setuptools/command/upload_docs.py,sha256=617bECKkdDizKaV_kN62hXjgmwKMbpY4l3zecNEdjNk,6811 +setuptools/command/test.py,sha256=-ZbBhuoKrfOx_pgA-53H-qRw6Z9evRP8_z1KdKI7yvw,6481 +setuptools/command/install_lib.py,sha256=qqiImHhZXPhtH9hYLcdol0rFnvrS3U2cOe3FKIyzRds,3160 +setuptools/command/alias.py,sha256=1sLQxZcNh6dDQpDmm4G7UGGTol83nY1NTPmNBbm2siI,2381 +setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658 +setuptools/command/sdist.py,sha256=EKzb8zEYkx-FFEN71kGWKrxUKsLHhXEOQHjmnLe28oY,8559 +setuptools/command/register.py,sha256=bHlMm1qmBbSdahTOT8w6UhA-EgeQIz7p6cD-qOauaiI,270 +setuptools/command/install_egg_info.py,sha256=Wy-ElNCAAY0kAx4tZB6mlXoVme1w_UZquaPljW5j9M0,4770 +setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508 +setuptools/command/build_py.py,sha256=FtFC7cRlKQCHT4-1yDey3igltuiGwFh8VsFg2U0B0kw,8644 +setuptools/command/setopt.py,sha256=Z3_kav60D2XHZjM0cRhGo7wbBYo7nr4U_U-wMMbpmu8,5080 +setuptools/command/build_ext.py,sha256=cukClSx-aLSuWmhWLmXPNDNr9ZHpFL7a01z71bwy9Go,12324 +setuptools/command/install_scripts.py,sha256=evsgRosqRxlww6l7BBx43RINpAbLd1raVkW2-dmFCyU,2041 +setuptools-5.5.1.dist-info/metadata.json,sha256=RiJE_38i771Czmivxe6h_nLkXu7FZKkPBzxB3zP5pU0,4800 +setuptools-5.5.1.dist-info/top_level.txt,sha256=79Sv9j5n-RUCvOJc3EcN2glRRw3uLcx2bkqsNuTLLJA,49 +setuptools-5.5.1.dist-info/DESCRIPTION.rst,sha256=ORIi26mtHm7Fp87FPFRezfmWHb785qcqEBzRMBxMlyY,80931 +setuptools-5.5.1.dist-info/entry_points.txt,sha256=wxdNYALrJxn2PJtYZXehrEYO05eRXr7j2I3vzIu3hK4,2872 +setuptools-5.5.1.dist-info/METADATA,sha256=8JuRjetQyrSxu5q7jkXPE7jfMKk2X9E9dtA9qU0l3EA,82293 +setuptools-5.5.1.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110 +setuptools-5.5.1.dist-info/dependency_links.txt,sha256=UaFV2I99Rbdie_2lV4pEX6M2jKNDN7RhFSbiL1-PDiY,221 +setuptools-5.5.1.dist-info/zip-safe,sha256=frcCV1k9oG9oKj3dpUqdJg1PxRT2RSN_XKdLCPjaYaY,2 +setuptools-5.5.1.dist-info/RECORD,, +_markerlib/__init__.py,sha256=GSmhZqvAitLJHhSgtqqusfq2nJ_ClP3oy3Lm0uZLIsU,552 +_markerlib/markers.py,sha256=YuFp0-osufFIoqnzG3L0Z2fDCx4Vln3VUDeXJ2DA_1I,3979 +/home/pi/oml/platform_linux_armv7l/p34/bin/easy_install,sha256=uQ3IO7ExioL_TGpEli0OwJKJCfQsRJ7_eME8XaaRiok,269 +/home/pi/oml/platform_linux_armv7l/p34/bin/easy_install-3.4,sha256=uQ3IO7ExioL_TGpEli0OwJKJCfQsRJ7_eME8XaaRiok,269 +setuptools/tests/__pycache__/py26compat.cpython-34.pyc,, +setuptools/__pycache__/unicode_utils.cpython-34.pyc,, 
+setuptools/__pycache__/ssl_support.cpython-34.pyc,, +setuptools/tests/__pycache__/test_easy_install.cpython-34.pyc,, +setuptools/tests/__pycache__/test_sdist.cpython-34.pyc,, +setuptools/command/__pycache__/develop.cpython-34.pyc,, +setuptools/command/__pycache__/sdist.cpython-34.pyc,, +setuptools/command/__pycache__/egg_info.cpython-34.pyc,, +setuptools/__pycache__/lib2to3_ex.cpython-34.pyc,, +setuptools/__pycache__/svn_utils.cpython-34.pyc,, +setuptools/command/__pycache__/setopt.cpython-34.pyc,, +setuptools/tests/__pycache__/test_packageindex.cpython-34.pyc,, +setuptools/tests/__pycache__/test_bdist_egg.cpython-34.pyc,, +setuptools/tests/__pycache__/server.cpython-34.pyc,, +setuptools/tests/__pycache__/script-with-bom.cpython-34.pyc,, +setuptools/__pycache__/depends.cpython-34.pyc,, +setuptools/tests/__pycache__/test_resources.cpython-34.pyc,, +setuptools/tests/__pycache__/test_develop.cpython-34.pyc,, +setuptools/tests/__pycache__/test_upload_docs.cpython-34.pyc,, +__pycache__/easy_install.cpython-34.pyc,, +setuptools/__pycache__/py27compat.cpython-34.pyc,, +setuptools/command/__pycache__/alias.cpython-34.pyc,, +setuptools/tests/__pycache__/environment.cpython-34.pyc,, +setuptools/command/__pycache__/install_lib.cpython-34.pyc,, +setuptools/tests/__pycache__/test_find_packages.cpython-34.pyc,, +setuptools/__pycache__/version.cpython-34.pyc,, +setuptools/command/__pycache__/bdist_egg.cpython-34.pyc,, +setuptools/command/__pycache__/install.cpython-34.pyc,, +setuptools/__pycache__/site-patch.cpython-34.pyc,, +setuptools/command/__pycache__/bdist_wininst.cpython-34.pyc,, +setuptools/__pycache__/package_index.cpython-34.pyc,, +setuptools/__pycache__/__init__.cpython-34.pyc,, +setuptools/__pycache__/dist.cpython-34.pyc,, +setuptools/command/__pycache__/register.cpython-34.pyc,, +setuptools/tests/__pycache__/test_svn.cpython-34.pyc,, +setuptools/tests/__pycache__/test_dist_info.cpython-34.pyc,, +setuptools/tests/__pycache__/__init__.cpython-34.pyc,, +setuptools/command/__pycache__/build_py.cpython-34.pyc,, +setuptools/command/__pycache__/__init__.cpython-34.pyc,, +setuptools/command/__pycache__/saveopts.cpython-34.pyc,, +_markerlib/__pycache__/markers.cpython-34.pyc,, +setuptools/tests/__pycache__/test_sandbox.cpython-34.pyc,, +setuptools/tests/__pycache__/test_test.cpython-34.pyc,, +setuptools/tests/__pycache__/test_build_ext.cpython-34.pyc,, +setuptools/command/__pycache__/install_scripts.cpython-34.pyc,, +setuptools/tests/__pycache__/test_egg_info.cpython-34.pyc,, +setuptools/command/__pycache__/rotate.cpython-34.pyc,, +setuptools/command/__pycache__/test.cpython-34.pyc,, +setuptools/__pycache__/extension.cpython-34.pyc,, +__pycache__/pkg_resources.cpython-34.pyc,, +setuptools/command/__pycache__/upload_docs.cpython-34.pyc,, +setuptools/__pycache__/sandbox.cpython-34.pyc,, +setuptools/__pycache__/compat.cpython-34.pyc,, +setuptools/__pycache__/py26compat.cpython-34.pyc,, +setuptools/__pycache__/archive_util.cpython-34.pyc,, +setuptools/command/__pycache__/install_egg_info.cpython-34.pyc,, +setuptools/command/__pycache__/easy_install.cpython-34.pyc,, +_markerlib/__pycache__/__init__.cpython-34.pyc,, +setuptools/tests/__pycache__/test_markerlib.cpython-34.pyc,, +setuptools/command/__pycache__/build_ext.cpython-34.pyc,, +setuptools/command/__pycache__/bdist_rpm.cpython-34.pyc,, +setuptools/tests/__pycache__/test_integration.cpython-34.pyc,, +setuptools/__pycache__/py31compat.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/WHEEL 
b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/WHEEL new file mode 100644 index 0000000..9dff69d --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.24.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/dependency_links.txt b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/dependency_links.txt new file mode 100644 index 0000000..b454c16 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/dependency_links.txt @@ -0,0 +1,2 @@ +https://pypi.python.org/packages/source/c/certifi/certifi-1.0.1.tar.gz#md5=45f5cb94b8af9e1df0f9450a8f61b790 +https://pypi.python.org/packages/source/w/wincertstore/wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2 diff --git a/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/entry_points.txt b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/entry_points.txt new file mode 100644 index 0000000..72a5ffe --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/entry_points.txt @@ -0,0 +1,64 @@ +[console_scripts] +easy_install = setuptools.command.easy_install:main +easy_install-3.4 = setuptools.command.easy_install:main + +[distutils.commands] +alias = setuptools.command.alias:alias +bdist_egg = setuptools.command.bdist_egg:bdist_egg +bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm +bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst +build_ext = setuptools.command.build_ext:build_ext +build_py = setuptools.command.build_py:build_py +develop = setuptools.command.develop:develop +easy_install = setuptools.command.easy_install:easy_install +egg_info = setuptools.command.egg_info:egg_info +install = setuptools.command.install:install +install_egg_info = setuptools.command.install_egg_info:install_egg_info +install_lib = setuptools.command.install_lib:install_lib +install_scripts = setuptools.command.install_scripts:install_scripts +register = setuptools.command.register:register +rotate = setuptools.command.rotate:rotate +saveopts = setuptools.command.saveopts:saveopts +sdist = setuptools.command.sdist:sdist +setopt = setuptools.command.setopt:setopt +test = setuptools.command.test:test +upload_docs = setuptools.command.upload_docs:upload_docs + +[distutils.setup_keywords] +convert_2to3_doctests = setuptools.dist:assert_string_list +dependency_links = setuptools.dist:assert_string_list +eager_resources = setuptools.dist:assert_string_list +entry_points = setuptools.dist:check_entry_points +exclude_package_data = setuptools.dist:check_package_data +extras_require = setuptools.dist:check_extras +include_package_data = setuptools.dist:assert_bool +install_requires = setuptools.dist:check_requirements +namespace_packages = setuptools.dist:check_nsp +package_data = setuptools.dist:check_package_data +packages = setuptools.dist:check_packages +setup_requires = setuptools.dist:check_requirements +test_loader = setuptools.dist:check_importable +test_runner = setuptools.dist:check_importable +test_suite = setuptools.dist:check_test_suite +tests_require = setuptools.dist:check_requirements +use_2to3 = setuptools.dist:assert_bool +use_2to3_exclude_fixers = setuptools.dist:assert_string_list +use_2to3_fixers = setuptools.dist:assert_string_list +zip_safe = setuptools.dist:assert_bool + +[egg_info.writers] +PKG-INFO = setuptools.command.egg_info:write_pkg_info +dependency_links.txt = setuptools.command.egg_info:overwrite_arg 
+depends.txt = setuptools.command.egg_info:warn_depends_obsolete +eager_resources.txt = setuptools.command.egg_info:overwrite_arg +entry_points.txt = setuptools.command.egg_info:write_entries +namespace_packages.txt = setuptools.command.egg_info:overwrite_arg +requires.txt = setuptools.command.egg_info:write_requirements +top_level.txt = setuptools.command.egg_info:write_toplevel_names + +[setuptools.file_finders] +svn_cvs = setuptools.command.sdist:_default_revctrl + +[setuptools.installation] +eggsecutable = setuptools.command.easy_install:bootstrap + diff --git a/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/metadata.json b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/metadata.json new file mode 100644 index 0000000..1001c9d --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"name": "setuptools", "extras": ["certs", "ssl"], "run_requires": [{"requires": ["certifi (==1.0.1)"], "extra": "certs"}, {"requires": ["wincertstore (==0.2)"], "environment": "sys_platform=='win32'", "extra": "ssl"}], "extensions": {"python.exports": {"distutils.setup_keywords": {"dependency_links": "setuptools.dist:assert_string_list", "tests_require": "setuptools.dist:check_requirements", "use_2to3_exclude_fixers": "setuptools.dist:assert_string_list", "convert_2to3_doctests": "setuptools.dist:assert_string_list", "install_requires": "setuptools.dist:check_requirements", "use_2to3_fixers": "setuptools.dist:assert_string_list", "test_suite": "setuptools.dist:check_test_suite", "use_2to3": "setuptools.dist:assert_bool", "extras_require": "setuptools.dist:check_extras", "namespace_packages": "setuptools.dist:check_nsp", "package_data": "setuptools.dist:check_package_data", "packages": "setuptools.dist:check_packages", "setup_requires": "setuptools.dist:check_requirements", "entry_points": "setuptools.dist:check_entry_points", "eager_resources": "setuptools.dist:assert_string_list", "include_package_data": "setuptools.dist:assert_bool", "exclude_package_data": "setuptools.dist:check_package_data", "test_loader": "setuptools.dist:check_importable", "zip_safe": "setuptools.dist:assert_bool", "test_runner": "setuptools.dist:check_importable"}, "setuptools.installation": {"eggsecutable": "setuptools.command.easy_install:bootstrap"}, "egg_info.writers": {"namespace_packages.txt": "setuptools.command.egg_info:overwrite_arg", "top_level.txt": "setuptools.command.egg_info:write_toplevel_names", "dependency_links.txt": "setuptools.command.egg_info:overwrite_arg", "entry_points.txt": "setuptools.command.egg_info:write_entries", "depends.txt": "setuptools.command.egg_info:warn_depends_obsolete", "PKG-INFO": "setuptools.command.egg_info:write_pkg_info", "eager_resources.txt": "setuptools.command.egg_info:overwrite_arg", "requires.txt": "setuptools.command.egg_info:write_requirements"}, "setuptools.file_finders": {"svn_cvs": "setuptools.command.sdist:_default_revctrl"}, "console_scripts": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.4": "setuptools.command.easy_install:main"}, "distutils.commands": {"easy_install": "setuptools.command.easy_install:easy_install", "develop": "setuptools.command.develop:develop", "bdist_egg": "setuptools.command.bdist_egg:bdist_egg", "install_egg_info": "setuptools.command.install_egg_info:install_egg_info", "install_lib": "setuptools.command.install_lib:install_lib", "bdist_wininst": "setuptools.command.bdist_wininst:bdist_wininst", "egg_info": "setuptools.command.egg_info:egg_info", "rotate": 
"setuptools.command.rotate:rotate", "register": "setuptools.command.register:register", "install": "setuptools.command.install:install", "bdist_rpm": "setuptools.command.bdist_rpm:bdist_rpm", "test": "setuptools.command.test:test", "sdist": "setuptools.command.sdist:sdist", "build_ext": "setuptools.command.build_ext:build_ext", "build_py": "setuptools.command.build_py:build_py", "alias": "setuptools.command.alias:alias", "install_scripts": "setuptools.command.install_scripts:install_scripts", "upload_docs": "setuptools.command.upload_docs:upload_docs", "saveopts": "setuptools.command.saveopts:saveopts", "setopt": "setuptools.command.setopt:setopt"}}, "python.details": {"document_names": {"description": "DESCRIPTION.rst"}, "contacts": [{"name": "Python Packaging Authority", "role": "author", "email": "distutils-sig@python.org"}], "project_urls": {"Home": "https://bitbucket.org/pypa/setuptools"}}, "python.commands": {"wrap_console": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.4": "setuptools.command.easy_install:main"}}}, "version": "5.5.1", "license": "PSF or ZPL", "summary": "Easily download, build, install, upgrade, and uninstall Python packages", "metadata_version": "2.0", "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Python Software Foundation License", "License :: OSI Approved :: Zope Public License", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Archiving :: Packaging", "Topic :: System :: Systems Administration", "Topic :: Utilities"], "generator": "bdist_wheel (0.24.0)", "keywords": ["CPAN", "PyPI", "distutils", "eggs", "package", "management"], "test_requires": [{"requires": ["setuptools[ssl]", "pytest"]}]} \ No newline at end of file diff --git a/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/top_level.txt b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/top_level.txt new file mode 100644 index 0000000..3263277 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/top_level.txt @@ -0,0 +1,4 @@ +easy_install +pkg_resources +setuptools +_markerlib diff --git a/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/zip-safe b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/zip-safe new file mode 100644 index 0000000..d3f5a12 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-5.5.1.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/lib/python3.4/site-packages/setuptools/__init__.py b/lib/python3.4/site-packages/setuptools/__init__.py new file mode 100644 index 0000000..d99ab2a --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/__init__.py @@ -0,0 +1,154 @@ +"""Extensions to the 'distutils' for large or complex distributions""" + +import os +import sys +import distutils.core +import distutils.filelist +from distutils.core import Command as _Command +from distutils.util import convert_path +from fnmatch import fnmatchcase + +import setuptools.version +from setuptools.extension import Extension +from setuptools.dist import Distribution, Feature, _get_unpatched +from setuptools.depends import Require +from setuptools.compat import filterfalse + +__all__ = [ + 'setup', 
'Distribution', 'Feature', 'Command', 'Extension', 'Require',
+    'find_packages'
+]
+
+__version__ = setuptools.version.__version__
+
+bootstrap_install_from = None
+
+# If we run 2to3 on .py files, should we also convert docstrings?
+# Default: yes; assume that we can detect doctests reliably
+run_2to3_on_doctests = True
+# Standard package names for fixer packages
+lib2to3_fixer_packages = ['lib2to3.fixes']
+
+
+class PackageFinder(object):
+    @classmethod
+    def find(cls, where='.', exclude=(), include=('*',)):
+        """Return a list of all Python packages found within directory 'where'
+
+        'where' should be supplied as a "cross-platform" (i.e. URL-style)
+        path; it will be converted to the appropriate local path syntax.
+        'exclude' is a sequence of package names to exclude; '*' can be used
+        as a wildcard in the names, such that 'foo.*' will exclude all
+        subpackages of 'foo' (but not 'foo' itself).
+
+        'include' is a sequence of package names to include. If it's
+        specified, only the named packages will be included. If it's not
+        specified, all found packages will be included. 'include' can contain
+        shell style wildcard patterns just like 'exclude'.
+
+        The list of included packages is built up first and then any
+        explicitly excluded packages are removed from it.
+        """
+        out = cls._find_packages_iter(convert_path(where))
+        out = cls.require_parents(out)
+        includes = cls._build_filter(*include)
+        excludes = cls._build_filter('ez_setup', '*__pycache__', *exclude)
+        out = filter(includes, out)
+        out = filterfalse(excludes, out)
+        return list(out)
+
+    @staticmethod
+    def require_parents(packages):
+        """
+        Exclude any apparent package that doesn't include its parent.
+
+        For example, exclude 'foo.bar' if 'foo' is not present.
+        """
+        found = []
+        for pkg in packages:
+            base, sep, child = pkg.rpartition('.')
+            if base and base not in found:
+                continue
+            found.append(pkg)
+            yield pkg
+
+    @staticmethod
+    def _all_dirs(base_path):
+        """
+        Return all dirs in base_path, relative to base_path
+        """
+        for root, dirs, files in os.walk(base_path, followlinks=True):
+            for dir in dirs:
+                yield os.path.relpath(os.path.join(root, dir), base_path)
+
+    @classmethod
+    def _find_packages_iter(cls, base_path):
+        dirs = cls._all_dirs(base_path)
+        suitable = filterfalse(lambda n: '.' in n, dirs)
+        return (
+            path.replace(os.path.sep, '.')
+            for path in suitable
+            if cls._looks_like_package(os.path.join(base_path, path))
+        )
+
+    @staticmethod
+    def _looks_like_package(path):
+        return os.path.isfile(os.path.join(path, '__init__.py'))
+
+    @staticmethod
+    def _build_filter(*patterns):
+        """
+        Given a list of patterns, return a callable that will be true only if
+        the input matches one of the patterns.
+ """ + return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) + +class PEP420PackageFinder(PackageFinder): + @staticmethod + def _looks_like_package(path): + return True + +find_packages = PackageFinder.find + +setup = distutils.core.setup + +_Command = _get_unpatched(_Command) + +class Command(_Command): + __doc__ = _Command.__doc__ + + command_consumes_arguments = False + + def __init__(self, dist, **kw): + # Add support for keyword arguments + _Command.__init__(self,dist) + for k,v in kw.items(): + setattr(self,k,v) + + def reinitialize_command(self, command, reinit_subcommands=0, **kw): + cmd = _Command.reinitialize_command(self, command, reinit_subcommands) + for k,v in kw.items(): + setattr(cmd,k,v) # update command with keywords + return cmd + +distutils.core.Command = Command # we can't patch distutils.cmd, alas + +def findall(dir = os.curdir): + """Find all files under 'dir' and return the list of full filenames + (relative to 'dir'). + """ + all_files = [] + for base, dirs, files in os.walk(dir): + if base==os.curdir or base.startswith(os.curdir+os.sep): + base = base[2:] + if base: + files = [os.path.join(base, f) for f in files] + all_files.extend(filter(os.path.isfile, files)) + return all_files + +distutils.filelist.findall = findall # fix findall bug in distutils. + +# sys.dont_write_bytecode was introduced in Python 2.6. +_dont_write_bytecode = getattr(sys, 'dont_write_bytecode', + bool(os.environ.get("PYTHONDONTWRITEBYTECODE"))) diff --git a/lib/python3.4/site-packages/setuptools/archive_util.py b/lib/python3.4/site-packages/setuptools/archive_util.py new file mode 100644 index 0000000..67a67e2 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/archive_util.py @@ -0,0 +1,166 @@ +"""Utilities for extracting common archive formats""" + + +__all__ = [ + "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", + "UnrecognizedFormat", "extraction_drivers", "unpack_directory", +] + +import zipfile +import tarfile +import os +import shutil +import posixpath +import contextlib +from pkg_resources import ensure_directory, ContextualZipFile +from distutils.errors import DistutilsError + +class UnrecognizedFormat(DistutilsError): + """Couldn't recognize the archive type""" + +def default_filter(src,dst): + """The default progress/filter callback; returns True for all files""" + return dst + + +def unpack_archive(filename, extract_dir, progress_filter=default_filter, + drivers=None): + """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` + + `progress_filter` is a function taking two arguments: a source path + internal to the archive ('/'-separated), and a filesystem path where it + will be extracted. The callback must return the desired extract path + (which may be the same as the one passed in), or else ``None`` to skip + that file or directory. The callback can thus be used to report on the + progress of the extraction, as well as to filter the items extracted or + alter their extraction paths. + + `drivers`, if supplied, must be a non-empty sequence of functions with the + same signature as this function (minus the `drivers` argument), that raise + ``UnrecognizedFormat`` if they do not support extracting the designated + archive type. The `drivers` are tried in sequence until one is found that + does not raise an error, or until all are exhausted (in which case + ``UnrecognizedFormat`` is raised). 
If you do not supply a sequence of + drivers, the module's ``extraction_drivers`` constant will be used, which + means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that + order. + """ + for driver in drivers or extraction_drivers: + try: + driver(filename, extract_dir, progress_filter) + except UnrecognizedFormat: + continue + else: + return + else: + raise UnrecognizedFormat( + "Not a recognized archive type: %s" % filename + ) + + +def unpack_directory(filename, extract_dir, progress_filter=default_filter): + """"Unpack" a directory, using the same interface as for archives + + Raises ``UnrecognizedFormat`` if `filename` is not a directory + """ + if not os.path.isdir(filename): + raise UnrecognizedFormat("%s is not a directory" % (filename,)) + + paths = {filename:('',extract_dir)} + for base, dirs, files in os.walk(filename): + src,dst = paths[base] + for d in dirs: + paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d) + for f in files: + target = os.path.join(dst,f) + target = progress_filter(src+f, target) + if not target: + continue # skip non-files + ensure_directory(target) + f = os.path.join(base,f) + shutil.copyfile(f, target) + shutil.copystat(f, target) + + +def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): + """Unpack zip `filename` to `extract_dir` + + Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined + by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation + of the `progress_filter` argument. + """ + + if not zipfile.is_zipfile(filename): + raise UnrecognizedFormat("%s is not a zip file" % (filename,)) + + with ContextualZipFile(filename) as z: + for info in z.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' in name.split('/'): + continue + + target = os.path.join(extract_dir, *name.split('/')) + target = progress_filter(name, target) + if not target: + continue + if name.endswith('/'): + # directory + ensure_directory(target) + else: + # file + ensure_directory(target) + data = z.read(info.filename) + f = open(target,'wb') + try: + f.write(data) + finally: + f.close() + del data + unix_attributes = info.external_attr >> 16 + if unix_attributes: + os.chmod(target, unix_attributes) + + +def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): + """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` + + Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined + by ``tarfile.open()``). See ``unpack_archive()`` for an explanation + of the `progress_filter` argument. + """ + try: + tarobj = tarfile.open(filename) + except tarfile.TarError: + raise UnrecognizedFormat( + "%s is not a compressed or uncompressed tar file" % (filename,) + ) + with contextlib.closing(tarobj): + tarobj.chown = lambda *args: None # don't do any chowning! + for member in tarobj: + name = member.name + # don't extract absolute paths or ones with .. in them + if not name.startswith('/') and '..' 
not in name.split('/'):
+                prelim_dst = os.path.join(extract_dir, *name.split('/'))
+
+                # resolve any links and extract the link targets as normal files
+                while member is not None and (member.islnk() or member.issym()):
+                    linkpath = member.linkname
+                    if member.issym():
+                        linkpath = posixpath.join(posixpath.dirname(member.name), linkpath)
+                    linkpath = posixpath.normpath(linkpath)
+                    member = tarobj._getmember(linkpath)
+
+                if member is not None and (member.isfile() or member.isdir()):
+                    final_dst = progress_filter(name, prelim_dst)
+                    if final_dst:
+                        if final_dst.endswith(os.sep):
+                            final_dst = final_dst[:-1]
+                        try:
+                            tarobj._extract_member(member, final_dst) # XXX Ugh
+                        except tarfile.ExtractError:
+                            pass # chown/chmod/mkfifo/mknode/makedev failed
+        return True
+
+extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
diff --git a/lib/python3.4/site-packages/setuptools/command/__init__.py b/lib/python3.4/site-packages/setuptools/command/__init__.py
new file mode 100644
index 0000000..f6dbc39
--- /dev/null
+++ b/lib/python3.4/site-packages/setuptools/command/__init__.py
@@ -0,0 +1,18 @@
+__all__ = [
+    'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
+    'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
+    'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts',
+    'register', 'bdist_wininst', 'upload_docs',
+]
+
+from distutils.command.bdist import bdist
+import sys
+
+from setuptools.command import install_scripts
+
+
+if 'egg' not in bdist.format_commands:
+    bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
+    bdist.format_commands.append('egg')
+
+del bdist, sys
diff --git a/lib/python3.4/site-packages/setuptools/command/alias.py b/lib/python3.4/site-packages/setuptools/command/alias.py
new file mode 100644
index 0000000..452a924
--- /dev/null
+++ b/lib/python3.4/site-packages/setuptools/command/alias.py
@@ -0,0 +1,78 @@
+from distutils.errors import DistutilsOptionError
+
+from setuptools.command.setopt import edit_config, option_base, config_file
+
+
+def shquote(arg):
+    """Quote an argument for later parsing by shlex.split()"""
+    for c in '"', "'", "\\", "#":
+        if c in arg:
+            return repr(arg)
+    if arg.split() != [arg]:
+        return repr(arg)
+    return arg
+
+
+class alias(option_base):
+    """Define a shortcut that invokes one or more commands"""
+
+    description = "define a shortcut to invoke one or more commands"
+    command_consumes_arguments = True
+
+    user_options = [
+        ('remove', 'r', 'remove (unset) the alias'),
+    ] + option_base.user_options
+
+    boolean_options = option_base.boolean_options + ['remove']
+
+    def initialize_options(self):
+        option_base.initialize_options(self)
+        self.args = None
+        self.remove = None
+
+    def finalize_options(self):
+        option_base.finalize_options(self)
+        if self.remove and len(self.args) != 1:
+            raise DistutilsOptionError(
+                "Must specify exactly one argument (the alias name) when "
+                "using --remove"
+            )
+
+    def run(self):
+        aliases = self.distribution.get_option_dict('aliases')
+
+        if not self.args:
+            print("Command Aliases")
+            print("---------------")
+            for alias in aliases:
+                print("setup.py alias", format_alias(alias, aliases))
+            return
+
+        elif len(self.args) == 1:
+            alias, = self.args
+            if self.remove:
+                command = None
+            elif alias in aliases:
+                print("setup.py alias", format_alias(alias, aliases))
+                return
+            else:
+                print("No alias definition found for %r" % alias)
+                return
+        else:
+            alias = self.args[0]
+            command = ' '.join(map(shquote, self.args[1:]))
+
edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run) + + +def format_alias(name, aliases): + source, command = aliases[name] + if source == config_file('global'): + source = '--global-config ' + elif source == config_file('user'): + source = '--user-config ' + elif source == config_file('local'): + source = '' + else: + source = '--filename=%r' % source + return source + name + ' ' + command diff --git a/lib/python3.4/site-packages/setuptools/command/bdist_egg.py b/lib/python3.4/site-packages/setuptools/command/bdist_egg.py new file mode 100644 index 0000000..34fdeec --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/bdist_egg.py @@ -0,0 +1,479 @@ +"""setuptools.command.bdist_egg + +Build .egg distributions""" + +# This module should be kept compatible with Python 2.3 +from distutils.errors import DistutilsSetupError +from distutils.dir_util import remove_tree, mkpath +from distutils import log +from types import CodeType +import sys +import os +import marshal +import textwrap + +from pkg_resources import get_build_platform, Distribution, ensure_directory +from pkg_resources import EntryPoint +from setuptools.compat import basestring +from setuptools.extension import Library +from setuptools import Command + +try: + # Python 2.7 or >=3.2 + from sysconfig import get_path, get_python_version + + def _get_purelib(): + return get_path("purelib") +except ImportError: + from distutils.sysconfig import get_python_lib, get_python_version + + def _get_purelib(): + return get_python_lib(False) + + +def strip_module(filename): + if '.' in filename: + filename = os.path.splitext(filename)[0] + if filename.endswith('module'): + filename = filename[:-6] + return filename + + +def write_stub(resource, pyfile): + _stub_template = textwrap.dedent(""" + def __bootstrap__(): + global __bootstrap__, __loader__, __file__ + import sys, pkg_resources, imp + __file__ = pkg_resources.resource_filename(__name__, %r) + __loader__ = None; del __bootstrap__, __loader__ + imp.load_dynamic(__name__,__file__) + __bootstrap__() + """).lstrip() + with open(pyfile, 'w') as f: + f.write(_stub_template % resource) + + +class bdist_egg(Command): + description = "create an \"egg\" distribution" + + user_options = [ + ('bdist-dir=', 'b', + "temporary directory for creating the distribution"), + ('plat-name=', 'p', "platform name to embed in generated filenames " + "(default: %s)" % get_build_platform()), + ('exclude-source-files', None, + "remove all .py files from the generated egg"), + ('keep-temp', 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive"), + ('dist-dir=', 'd', + "directory to put final built distributions in"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + ] + + boolean_options = [ + 'keep-temp', 'skip-build', 'exclude-source-files' + ] + + def initialize_options(self): + self.bdist_dir = None + self.plat_name = None + self.keep_temp = 0 + self.dist_dir = None + self.skip_build = 0 + self.egg_output = None + self.exclude_source_files = None + + def finalize_options(self): + ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info") + self.egg_info = ei_cmd.egg_info + + if self.bdist_dir is None: + bdist_base = self.get_finalized_command('bdist').bdist_base + self.bdist_dir = os.path.join(bdist_base, 'egg') + + if self.plat_name is None: + self.plat_name = get_build_platform() + + self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) + + if self.egg_output is None: + + # Compute filename of 
the output egg + basename = Distribution( + None, None, ei_cmd.egg_name, ei_cmd.egg_version, + get_python_version(), + self.distribution.has_ext_modules() and self.plat_name + ).egg_name() + + self.egg_output = os.path.join(self.dist_dir, basename + '.egg') + + def do_install_data(self): + # Hack for packages that install data to install's --install-lib + self.get_finalized_command('install').install_lib = self.bdist_dir + + site_packages = os.path.normcase(os.path.realpath(_get_purelib())) + old, self.distribution.data_files = self.distribution.data_files, [] + + for item in old: + if isinstance(item, tuple) and len(item) == 2: + if os.path.isabs(item[0]): + realpath = os.path.realpath(item[0]) + normalized = os.path.normcase(realpath) + if normalized == site_packages or normalized.startswith( + site_packages + os.sep + ): + item = realpath[len(site_packages) + 1:], item[1] + # XXX else: raise ??? + self.distribution.data_files.append(item) + + try: + log.info("installing package data to %s" % self.bdist_dir) + self.call_command('install_data', force=0, root=None) + finally: + self.distribution.data_files = old + + def get_outputs(self): + return [self.egg_output] + + def call_command(self, cmdname, **kw): + """Invoke reinitialized command `cmdname` with keyword args""" + for dirname in INSTALL_DIRECTORY_ATTRS: + kw.setdefault(dirname, self.bdist_dir) + kw.setdefault('skip_build', self.skip_build) + kw.setdefault('dry_run', self.dry_run) + cmd = self.reinitialize_command(cmdname, **kw) + self.run_command(cmdname) + return cmd + + def run(self): + # Generate metadata first + self.run_command("egg_info") + # We run install_lib before install_data, because some data hacks + # pull their data path from the install_lib command. + log.info("installing library code to %s" % self.bdist_dir) + instcmd = self.get_finalized_command('install') + old_root = instcmd.root + instcmd.root = None + if self.distribution.has_c_libraries() and not self.skip_build: + self.run_command('build_clib') + cmd = self.call_command('install_lib', warn_dir=0) + instcmd.root = old_root + + all_outputs, ext_outputs = self.get_ext_outputs() + self.stubs = [] + to_compile = [] + for (p, ext_name) in enumerate(ext_outputs): + filename, ext = os.path.splitext(ext_name) + pyfile = os.path.join(self.bdist_dir, strip_module(filename) + + '.py') + self.stubs.append(pyfile) + log.info("creating stub loader for %s" % ext_name) + if not self.dry_run: + write_stub(os.path.basename(ext_name), pyfile) + to_compile.append(pyfile) + ext_outputs[p] = ext_name.replace(os.sep, '/') + + if to_compile: + cmd.byte_compile(to_compile) + if self.distribution.data_files: + self.do_install_data() + + # Make the EGG-INFO directory + archive_root = self.bdist_dir + egg_info = os.path.join(archive_root, 'EGG-INFO') + self.mkpath(egg_info) + if self.distribution.scripts: + script_dir = os.path.join(egg_info, 'scripts') + log.info("installing scripts to %s" % script_dir) + self.call_command('install_scripts', install_dir=script_dir, + no_ep=1) + + self.copy_metadata_to(egg_info) + native_libs = os.path.join(egg_info, "native_libs.txt") + if all_outputs: + log.info("writing %s" % native_libs) + if not self.dry_run: + ensure_directory(native_libs) + libs_file = open(native_libs, 'wt') + libs_file.write('\n'.join(all_outputs)) + libs_file.write('\n') + libs_file.close() + elif os.path.isfile(native_libs): + log.info("removing %s" % native_libs) + if not self.dry_run: + os.unlink(native_libs) + + write_safety_flag( + os.path.join(archive_root, 'EGG-INFO'), 
self.zip_safe() + ) + + if os.path.exists(os.path.join(self.egg_info, 'depends.txt')): + log.warn( + "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n" + "Use the install_requires/extras_require setup() args instead." + ) + + if self.exclude_source_files: + self.zap_pyfiles() + + # Make the archive + make_zipfile(self.egg_output, archive_root, verbose=self.verbose, + dry_run=self.dry_run, mode=self.gen_header()) + if not self.keep_temp: + remove_tree(self.bdist_dir, dry_run=self.dry_run) + + # Add to 'Distribution.dist_files' so that the "upload" command works + getattr(self.distribution, 'dist_files', []).append( + ('bdist_egg', get_python_version(), self.egg_output)) + + def zap_pyfiles(self): + log.info("Removing .py files from temporary directory") + for base, dirs, files in walk_egg(self.bdist_dir): + for name in files: + if name.endswith('.py'): + path = os.path.join(base, name) + log.debug("Deleting %s", path) + os.unlink(path) + + def zip_safe(self): + safe = getattr(self.distribution, 'zip_safe', None) + if safe is not None: + return safe + log.warn("zip_safe flag not set; analyzing archive contents...") + return analyze_egg(self.bdist_dir, self.stubs) + + def gen_header(self): + epm = EntryPoint.parse_map(self.distribution.entry_points or '') + ep = epm.get('setuptools.installation', {}).get('eggsecutable') + if ep is None: + return 'w' # not an eggsecutable, do it the usual way. + + if not ep.attrs or ep.extras: + raise DistutilsSetupError( + "eggsecutable entry point (%r) cannot have 'extras' " + "or refer to a module" % (ep,) + ) + + pyver = sys.version[:3] + pkg = ep.module_name + full = '.'.join(ep.attrs) + base = ep.attrs[0] + basename = os.path.basename(self.egg_output) + + header = ( + "#!/bin/sh\n" + 'if [ `basename $0` = "%(basename)s" ]\n' + 'then exec python%(pyver)s -c "' + "import sys, os; sys.path.insert(0, os.path.abspath('$0')); " + "from %(pkg)s import %(base)s; sys.exit(%(full)s())" + '" "$@"\n' + 'else\n' + ' echo $0 is not the correct name for this egg file.\n' + ' echo Please rename it back to %(basename)s and try again.\n' + ' exec false\n' + 'fi\n' + ) % locals() + + if not self.dry_run: + mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run) + f = open(self.egg_output, 'w') + f.write(header) + f.close() + return 'a' + + def copy_metadata_to(self, target_dir): + "Copy metadata (egg info) to the target_dir" + # normalize the path (so that a forward-slash in egg_info will + # match using startswith below) + norm_egg_info = os.path.normpath(self.egg_info) + prefix = os.path.join(norm_egg_info, '') + for path in self.ei_cmd.filelist.files: + if path.startswith(prefix): + target = os.path.join(target_dir, path[len(prefix):]) + ensure_directory(target) + self.copy_file(path, target) + + def get_ext_outputs(self): + """Get a list of relative paths to C extensions in the output distro""" + + all_outputs = [] + ext_outputs = [] + + paths = {self.bdist_dir: ''} + for base, dirs, files in os.walk(self.bdist_dir): + for filename in files: + if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: + all_outputs.append(paths[base] + filename) + for filename in dirs: + paths[os.path.join(base, filename)] = (paths[base] + + filename + '/') + + if self.distribution.has_ext_modules(): + build_cmd = self.get_finalized_command('build_ext') + for ext in build_cmd.extensions: + if isinstance(ext, Library): + continue + fullname = build_cmd.get_ext_fullname(ext.name) + filename = build_cmd.get_ext_filename(fullname) + if not 
os.path.basename(filename).startswith('dl-'):
+                    if os.path.exists(os.path.join(self.bdist_dir, filename)):
+                        ext_outputs.append(filename)
+
+        return all_outputs, ext_outputs
+
+
+NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
+
+
+def walk_egg(egg_dir):
+    """Walk an unpacked egg's contents, skipping the metadata directory"""
+    walker = os.walk(egg_dir)
+    base, dirs, files = next(walker)
+    if 'EGG-INFO' in dirs:
+        dirs.remove('EGG-INFO')
+    yield base, dirs, files
+    for bdf in walker:
+        yield bdf
+
+
+def analyze_egg(egg_dir, stubs):
+    # check for existing flag in EGG-INFO
+    for flag, fn in safety_flags.items():
+        if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
+            return flag
+    if not can_scan():
+        return False
+    safe = True
+    for base, dirs, files in walk_egg(egg_dir):
+        for name in files:
+            if name.endswith('.py') or name.endswith('.pyw'):
+                continue
+            elif name.endswith('.pyc') or name.endswith('.pyo'):
+                # always scan, even if we already know we're not safe
+                safe = scan_module(egg_dir, base, name, stubs) and safe
+    return safe
+
+
+def write_safety_flag(egg_dir, safe):
+    # Write or remove zip safety flag file(s)
+    for flag, fn in safety_flags.items():
+        fn = os.path.join(egg_dir, fn)
+        if os.path.exists(fn):
+            if safe is None or bool(safe) != flag:
+                os.unlink(fn)
+        elif safe is not None and bool(safe) == flag:
+            f = open(fn, 'wt')
+            f.write('\n')
+            f.close()
+
+
+safety_flags = {
+    True: 'zip-safe',
+    False: 'not-zip-safe',
+}
+
+
+def scan_module(egg_dir, base, name, stubs):
+    """Check whether module possibly uses unsafe-for-zipfile stuff"""
+
+    filename = os.path.join(base, name)
+    if filename[:-1] in stubs:
+        return True  # Extension module
+    pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
+    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
+    if sys.version_info < (3, 3):
+        skip = 8  # skip magic & date
+    else:
+        skip = 12  # skip magic & date & file size
+    f = open(filename, 'rb')
+    f.read(skip)
+    code = marshal.load(f)
+    f.close()
+    safe = True
+    symbols = dict.fromkeys(iter_symbols(code))
+    for bad in ['__file__', '__path__']:
+        if bad in symbols:
+            log.warn("%s: module references %s", module, bad)
+            safe = False
+    if 'inspect' in symbols:
+        for bad in [
+            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
+            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
+            'getinnerframes', 'getouterframes', 'stack', 'trace'
+        ]:
+            if bad in symbols:
+                log.warn("%s: module MAY be using inspect.%s", module, bad)
+                safe = False
+    if '__name__' in symbols and '__main__' in symbols and '.' not in module:
+        if sys.version[:3] == "2.4":  # -m works w/zipfiles in 2.5
+            log.warn("%s: top-level module may be 'python -m' script", module)
+            safe = False
+    return safe
+
+
+def iter_symbols(code):
+    """Yield names and strings used by `code` and its nested code objects"""
+    for name in code.co_names:
+        yield name
+    for const in code.co_consts:
+        if isinstance(const, basestring):
+            yield const
+        elif isinstance(const, CodeType):
+            for name in iter_symbols(const):
+                yield name
+
+
+def can_scan():
+    if not sys.platform.startswith('java') and sys.platform != 'cli':
+        # CPython, PyPy, etc.
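+        # Their .pyc/.pyo files contain marshalled code objects, which is
+        # what scan_module() above reads and inspects.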
+        return True
+    log.warn("Unable to analyze compiled code on this platform.")
+    log.warn("Please ask the author to include a 'zip_safe'"
+             " setting (either True or False) in the package's setup.py")
+
+# Attribute names of options for commands that might need to be convinced to
+# install to the egg build directory
+
+INSTALL_DIRECTORY_ATTRS = [
+    'install_lib', 'install_dir', 'install_data', 'install_base'
+]
+
+
+def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
+                 mode='w'):
+    """Create a zip file from all the files under 'base_dir'.  The zip
+    archive is written to 'zip_filename' using Python's "zipfile" module;
+    'mode' is passed to zipfile.ZipFile ('w' to create, or 'a' to append
+    to an existing file, as used for eggsecutable headers).  Returns the
+    name of the output zip file.
+    """
+    import zipfile
+
+    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
+    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
+
+    def visit(z, dirname, names):
+        for name in names:
+            path = os.path.normpath(os.path.join(dirname, name))
+            if os.path.isfile(path):
+                p = path[len(base_dir) + 1:]
+                if not dry_run:
+                    z.write(path, p)
+                log.debug("adding '%s'" % p)
+
+    if compress is None:
+        # avoid 2.3 zipimport bug when 64 bits
+        compress = (sys.version >= "2.4")
+
+    compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
+    if not dry_run:
+        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
+        for dirname, dirs, files in os.walk(base_dir):
+            visit(z, dirname, files)
+        z.close()
+    else:
+        for dirname, dirs, files in os.walk(base_dir):
+            visit(None, dirname, files)
+    return zip_filename
diff --git a/lib/python3.4/site-packages/setuptools/command/bdist_rpm.py b/lib/python3.4/site-packages/setuptools/command/bdist_rpm.py
new file mode 100644
index 0000000..7073092
--- /dev/null
+++ b/lib/python3.4/site-packages/setuptools/command/bdist_rpm.py
@@ -0,0 +1,43 @@
+import distutils.command.bdist_rpm as orig
+
+
+class bdist_rpm(orig.bdist_rpm):
+    """
+    Override the default bdist_rpm behavior to do the following:
+
+    1. Run egg_info to ensure the name and version are properly calculated.
+    2. Always run 'install' using --single-version-externally-managed to
+       disable eggs in RPM distributions.
+    3. Replace dash with underscore in the version numbers for better RPM
+       compatibility.
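+       (For example, a project version of "1.0-1" is written into the
+       spec file as "1.0_1".)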
+ """ + + def run(self): + # ensure distro name is up-to-date + self.run_command('egg_info') + + orig.bdist_rpm.run(self) + + def _make_spec_file(self): + version = self.distribution.get_version() + rpmversion = version.replace('-', '_') + spec = orig.bdist_rpm._make_spec_file(self) + line23 = '%define version ' + version + line24 = '%define version ' + rpmversion + spec = [ + line.replace( + "Source0: %{name}-%{version}.tar", + "Source0: %{name}-%{unmangled_version}.tar" + ).replace( + "setup.py install ", + "setup.py install --single-version-externally-managed " + ).replace( + "%setup", + "%setup -n %{name}-%{unmangled_version}" + ).replace(line23, line24) + for line in spec + ] + insert_loc = spec.index(line24) + 1 + unmangled_version = "%define unmangled_version " + version + spec.insert(insert_loc, unmangled_version) + return spec diff --git a/lib/python3.4/site-packages/setuptools/command/bdist_wininst.py b/lib/python3.4/site-packages/setuptools/command/bdist_wininst.py new file mode 100644 index 0000000..073de97 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/bdist_wininst.py @@ -0,0 +1,21 @@ +import distutils.command.bdist_wininst as orig + + +class bdist_wininst(orig.bdist_wininst): + def reinitialize_command(self, command, reinit_subcommands=0): + """ + Supplement reinitialize_command to work around + http://bugs.python.org/issue20819 + """ + cmd = self.distribution.reinitialize_command( + command, reinit_subcommands) + if command in ('install', 'install_lib'): + cmd.install_lib = None + return cmd + + def run(self): + self._is_running = True + try: + orig.bdist_wininst.run(self) + finally: + self._is_running = False diff --git a/lib/python3.4/site-packages/setuptools/command/build_ext.py b/lib/python3.4/site-packages/setuptools/command/build_ext.py new file mode 100644 index 0000000..53bf9cd --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/build_ext.py @@ -0,0 +1,305 @@ +from distutils.command.build_ext import build_ext as _du_build_ext +from distutils.file_util import copy_file +from distutils.ccompiler import new_compiler +from distutils.sysconfig import customize_compiler +from distutils.errors import DistutilsError +from distutils import log +import os +import sys + +from setuptools.extension import Library + +try: + # Attempt to use Pyrex for building extensions, if available + from Pyrex.Distutils.build_ext import build_ext as _build_ext +except ImportError: + _build_ext = _du_build_ext + +try: + # Python 2.7 or >=3.2 + from sysconfig import _CONFIG_VARS +except ImportError: + from distutils.sysconfig import get_config_var + + get_config_var("LDSHARED") # make sure _config_vars is initialized + del get_config_var + from distutils.sysconfig import _config_vars as _CONFIG_VARS + +have_rtld = False +use_stubs = False +libtype = 'shared' + +if sys.platform == "darwin": + use_stubs = True +elif os.name != 'nt': + try: + from dl import RTLD_NOW + + have_rtld = True + use_stubs = True + except ImportError: + pass + + +def if_dl(s): + if have_rtld: + return s + return '' + + +class build_ext(_build_ext): + def run(self): + """Build extensions in build directory, then copy if --inplace""" + old_inplace, self.inplace = self.inplace, 0 + _build_ext.run(self) + self.inplace = old_inplace + if old_inplace: + self.copy_extensions_to_source() + + def copy_extensions_to_source(self): + build_py = self.get_finalized_command('build_py') + for ext in self.extensions: + fullname = self.get_ext_fullname(ext.name) + filename = self.get_ext_filename(fullname) 
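+            # e.g. fullname "pkg.mod" maps to a filename like
+            # "pkg/mod.cpython-34m.so"; the built file is copied from
+            # build_lib back into the source package directory below.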
+ modpath = fullname.split('.') + package = '.'.join(modpath[:-1]) + package_dir = build_py.get_package_dir(package) + dest_filename = os.path.join(package_dir, + os.path.basename(filename)) + src_filename = os.path.join(self.build_lib, filename) + + # Always copy, even if source is older than destination, to ensure + # that the right extensions for the current Python/platform are + # used. + copy_file( + src_filename, dest_filename, verbose=self.verbose, + dry_run=self.dry_run + ) + if ext._needs_stub: + self.write_stub(package_dir or os.curdir, ext, True) + + if _build_ext is not _du_build_ext and not hasattr(_build_ext, + 'pyrex_sources'): + # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4 + def swig_sources(self, sources, *otherargs): + # first do any Pyrex processing + sources = _build_ext.swig_sources(self, sources) or sources + # Then do any actual SWIG stuff on the remainder + return _du_build_ext.swig_sources(self, sources, *otherargs) + + def get_ext_filename(self, fullname): + filename = _build_ext.get_ext_filename(self, fullname) + if fullname in self.ext_map: + ext = self.ext_map[fullname] + if isinstance(ext, Library): + fn, ext = os.path.splitext(filename) + return self.shlib_compiler.library_filename(fn, libtype) + elif use_stubs and ext._links_to_dynamic: + d, fn = os.path.split(filename) + return os.path.join(d, 'dl-' + fn) + return filename + + def initialize_options(self): + _build_ext.initialize_options(self) + self.shlib_compiler = None + self.shlibs = [] + self.ext_map = {} + + def finalize_options(self): + _build_ext.finalize_options(self) + self.extensions = self.extensions or [] + self.check_extensions_list(self.extensions) + self.shlibs = [ext for ext in self.extensions + if isinstance(ext, Library)] + if self.shlibs: + self.setup_shlib_compiler() + for ext in self.extensions: + ext._full_name = self.get_ext_fullname(ext.name) + for ext in self.extensions: + fullname = ext._full_name + self.ext_map[fullname] = ext + + # distutils 3.1 will also ask for module names + # XXX what to do with conflicts? + self.ext_map[fullname.split('.')[-1]] = ext + + ltd = ext._links_to_dynamic = \ + self.shlibs and self.links_to_dynamic(ext) or False + ext._needs_stub = ltd and use_stubs and not isinstance(ext, + Library) + filename = ext._file_name = self.get_ext_filename(fullname) + libdir = os.path.dirname(os.path.join(self.build_lib, filename)) + if ltd and libdir not in ext.library_dirs: + ext.library_dirs.append(libdir) + if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: + ext.runtime_library_dirs.append(os.curdir) + + def setup_shlib_compiler(self): + compiler = self.shlib_compiler = new_compiler( + compiler=self.compiler, dry_run=self.dry_run, force=self.force + ) + if sys.platform == "darwin": + tmp = _CONFIG_VARS.copy() + try: + # XXX Help! I don't have any idea whether these are right... 
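+                # Temporarily patch the global config vars so that
+                # customize_compiler() below sets up a dylib-producing
+                # link step; the originals are restored in 'finally'.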
+ _CONFIG_VARS['LDSHARED'] = ( + "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup") + _CONFIG_VARS['CCSHARED'] = " -dynamiclib" + _CONFIG_VARS['SO'] = ".dylib" + customize_compiler(compiler) + finally: + _CONFIG_VARS.clear() + _CONFIG_VARS.update(tmp) + else: + customize_compiler(compiler) + + if self.include_dirs is not None: + compiler.set_include_dirs(self.include_dirs) + if self.define is not None: + # 'define' option is a list of (name,value) tuples + for (name, value) in self.define: + compiler.define_macro(name, value) + if self.undef is not None: + for macro in self.undef: + compiler.undefine_macro(macro) + if self.libraries is not None: + compiler.set_libraries(self.libraries) + if self.library_dirs is not None: + compiler.set_library_dirs(self.library_dirs) + if self.rpath is not None: + compiler.set_runtime_library_dirs(self.rpath) + if self.link_objects is not None: + compiler.set_link_objects(self.link_objects) + + # hack so distutils' build_extension() builds a library instead + compiler.link_shared_object = link_shared_object.__get__(compiler) + + def get_export_symbols(self, ext): + if isinstance(ext, Library): + return ext.export_symbols + return _build_ext.get_export_symbols(self, ext) + + def build_extension(self, ext): + _compiler = self.compiler + try: + if isinstance(ext, Library): + self.compiler = self.shlib_compiler + _build_ext.build_extension(self, ext) + if ext._needs_stub: + self.write_stub( + self.get_finalized_command('build_py').build_lib, ext + ) + finally: + self.compiler = _compiler + + def links_to_dynamic(self, ext): + """Return true if 'ext' links to a dynamic lib in the same package""" + # XXX this should check to ensure the lib is actually being built + # XXX as dynamic, and not just using a locally-found version or a + # XXX static-compiled version + libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) + pkg = '.'.join(ext._full_name.split('.')[:-1] + ['']) + for libname in ext.libraries: + if pkg + libname in libnames: + return True + return False + + def get_outputs(self): + outputs = _build_ext.get_outputs(self) + optimize = self.get_finalized_command('build_py').optimize + for ext in self.extensions: + if ext._needs_stub: + base = os.path.join(self.build_lib, *ext._full_name.split('.')) + outputs.append(base + '.py') + outputs.append(base + '.pyc') + if optimize: + outputs.append(base + '.pyo') + return outputs + + def write_stub(self, output_dir, ext, compile=False): + log.info("writing stub loader for %s to %s", ext._full_name, + output_dir) + stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) + + '.py') + if compile and os.path.exists(stub_file): + raise DistutilsError(stub_file + " already exists! 
Please delete.") + if not self.dry_run: + f = open(stub_file, 'w') + f.write( + '\n'.join([ + "def __bootstrap__():", + " global __bootstrap__, __file__, __loader__", + " import sys, os, pkg_resources, imp" + if_dl(", dl"), + " __file__ = pkg_resources.resource_filename" + "(__name__,%r)" + % os.path.basename(ext._file_name), + " del __bootstrap__", + " if '__loader__' in globals():", + " del __loader__", + if_dl(" old_flags = sys.getdlopenflags()"), + " old_dir = os.getcwd()", + " try:", + " os.chdir(os.path.dirname(__file__))", + if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), + " imp.load_dynamic(__name__,__file__)", + " finally:", + if_dl(" sys.setdlopenflags(old_flags)"), + " os.chdir(old_dir)", + "__bootstrap__()", + "" # terminal \n + ]) + ) + f.close() + if compile: + from distutils.util import byte_compile + + byte_compile([stub_file], optimize=0, + force=True, dry_run=self.dry_run) + optimize = self.get_finalized_command('install_lib').optimize + if optimize > 0: + byte_compile([stub_file], optimize=optimize, + force=True, dry_run=self.dry_run) + if os.path.exists(stub_file) and not self.dry_run: + os.unlink(stub_file) + + +if use_stubs or os.name == 'nt': + # Build shared libraries + # + def link_shared_object( + self, objects, output_libname, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, export_symbols=None, + debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, + target_lang=None): + self.link( + self.SHARED_LIBRARY, objects, output_libname, + output_dir, libraries, library_dirs, runtime_library_dirs, + export_symbols, debug, extra_preargs, extra_postargs, + build_temp, target_lang + ) +else: + # Build static libraries everywhere else + libtype = 'static' + + def link_shared_object( + self, objects, output_libname, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, export_symbols=None, + debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, + target_lang=None): + # XXX we need to either disallow these attrs on Library instances, + # or warn/abort here if set, or something... + # libraries=None, library_dirs=None, runtime_library_dirs=None, + # export_symbols=None, extra_preargs=None, extra_postargs=None, + # build_temp=None + + assert output_dir is None # distutils build_ext doesn't pass this + output_dir, filename = os.path.split(output_libname) + basename, ext = os.path.splitext(filename) + if self.library_filename("x").startswith('lib'): + # strip 'lib' prefix; this is kludgy if some platform uses + # a different prefix + basename = basename[3:] + + self.create_static_lib( + objects, basename, output_dir, debug, target_lang + ) diff --git a/lib/python3.4/site-packages/setuptools/command/build_py.py b/lib/python3.4/site-packages/setuptools/command/build_py.py new file mode 100644 index 0000000..9808069 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/build_py.py @@ -0,0 +1,230 @@ +from glob import glob +from distutils.util import convert_path +import distutils.command.build_py as orig +import os +import sys +import fnmatch +import textwrap + +try: + from setuptools.lib2to3_ex import Mixin2to3 +except ImportError: + class Mixin2to3: + def run_2to3(self, files, doctests=True): + "do nothing" + + +class build_py(orig.build_py, Mixin2to3): + """Enhanced 'build_py' command that includes data files with packages + + The data files are specified via a 'package_data' argument to 'setup()'. + See 'setuptools.dist.Distribution' for more details. 
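+
+    A minimal, purely illustrative mapping might look like::
+
+        setup(..., package_data={'mypkg': ['data/*.dat']})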
+ + Also, this version of the 'build_py' command allows you to specify both + 'py_modules' and 'packages' in the same setup operation. + """ + + def finalize_options(self): + orig.build_py.finalize_options(self) + self.package_data = self.distribution.package_data + self.exclude_package_data = (self.distribution.exclude_package_data or + {}) + if 'data_files' in self.__dict__: + del self.__dict__['data_files'] + self.__updated_files = [] + self.__doctests_2to3 = [] + + def run(self): + """Build modules, packages, and copy data files to build directory""" + if not self.py_modules and not self.packages: + return + + if self.py_modules: + self.build_modules() + + if self.packages: + self.build_packages() + self.build_package_data() + + self.run_2to3(self.__updated_files, False) + self.run_2to3(self.__updated_files, True) + self.run_2to3(self.__doctests_2to3, True) + + # Only compile actual .py files, using our base class' idea of what our + # output files are. + self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0)) + + def __getattr__(self, attr): + if attr == 'data_files': # lazily compute data files + self.data_files = files = self._get_data_files() + return files + return orig.build_py.__getattr__(self, attr) + + def build_module(self, module, module_file, package): + outfile, copied = orig.build_py.build_module(self, module, module_file, + package) + if copied: + self.__updated_files.append(outfile) + return outfile, copied + + def _get_data_files(self): + """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" + self.analyze_manifest() + data = [] + for package in self.packages or (): + # Locate package source directory + src_dir = self.get_package_dir(package) + + # Compute package build directory + build_dir = os.path.join(*([self.build_lib] + package.split('.'))) + + # Length of path to strip from found files + plen = len(src_dir) + 1 + + # Strip directory from globbed filenames + filenames = [ + file[plen:] for file in self.find_data_files(package, src_dir) + ] + data.append((package, src_dir, build_dir, filenames)) + return data + + def find_data_files(self, package, src_dir): + """Return filenames for package's data files in 'src_dir'""" + globs = (self.package_data.get('', []) + + self.package_data.get(package, [])) + files = self.manifest_files.get(package, [])[:] + for pattern in globs: + # Each pattern has to be converted to a platform-specific path + files.extend(glob(os.path.join(src_dir, convert_path(pattern)))) + return self.exclude_data_files(package, src_dir, files) + + def build_package_data(self): + """Copy data files into build directory""" + for package, src_dir, build_dir, filenames in self.data_files: + for filename in filenames: + target = os.path.join(build_dir, filename) + self.mkpath(os.path.dirname(target)) + srcfile = os.path.join(src_dir, filename) + outf, copied = self.copy_file(srcfile, target) + srcfile = os.path.abspath(srcfile) + if (copied and + srcfile in self.distribution.convert_2to3_doctests): + self.__doctests_2to3.append(outf) + + def analyze_manifest(self): + self.manifest_files = mf = {} + if not self.distribution.include_package_data: + return + src_dirs = {} + for package in self.packages or (): + # Locate package source directory + src_dirs[assert_relative(self.get_package_dir(package))] = package + + self.run_command('egg_info') + ei_cmd = self.get_finalized_command('egg_info') + for path in ei_cmd.filelist.files: + d, f = os.path.split(assert_relative(path)) + prev = None + oldf = f + while d and d != prev and 
d not in src_dirs: + prev = d + d, df = os.path.split(d) + f = os.path.join(df, f) + if d in src_dirs: + if path.endswith('.py') and f == oldf: + continue # it's a module, not data + mf.setdefault(src_dirs[d], []).append(path) + + def get_data_files(self): + pass # kludge 2.4 for lazy computation + + if sys.version < "2.4": # Python 2.4 already has this code + def get_outputs(self, include_bytecode=1): + """Return complete list of files copied to the build directory + + This includes both '.py' files and data files, as well as '.pyc' + and '.pyo' files if 'include_bytecode' is true. (This method is + needed for the 'install_lib' command to do its job properly, and to + generate a correct installation manifest.) + """ + return orig.build_py.get_outputs(self, include_bytecode) + [ + os.path.join(build_dir, filename) + for package, src_dir, build_dir, filenames in self.data_files + for filename in filenames + ] + + def check_package(self, package, package_dir): + """Check namespace packages' __init__ for declare_namespace""" + try: + return self.packages_checked[package] + except KeyError: + pass + + init_py = orig.build_py.check_package(self, package, package_dir) + self.packages_checked[package] = init_py + + if not init_py or not self.distribution.namespace_packages: + return init_py + + for pkg in self.distribution.namespace_packages: + if pkg == package or pkg.startswith(package + '.'): + break + else: + return init_py + + f = open(init_py, 'rbU') + if 'declare_namespace'.encode() not in f.read(): + from distutils.errors import DistutilsError + + raise DistutilsError( + "Namespace package problem: %s is a namespace package, but " + "its\n__init__.py does not call declare_namespace()! Please " + 'fix it.\n(See the setuptools manual under ' + '"Namespace Packages" for details.)\n"' % (package,) + ) + f.close() + return init_py + + def initialize_options(self): + self.packages_checked = {} + orig.build_py.initialize_options(self) + + def get_package_dir(self, package): + res = orig.build_py.get_package_dir(self, package) + if self.distribution.src_root is not None: + return os.path.join(self.distribution.src_root, res) + return res + + def exclude_data_files(self, package, src_dir, files): + """Filter filenames for package's data files in 'src_dir'""" + globs = (self.exclude_package_data.get('', []) + + self.exclude_package_data.get(package, [])) + bad = [] + for pattern in globs: + bad.extend( + fnmatch.filter( + files, os.path.join(src_dir, convert_path(pattern)) + ) + ) + bad = dict.fromkeys(bad) + seen = {} + return [ + f for f in files if f not in bad + and f not in seen and seen.setdefault(f, 1) # ditch dupes + ] + + +def assert_relative(path): + if not os.path.isabs(path): + return path + from distutils.errors import DistutilsSetupError + + msg = textwrap.dedent(""" + Error: setup script specifies an absolute path: + + %s + + setup() arguments must *always* be /-separated paths relative to the + setup.py directory, *never* absolute paths. 
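+
+        For example, write "mypkg/data" rather than "/home/me/mypkg/data".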
+ """).lstrip() % path + raise DistutilsSetupError(msg) diff --git a/lib/python3.4/site-packages/setuptools/command/develop.py b/lib/python3.4/site-packages/setuptools/command/develop.py new file mode 100644 index 0000000..368b64f --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/develop.py @@ -0,0 +1,169 @@ +from distutils.util import convert_path +from distutils import log +from distutils.errors import DistutilsError, DistutilsOptionError +import os +import glob + +from pkg_resources import Distribution, PathMetadata, normalize_path +from setuptools.command.easy_install import easy_install +from setuptools.compat import PY3 +import setuptools + + +class develop(easy_install): + """Set up package for development""" + + description = "install package in 'development mode'" + + user_options = easy_install.user_options + [ + ("uninstall", "u", "Uninstall this source package"), + ("egg-path=", None, "Set the path to be used in the .egg-link file"), + ] + + boolean_options = easy_install.boolean_options + ['uninstall'] + + command_consumes_arguments = False # override base + + def run(self): + if self.uninstall: + self.multi_version = True + self.uninstall_link() + else: + self.install_for_development() + self.warn_deprecated_options() + + def initialize_options(self): + self.uninstall = None + self.egg_path = None + easy_install.initialize_options(self) + self.setup_path = None + self.always_copy_from = '.' # always copy eggs installed in curdir + + def finalize_options(self): + ei = self.get_finalized_command("egg_info") + if ei.broken_egg_info: + template = "Please rename %r to %r before using 'develop'" + args = ei.egg_info, ei.broken_egg_info + raise DistutilsError(template % args) + self.args = [ei.egg_name] + + easy_install.finalize_options(self) + self.expand_basedirs() + self.expand_dirs() + # pick up setup-dir .egg files only: no .egg-info + self.package_index.scan(glob.glob('*.egg')) + + self.egg_link = os.path.join(self.install_dir, ei.egg_name + + '.egg-link') + self.egg_base = ei.egg_base + if self.egg_path is None: + self.egg_path = os.path.abspath(ei.egg_base) + + target = normalize_path(self.egg_base) + egg_path = normalize_path(os.path.join(self.install_dir, + self.egg_path)) + if egg_path != target: + raise DistutilsOptionError( + "--egg-path must be a relative path from the install" + " directory to " + target + ) + + # Make a distribution for the package's source + self.dist = Distribution( + target, + PathMetadata(target, os.path.abspath(ei.egg_info)), + project_name=ei.egg_name + ) + + p = self.egg_base.replace(os.sep, '/') + if p != os.curdir: + p = '../' * (p.count('/') + 1) + self.setup_path = p + p = normalize_path(os.path.join(self.install_dir, self.egg_path, p)) + if p != normalize_path(os.curdir): + raise DistutilsOptionError( + "Can't get a consistent path to setup script from" + " installation directory", p, normalize_path(os.curdir)) + + def install_for_development(self): + if PY3 and getattr(self.distribution, 'use_2to3', False): + # If we run 2to3 we can not do this inplace: + + # Ensure metadata is up-to-date + self.reinitialize_command('build_py', inplace=0) + self.run_command('build_py') + bpy_cmd = self.get_finalized_command("build_py") + build_path = normalize_path(bpy_cmd.build_lib) + + # Build extensions + self.reinitialize_command('egg_info', egg_base=build_path) + self.run_command('egg_info') + + self.reinitialize_command('build_ext', inplace=0) + self.run_command('build_ext') + + # Fixup egg-link and easy-install.pth + ei_cmd = 
self.get_finalized_command("egg_info") + self.egg_path = build_path + self.dist.location = build_path + # XXX + self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info) + else: + # Without 2to3 inplace works fine: + self.run_command('egg_info') + + # Build extensions in-place + self.reinitialize_command('build_ext', inplace=1) + self.run_command('build_ext') + + self.install_site_py() # ensure that target dir is site-safe + if setuptools.bootstrap_install_from: + self.easy_install(setuptools.bootstrap_install_from) + setuptools.bootstrap_install_from = None + + # create an .egg-link in the installation dir, pointing to our egg + log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) + if not self.dry_run: + f = open(self.egg_link, "w") + f.write(self.egg_path + "\n" + self.setup_path) + f.close() + # postprocess the installed distro, fixing up .pth, installing scripts, + # and handling requirements + self.process_distribution(None, self.dist, not self.no_deps) + + def uninstall_link(self): + if os.path.exists(self.egg_link): + log.info("Removing %s (link to %s)", self.egg_link, self.egg_base) + egg_link_file = open(self.egg_link) + contents = [line.rstrip() for line in egg_link_file] + egg_link_file.close() + if contents not in ([self.egg_path], + [self.egg_path, self.setup_path]): + log.warn("Link points to %s: uninstall aborted", contents) + return + if not self.dry_run: + os.unlink(self.egg_link) + if not self.dry_run: + self.update_pth(self.dist) # remove any .pth link to us + if self.distribution.scripts: + # XXX should also check for entry point scripts! + log.warn("Note: you must uninstall or replace scripts manually!") + + def install_egg_scripts(self, dist): + if dist is not self.dist: + # Installing a dependency, so fall back to normal behavior + return easy_install.install_egg_scripts(self, dist) + + # create wrapper scripts in the script dir, pointing to dist.scripts + + # new-style... + self.install_wrapper_scripts(dist) + + # ...and old-style + for script_name in self.distribution.scripts or []: + script_path = os.path.abspath(convert_path(script_name)) + script_name = os.path.basename(script_path) + f = open(script_path, 'rU') + script_text = f.read() + f.close() + self.install_script(dist, script_name, script_text, script_path) diff --git a/lib/python3.4/site-packages/setuptools/command/easy_install.py b/lib/python3.4/site-packages/setuptools/command/easy_install.py new file mode 100644 index 0000000..5c3d4d3 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/easy_install.py @@ -0,0 +1,2204 @@ +#!/usr/bin/env python + +""" +Easy Install +------------ + +A tool for doing automatic download/extract/build of distutils-based Python +packages. For detailed documentation, see the accompanying EasyInstall.txt +file, or visit the `EasyInstall home page`__. 
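+
+A typical invocation (illustrative)::
+
+    easy_install SomePackage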
+ +__ https://pythonhosted.org/setuptools/easy_install.html + +""" + +from glob import glob +from distutils.util import get_platform +from distutils.util import convert_path, subst_vars +from distutils.errors import DistutilsArgError, DistutilsOptionError, \ + DistutilsError, DistutilsPlatformError +from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS +from distutils import log, dir_util +from distutils.command.build_scripts import first_line_re +import sys +import os +import zipimport +import shutil +import tempfile +import zipfile +import re +import stat +import random +import platform +import textwrap +import warnings +import site +import struct + +from setuptools import Command, _dont_write_bytecode +from setuptools.sandbox import run_setup +from setuptools.py31compat import get_path, get_config_vars +from setuptools.command import setopt +from setuptools.archive_util import unpack_archive +from setuptools.package_index import PackageIndex +from setuptools.package_index import URL_SCHEME +from setuptools.command import bdist_egg, egg_info +from setuptools.compat import (iteritems, maxsize, basestring, unicode, + reraise, PY2, PY3) +from pkg_resources import ( + yield_lines, normalize_path, resource_string, ensure_directory, + get_distribution, find_distributions, Environment, Requirement, + Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound, + VersionConflict, DEVELOP_DIST, +) +import pkg_resources + + +sys_executable = os.environ.get('__PYVENV_LAUNCHER__', + os.path.normpath(sys.executable)) + +__all__ = [ + 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', + 'main', 'get_exe_prefixes', +] + + +def is_64bit(): + return struct.calcsize("P") == 8 + + +def samefile(p1, p2): + both_exist = os.path.exists(p1) and os.path.exists(p2) + use_samefile = hasattr(os.path, 'samefile') and both_exist + if use_samefile: + return os.path.samefile(p1, p2) + norm_p1 = os.path.normpath(os.path.normcase(p1)) + norm_p2 = os.path.normpath(os.path.normcase(p2)) + return norm_p1 == norm_p2 + + +if PY2: + def _to_ascii(s): + return s + + def isascii(s): + try: + unicode(s, 'ascii') + return True + except UnicodeError: + return False +else: + def _to_ascii(s): + return s.encode('ascii') + + def isascii(s): + try: + s.encode('ascii') + return True + except UnicodeError: + return False + + +class easy_install(Command): + """Manage a download/build/install process""" + description = "Find/get/install Python packages" + command_consumes_arguments = True + + user_options = [ + ('prefix=', None, "installation prefix"), + ("zip-ok", "z", "install package as a zipfile"), + ("multi-version", "m", "make apps have to require() a version"), + ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), + ("install-dir=", "d", "install package to DIR"), + ("script-dir=", "s", "install scripts to DIR"), + ("exclude-scripts", "x", "Don't install scripts"), + ("always-copy", "a", "Copy all needed packages to install dir"), + ("index-url=", "i", "base URL of Python Package Index"), + ("find-links=", "f", "additional URL(s) to search for packages"), + ("build-directory=", "b", + "download/extract/build in DIR; keep the results"), + ('optimize=', 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ('record=', None, + "filename in which to record list of installed files"), + ('always-unzip', 'Z', "don't install as a zipfile, no matter what"), + ('site-dirs=', 'S', "list of directories 
where .pth files work"), + ('editable', 'e', "Install specified packages in editable form"), + ('no-deps', 'N', "don't install dependencies"), + ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), + ('local-snapshots-ok', 'l', + "allow building eggs from local checkouts"), + ('version', None, "print version information and exit"), + ('install-layout=', None, "installation layout to choose (known values: deb)"), + ('force-installation-into-system-dir', '0', "force installation into /usr"), + ('no-find-links', None, + "Don't load find-links defined in packages being installed") + ] + boolean_options = [ + 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy', + 'editable', + 'no-deps', 'local-snapshots-ok', 'version', 'force-installation-into-system-dir' + ] + + if site.ENABLE_USER_SITE: + help_msg = "install in user site-package '%s'" % site.USER_SITE + user_options.append(('user', None, help_msg)) + boolean_options.append('user') + + negative_opt = {'always-unzip': 'zip-ok'} + create_index = PackageIndex + + def initialize_options(self): + if site.ENABLE_USER_SITE: + whereami = os.path.abspath(__file__) + self.user = whereami.startswith(site.USER_SITE) + else: + self.user = 0 + + self.zip_ok = self.local_snapshots_ok = None + self.install_dir = self.script_dir = self.exclude_scripts = None + self.index_url = None + self.find_links = None + self.build_directory = None + self.args = None + self.optimize = self.record = None + self.upgrade = self.always_copy = self.multi_version = None + self.editable = self.no_deps = self.allow_hosts = None + self.root = self.prefix = self.no_report = None + self.version = None + self.install_purelib = None # for pure module distributions + self.install_platlib = None # non-pure (dists w/ extensions) + self.install_headers = None # for C/C++ headers + self.install_lib = None # set to either purelib or platlib + self.install_scripts = None + self.install_data = None + self.install_base = None + self.install_platbase = None + if site.ENABLE_USER_SITE: + self.install_userbase = site.USER_BASE + self.install_usersite = site.USER_SITE + else: + self.install_userbase = None + self.install_usersite = None + self.no_find_links = None + + # Options not specifiable via command line + self.package_index = None + self.pth_file = self.always_copy_from = None + self.site_dirs = None + self.installed_projects = {} + self.sitepy_installed = False + # enable custom installation, known values: deb + self.install_layout = None + self.force_installation_into_system_dir = None + self.multiarch = None + + # Always read easy_install options, even if we are subclassed, or have + # an independent instance created. This ensures that defaults will + # always come from the standard configuration file(s)' "easy_install" + # section, even if this is a "develop" or "install" command, or some + # other embedding. 
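+        # (get_option_dict() returns the options parsed from setup.cfg /
+        # distutils.cfg for the "easy_install" section.)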
+ self._dry_run = None + self.verbose = self.distribution.verbose + self.distribution._set_command_options( + self, self.distribution.get_option_dict('easy_install') + ) + + def delete_blockers(self, blockers): + for filename in blockers: + if os.path.exists(filename) or os.path.islink(filename): + log.info("Deleting %s", filename) + if not self.dry_run: + if (os.path.isdir(filename) and + not os.path.islink(filename)): + rmtree(filename) + else: + os.unlink(filename) + + def finalize_options(self): + if self.version: + print('setuptools %s' % get_distribution('setuptools').version) + sys.exit() + + py_version = sys.version.split()[0] + prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix') + + self.config_vars = { + 'dist_name': self.distribution.get_name(), + 'dist_version': self.distribution.get_version(), + 'dist_fullname': self.distribution.get_fullname(), + 'py_version': py_version, + 'py_version_short': py_version[0:3], + 'py_version_nodot': py_version[0] + py_version[2], + 'sys_prefix': prefix, + 'prefix': prefix, + 'sys_exec_prefix': exec_prefix, + 'exec_prefix': exec_prefix, + # Only python 3.2+ has abiflags + 'abiflags': getattr(sys, 'abiflags', ''), + } + + if site.ENABLE_USER_SITE: + self.config_vars['userbase'] = self.install_userbase + self.config_vars['usersite'] = self.install_usersite + + # fix the install_dir if "--user" was used + # XXX: duplicate of the code in the setup command + if self.user and site.ENABLE_USER_SITE: + self.create_home_path() + if self.install_userbase is None: + raise DistutilsPlatformError( + "User base directory is not specified") + self.install_base = self.install_platbase = self.install_userbase + if os.name == 'posix': + self.select_scheme("unix_user") + else: + self.select_scheme(os.name + "_user") + + self.expand_basedirs() + self.expand_dirs() + + if self.install_layout: + if not self.install_layout.lower() in ['deb']: + raise DistutilsOptionError("unknown value for --install-layout") + import sysconfig + if sys.version_info[:2] >= (3, 3): + self.multiarch = sysconfig.get_config_var('MULTIARCH') + + self._expand('install_dir', 'script_dir', 'build_directory', + 'site_dirs') + # If a non-default installation directory was specified, default the + # script directory to match it. + if self.script_dir is None: + self.script_dir = self.install_dir + + if self.no_find_links is None: + self.no_find_links = False + + # Let install_dir get set by install_lib command, which in turn + # gets its info from the install command, and takes into account + # --prefix and --home and all that other crud. + self.set_undefined_options( + 'install_lib', ('install_dir', 'install_dir') + ) + # Likewise, set default script_dir from 'install_scripts.install_dir' + self.set_undefined_options( + 'install_scripts', ('install_dir', 'script_dir') + ) + + if self.user and self.install_purelib: + self.install_dir = self.install_purelib + self.script_dir = self.install_scripts + + if self.prefix == '/usr' and not self.force_installation_into_system_dir: + raise DistutilsOptionError("""installation into /usr + +Trying to install into the system managed parts of the file system. Please +consider to install to another location, or use the option +--force-installation-into-system-dir to overwrite this warning. +""") + + # default --record from the install command + self.set_undefined_options('install', ('record', 'record')) + # Should this be moved to the if statement below? 
It's not used
+        # elsewhere
+        # list(): under Python 3, map() returns a one-shot iterator, and
+        # this is membership-tested once per --site-dirs entry below
+        normpath = list(map(normalize_path, sys.path))
+        self.all_site_dirs = get_site_dirs()
+        if self.site_dirs is not None:
+            site_dirs = [
+                os.path.expanduser(s.strip()) for s in
+                self.site_dirs.split(',')
+            ]
+            for d in site_dirs:
+                if not os.path.isdir(d):
+                    log.warn("%s (in --site-dirs) does not exist", d)
+                elif normalize_path(d) not in normpath:
+                    raise DistutilsOptionError(
+                        d + " (in --site-dirs) is not on sys.path"
+                    )
+                else:
+                    self.all_site_dirs.append(normalize_path(d))
+        if not self.editable:
+            self.check_site_dir()
+        self.index_url = self.index_url or "https://pypi.python.org/simple"
+        self.shadow_path = self.all_site_dirs[:]
+        for path_item in self.install_dir, normalize_path(self.script_dir):
+            if path_item not in self.shadow_path:
+                self.shadow_path.insert(0, path_item)
+
+        if self.allow_hosts is not None:
+            hosts = [s.strip() for s in self.allow_hosts.split(',')]
+        else:
+            hosts = ['*']
+        if self.package_index is None:
+            self.package_index = self.create_index(
+                self.index_url, search_path=self.shadow_path, hosts=hosts,
+            )
+        self.local_index = Environment(self.shadow_path + sys.path)
+
+        if self.find_links is not None:
+            if isinstance(self.find_links, basestring):
+                self.find_links = self.find_links.split()
+        else:
+            self.find_links = []
+        if self.local_snapshots_ok:
+            self.package_index.scan_egg_links(self.shadow_path + sys.path)
+        if not self.no_find_links:
+            self.package_index.add_find_links(self.find_links)
+        self.set_undefined_options('install_lib', ('optimize', 'optimize'))
+        if not isinstance(self.optimize, int):
+            try:
+                self.optimize = int(self.optimize)
+                if not (0 <= self.optimize <= 2):
+                    raise ValueError
+            except ValueError:
+                raise DistutilsOptionError("--optimize must be 0, 1, or 2")
+
+        if self.editable and not self.build_directory:
+            raise DistutilsArgError(
+                "Must specify a build directory (-b) when using --editable"
+            )
+        if not self.args:
+            raise DistutilsArgError(
+                "No urls, filenames, or requirements specified (see --help)")
+
+        self.outputs = []
+
+    def _expand_attrs(self, attrs):
+        for attr in attrs:
+            val = getattr(self, attr)
+            if val is not None:
+                if os.name == 'posix' or os.name == 'nt':
+                    val = os.path.expanduser(val)
+                val = subst_vars(val, self.config_vars)
+                setattr(self, attr, val)
+
+    def expand_basedirs(self):
+        """Calls `os.path.expanduser` on install_base, install_platbase and
+        root."""
+        self._expand_attrs(['install_base', 'install_platbase', 'root'])
+
+    def expand_dirs(self):
+        """Calls `os.path.expanduser` on install dirs."""
+        self._expand_attrs(['install_purelib', 'install_platlib',
+                            'install_lib', 'install_headers',
+                            'install_scripts', 'install_data', ])
+
+    def run(self):
+        if self.verbose != self.distribution.verbose:
+            log.set_verbosity(self.verbose)
+        try:
+            for spec in self.args:
+                self.easy_install(spec, not self.no_deps)
+            if self.record:
+                outputs = self.outputs
+                if self.root:  # strip any package prefix
+                    root_len = len(self.root)
+                    for counter in range(len(outputs)):
+                        outputs[counter] = outputs[counter][root_len:]
+                from distutils import file_util
+
+                self.execute(
+                    file_util.write_file, (self.record, outputs),
+                    "writing list of installed files to '%s'" %
+                    self.record
+                )
+            self.warn_deprecated_options()
+        finally:
+            log.set_verbosity(self.distribution.verbose)
+
+    def pseudo_tempname(self):
+        """Return a pseudo-tempname base in the install directory.
+        This code is intentionally naive; if a malicious party can write to
+        the target directory you're already in deep doodoo.
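+        The result is of the form "<install_dir>/test-easy-install-<pid>".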
+ """ + try: + pid = os.getpid() + except: + pid = random.randint(0, maxsize) + return os.path.join(self.install_dir, "test-easy-install-%s" % pid) + + def warn_deprecated_options(self): + pass + + def check_site_dir(self): + """Verify that self.install_dir is .pth-capable dir, if needed""" + + instdir = normalize_path(self.install_dir) + pth_file = os.path.join(instdir, 'easy-install.pth') + + # Is it a configured, PYTHONPATH, implicit, or explicit site dir? + is_site_dir = instdir in self.all_site_dirs + + if not is_site_dir and not self.multi_version: + # No? Then directly test whether it does .pth file processing + is_site_dir = self.check_pth_processing() + else: + # make sure we can write to target dir + testfile = self.pseudo_tempname() + '.write-test' + test_exists = os.path.exists(testfile) + try: + if test_exists: + os.unlink(testfile) + open(testfile, 'w').close() + os.unlink(testfile) + except (OSError, IOError): + self.cant_write_to_target() + + if not is_site_dir and not self.multi_version: + # Can't install non-multi to non-site dir + raise DistutilsError(self.no_default_version_msg()) + + if is_site_dir: + if self.pth_file is None: + self.pth_file = PthDistributions(pth_file, self.all_site_dirs) + else: + self.pth_file = None + + PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep) + if instdir not in map(normalize_path, [_f for _f in PYTHONPATH if _f]): + # only PYTHONPATH dirs need a site.py, so pretend it's there + self.sitepy_installed = True + elif self.multi_version and not os.path.exists(pth_file): + self.sitepy_installed = True # don't need site.py in this case + self.pth_file = None # and don't create a .pth file + self.install_dir = instdir + + def cant_write_to_target(self): + template = """can't create or remove files in install directory + +The following error occurred while trying to add or remove files in the +installation directory: + + %s + +The installation directory you specified (via --install-dir, --prefix, or +the distutils default setting) was: + + %s +""" + msg = template % (sys.exc_info()[1], self.install_dir,) + + if not os.path.exists(self.install_dir): + msg += """ +This directory does not currently exist. Please create it and try again, or +choose a different installation directory (using the -d or --install-dir +option). +""" + else: + msg += """ +Perhaps your account does not have write access to this directory? If the +installation directory is a system-owned directory, you may need to sign in +as the administrator or "root" account. If you do not have administrative +access to this machine, you may wish to choose a different installation +directory, preferably one that is listed in your PYTHONPATH environment +variable. + +For information on other options, you may wish to consult the +documentation at: + + https://pythonhosted.org/setuptools/easy_install.html + +Please make the appropriate changes for your system and try again. +""" + raise DistutilsError(msg) + + def check_pth_processing(self): + """Empirically verify whether .pth files are supported in inst. 
dir""" + instdir = self.install_dir + log.info("Checking .pth file support in %s", instdir) + pth_file = self.pseudo_tempname() + ".pth" + ok_file = pth_file + '.ok' + ok_exists = os.path.exists(ok_file) + try: + if ok_exists: + os.unlink(ok_file) + dirname = os.path.dirname(ok_file) + if not os.path.exists(dirname): + os.makedirs(dirname) + f = open(pth_file, 'w') + except (OSError, IOError): + self.cant_write_to_target() + else: + try: + f.write("import os; f = open(%r, 'w'); f.write('OK'); " + "f.close()\n" % (ok_file,)) + f.close() + f = None + executable = sys.executable + if os.name == 'nt': + dirname, basename = os.path.split(executable) + alt = os.path.join(dirname, 'pythonw.exe') + if (basename.lower() == 'python.exe' and + os.path.exists(alt)): + # use pythonw.exe to avoid opening a console window + executable = alt + + from distutils.spawn import spawn + + spawn([executable, '-E', '-c', 'pass'], 0) + + if os.path.exists(ok_file): + log.info( + "TEST PASSED: %s appears to support .pth files", + instdir + ) + return True + finally: + if f: + f.close() + if os.path.exists(ok_file): + os.unlink(ok_file) + if os.path.exists(pth_file): + os.unlink(pth_file) + if not self.multi_version: + log.warn("TEST FAILED: %s does NOT support .pth files", instdir) + return False + + def install_egg_scripts(self, dist): + """Write all the scripts for `dist`, unless scripts are excluded""" + if not self.exclude_scripts and dist.metadata_isdir('scripts'): + for script_name in dist.metadata_listdir('scripts'): + if dist.metadata_isdir('scripts/' + script_name): + # The "script" is a directory, likely a Python 3 + # __pycache__ directory, so skip it. + continue + self.install_script( + dist, script_name, + dist.get_metadata('scripts/' + script_name) + ) + self.install_wrapper_scripts(dist) + + def add_output(self, path): + if os.path.isdir(path): + for base, dirs, files in os.walk(path): + for filename in files: + self.outputs.append(os.path.join(base, filename)) + else: + self.outputs.append(path) + + def not_editable(self, spec): + if self.editable: + raise DistutilsArgError( + "Invalid argument %r: you can't use filenames or URLs " + "with --editable (except via the --find-links option)." 
+ % (spec,) + ) + + def check_editable(self, spec): + if not self.editable: + return + + if os.path.exists(os.path.join(self.build_directory, spec.key)): + raise DistutilsArgError( + "%r already exists in %s; can't do a checkout there" % + (spec.key, self.build_directory) + ) + + def easy_install(self, spec, deps=False): + tmpdir = tempfile.mkdtemp(prefix="easy_install-") + download = None + if not self.editable: + self.install_site_py() + + try: + if not isinstance(spec, Requirement): + if URL_SCHEME(spec): + # It's a url, download it to tmpdir and process + self.not_editable(spec) + download = self.package_index.download(spec, tmpdir) + return self.install_item(None, download, tmpdir, deps, + True) + + elif os.path.exists(spec): + # Existing file or directory, just process it directly + self.not_editable(spec) + return self.install_item(None, spec, tmpdir, deps, True) + else: + spec = parse_requirement_arg(spec) + + self.check_editable(spec) + dist = self.package_index.fetch_distribution( + spec, tmpdir, self.upgrade, self.editable, + not self.always_copy, self.local_index + ) + if dist is None: + msg = "Could not find suitable distribution for %r" % spec + if self.always_copy: + msg += " (--always-copy skips system and development eggs)" + raise DistutilsError(msg) + elif dist.precedence == DEVELOP_DIST: + # .egg-info dists don't need installing, just process deps + self.process_distribution(spec, dist, deps, "Using") + return dist + else: + return self.install_item(spec, dist.location, tmpdir, deps) + + finally: + if os.path.exists(tmpdir): + rmtree(tmpdir) + + def install_item(self, spec, download, tmpdir, deps, install_needed=False): + + # Installation is also needed if file in tmpdir or is not an egg + install_needed = install_needed or self.always_copy + install_needed = install_needed or os.path.dirname(download) == tmpdir + install_needed = install_needed or not download.endswith('.egg') + install_needed = install_needed or ( + self.always_copy_from is not None and + os.path.dirname(normalize_path(download)) == + normalize_path(self.always_copy_from) + ) + + if spec and not install_needed: + # at this point, we know it's a local .egg, we just don't know if + # it's already installed. + for dist in self.local_index[spec.project_name]: + if dist.location == download: + break + else: + install_needed = True # it's not in the local index + + log.info("Processing %s", os.path.basename(download)) + + if install_needed: + dists = self.install_eggs(spec, download, tmpdir) + for dist in dists: + self.process_distribution(spec, dist, deps) + else: + dists = [self.egg_distribution(download)] + self.process_distribution(spec, dists[0], deps, "Using") + + if spec is not None: + for dist in dists: + if dist in spec: + return dist + + def select_scheme(self, name): + """Sets the install directories by applying the install schemes.""" + # it's the caller's problem if they supply a bad name! 
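+        # Fill any install_* attribute that is still None (purelib,
+        # platlib, headers, scripts, data) from distutils' scheme table.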
+ scheme = INSTALL_SCHEMES[name] + for key in SCHEME_KEYS: + attrname = 'install_' + key + if getattr(self, attrname) is None: + setattr(self, attrname, scheme[key]) + + def process_distribution(self, requirement, dist, deps=True, *info): + self.update_pth(dist) + self.package_index.add(dist) + if dist in self.local_index[dist.key]: + self.local_index.remove(dist) + self.local_index.add(dist) + self.install_egg_scripts(dist) + self.installed_projects[dist.key] = dist + log.info(self.installation_report(requirement, dist, *info)) + if (dist.has_metadata('dependency_links.txt') and + not self.no_find_links): + self.package_index.add_find_links( + dist.get_metadata_lines('dependency_links.txt') + ) + if not deps and not self.always_copy: + return + elif requirement is not None and dist.key != requirement.key: + log.warn("Skipping dependencies for %s", dist) + return # XXX this is not the distribution we were looking for + elif requirement is None or dist not in requirement: + # if we wound up with a different version, resolve what we've got + distreq = dist.as_requirement() + requirement = requirement or distreq + requirement = Requirement( + distreq.project_name, distreq.specs, requirement.extras + ) + log.info("Processing dependencies for %s", requirement) + try: + distros = WorkingSet([]).resolve( + [requirement], self.local_index, self.easy_install + ) + except DistributionNotFound: + e = sys.exc_info()[1] + raise DistutilsError( + "Could not find required distribution %s" % e.args + ) + except VersionConflict: + e = sys.exc_info()[1] + raise DistutilsError( + "Installed distribution %s conflicts with requirement %s" + % e.args + ) + if self.always_copy or self.always_copy_from: + # Force all the relevant distros to be copied or activated + for dist in distros: + if dist.key not in self.installed_projects: + self.easy_install(dist.as_requirement()) + log.info("Finished processing dependencies for %s", requirement) + + def should_unzip(self, dist): + if self.zip_ok is not None: + return not self.zip_ok + if dist.has_metadata('not-zip-safe'): + return True + if not dist.has_metadata('zip-safe'): + return True + return False + + def maybe_move(self, spec, dist_filename, setup_base): + dst = os.path.join(self.build_directory, spec.key) + if os.path.exists(dst): + msg = ("%r already exists in %s; build directory %s will not be " + "kept") + log.warn(msg, spec.key, self.build_directory, setup_base) + return setup_base + if os.path.isdir(dist_filename): + setup_base = dist_filename + else: + if os.path.dirname(dist_filename) == setup_base: + os.unlink(dist_filename) # get it out of the tmp dir + contents = os.listdir(setup_base) + if len(contents) == 1: + dist_filename = os.path.join(setup_base, contents[0]) + if os.path.isdir(dist_filename): + # if the only thing there is a directory, move it instead + setup_base = dist_filename + ensure_directory(dst) + shutil.move(setup_base, dst) + return dst + + def install_wrapper_scripts(self, dist): + if not self.exclude_scripts: + for args in get_script_args(dist): + self.write_script(*args) + + def install_script(self, dist, script_name, script_text, dev_path=None): + """Generate a legacy script wrapper and install it""" + spec = str(dist.as_requirement()) + is_script = is_python_script(script_text, script_name) + + if is_script: + script_text = (get_script_header(script_text) + + self._load_template(dev_path) % locals()) + self.write_script(script_name, _to_ascii(script_text), 'b') + + @staticmethod + def _load_template(dev_path): + """ + There are a 
couple of template scripts in the package. This + function loads one of them and prepares it for use. + """ + # See https://bitbucket.org/pypa/setuptools/issue/134 for info + # on script file naming and downstream issues with SVR4 + name = 'script.tmpl' + if dev_path: + name = name.replace('.tmpl', ' (dev).tmpl') + + raw_bytes = resource_string('setuptools', name) + return raw_bytes.decode('utf-8') + + def write_script(self, script_name, contents, mode="t", blockers=()): + """Write an executable file to the scripts directory""" + self.delete_blockers( # clean up old .py/.pyw w/o a script + [os.path.join(self.script_dir, x) for x in blockers] + ) + log.info("Installing %s script to %s", script_name, self.script_dir) + target = os.path.join(self.script_dir, script_name) + self.add_output(target) + + mask = current_umask() + if not self.dry_run: + ensure_directory(target) + if os.path.exists(target): + os.unlink(target) + f = open(target, "w" + mode) + f.write(contents) + f.close() + chmod(target, 0o777 - mask) + + def install_eggs(self, spec, dist_filename, tmpdir): + # .egg dirs or files are already built, so just return them + if dist_filename.lower().endswith('.egg'): + return [self.install_egg(dist_filename, tmpdir)] + elif dist_filename.lower().endswith('.exe'): + return [self.install_exe(dist_filename, tmpdir)] + + # Anything else, try to extract and build + setup_base = tmpdir + if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): + unpack_archive(dist_filename, tmpdir, self.unpack_progress) + elif os.path.isdir(dist_filename): + setup_base = os.path.abspath(dist_filename) + + if (setup_base.startswith(tmpdir) # something we downloaded + and self.build_directory and spec is not None): + setup_base = self.maybe_move(spec, dist_filename, setup_base) + + # Find the setup.py file + setup_script = os.path.join(setup_base, 'setup.py') + + if not os.path.exists(setup_script): + setups = glob(os.path.join(setup_base, '*', 'setup.py')) + if not setups: + raise DistutilsError( + "Couldn't find a setup script in %s" % + os.path.abspath(dist_filename) + ) + if len(setups) > 1: + raise DistutilsError( + "Multiple setup scripts in %s" % + os.path.abspath(dist_filename) + ) + setup_script = setups[0] + + # Now run it, and return the result + if self.editable: + log.info(self.report_editable(spec, setup_script)) + return [] + else: + return self.build_and_install(setup_script, setup_base) + + def egg_distribution(self, egg_path): + if os.path.isdir(egg_path): + metadata = PathMetadata(egg_path, os.path.join(egg_path, + 'EGG-INFO')) + else: + metadata = EggMetadata(zipimport.zipimporter(egg_path)) + return Distribution.from_filename(egg_path, metadata=metadata) + + def install_egg(self, egg_path, tmpdir): + destination = os.path.join(self.install_dir, + os.path.basename(egg_path)) + destination = os.path.abspath(destination) + if not self.dry_run: + ensure_directory(destination) + + dist = self.egg_distribution(egg_path) + if not samefile(egg_path, destination): + if os.path.isdir(destination) and not os.path.islink(destination): + dir_util.remove_tree(destination, dry_run=self.dry_run) + elif os.path.exists(destination): + self.execute(os.unlink, (destination,), "Removing " + + destination) + try: + new_dist_is_zipped = False + if os.path.isdir(egg_path): + if egg_path.startswith(tmpdir): + f, m = shutil.move, "Moving" + else: + f, m = shutil.copytree, "Copying" + elif self.should_unzip(dist): + self.mkpath(destination) + f, m = self.unpack_and_compile, "Extracting" + else: + 
new_dist_is_zipped = True + if egg_path.startswith(tmpdir): + f, m = shutil.move, "Moving" + else: + f, m = shutil.copy2, "Copying" + self.execute(f, (egg_path, destination), + (m + " %s to %s") % + (os.path.basename(egg_path), + os.path.dirname(destination))) + update_dist_caches(destination, + fix_zipimporter_caches=new_dist_is_zipped) + except: + update_dist_caches(destination, fix_zipimporter_caches=False) + raise + + self.add_output(destination) + return self.egg_distribution(destination) + + def install_exe(self, dist_filename, tmpdir): + # See if it's valid, get data + cfg = extract_wininst_cfg(dist_filename) + if cfg is None: + raise DistutilsError( + "%s is not a valid distutils Windows .exe" % dist_filename + ) + # Create a dummy distribution object until we build the real distro + dist = Distribution( + None, + project_name=cfg.get('metadata', 'name'), + version=cfg.get('metadata', 'version'), platform=get_platform(), + ) + + # Convert the .exe to an unpacked egg + egg_path = dist.location = os.path.join(tmpdir, dist.egg_name() + + '.egg') + egg_tmp = egg_path + '.tmp' + _egg_info = os.path.join(egg_tmp, 'EGG-INFO') + pkg_inf = os.path.join(_egg_info, 'PKG-INFO') + ensure_directory(pkg_inf) # make sure EGG-INFO dir exists + dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX + self.exe_to_egg(dist_filename, egg_tmp) + + # Write EGG-INFO/PKG-INFO + if not os.path.exists(pkg_inf): + f = open(pkg_inf, 'w') + f.write('Metadata-Version: 1.0\n') + for k, v in cfg.items('metadata'): + if k != 'target_version': + f.write('%s: %s\n' % (k.replace('_', '-').title(), v)) + f.close() + script_dir = os.path.join(_egg_info, 'scripts') + self.delete_blockers( # delete entry-point scripts to avoid duping + [os.path.join(script_dir, args[0]) for args in + get_script_args(dist)] + ) + # Build .egg file from tmpdir + bdist_egg.make_zipfile( + egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run + ) + # install the .egg + return self.install_egg(egg_path, tmpdir) + + def exe_to_egg(self, dist_filename, egg_tmp): + """Extract a bdist_wininst to the directories an egg would use""" + # Check for .pth file and set up prefix translations + prefixes = get_exe_prefixes(dist_filename) + to_compile = [] + native_libs = [] + top_level = {} + + def process(src, dst): + s = src.lower() + for old, new in prefixes: + if s.startswith(old): + src = new + src[len(old):] + parts = src.split('/') + dst = os.path.join(egg_tmp, *parts) + dl = dst.lower() + if dl.endswith('.pyd') or dl.endswith('.dll'): + parts[-1] = bdist_egg.strip_module(parts[-1]) + top_level[os.path.splitext(parts[0])[0]] = 1 + native_libs.append(src) + elif dl.endswith('.py') and old != 'SCRIPTS/': + top_level[os.path.splitext(parts[0])[0]] = 1 + to_compile.append(dst) + return dst + if not src.endswith('.pth'): + log.warn("WARNING: can't process %s", src) + return None + + # extract, tracking .pyd/.dll->native_libs and .py -> to_compile + unpack_archive(dist_filename, egg_tmp, process) + stubs = [] + for res in native_libs: + if res.lower().endswith('.pyd'): # create stubs for .pyd's + parts = res.split('/') + resource = parts[-1] + parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py' + pyfile = os.path.join(egg_tmp, *parts) + to_compile.append(pyfile) + stubs.append(pyfile) + bdist_egg.write_stub(resource, pyfile) + self.byte_compile(to_compile) # compile .py's + bdist_egg.write_safety_flag( + os.path.join(egg_tmp, 'EGG-INFO'), + bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag + + for name in 'top_level', 
'native_libs': + if locals()[name]: + txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt') + if not os.path.exists(txt): + f = open(txt, 'w') + f.write('\n'.join(locals()[name]) + '\n') + f.close() + + def installation_report(self, req, dist, what="Installed"): + """Helpful installation message for display to package users""" + msg = "\n%(what)s %(eggloc)s%(extras)s" + if self.multi_version and not self.no_report: + msg += """ + +Because this distribution was installed --multi-version, before you can +import modules from this package in an application, you will need to +'import pkg_resources' and then use a 'require()' call similar to one of +these examples, in order to select the desired version: + + pkg_resources.require("%(name)s") # latest installed version + pkg_resources.require("%(name)s==%(version)s") # this exact version + pkg_resources.require("%(name)s>=%(version)s") # this version or higher +""" + if self.install_dir not in map(normalize_path, sys.path): + msg += """ + +Note also that the installation directory must be on sys.path at runtime for +this to work. (e.g. by being the application's script directory, by being on +PYTHONPATH, or by being added to sys.path by your code.) +""" + eggloc = dist.location + name = dist.project_name + version = dist.version + extras = '' # TODO: self.report_extras(req, dist) + return msg % locals() + + def report_editable(self, spec, setup_script): + dirname = os.path.dirname(setup_script) + python = sys.executable + return """\nExtracted editable version of %(spec)s to %(dirname)s + +If it uses setuptools in its setup script, you can activate it in +"development" mode by going to that directory and running:: + + %(python)s setup.py develop + +See the setuptools documentation for the "develop" command for more info. +""" % locals() + + def run_setup(self, setup_script, setup_base, args): + sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) + sys.modules.setdefault('distutils.command.egg_info', egg_info) + + args = list(args) + if self.verbose > 2: + v = 'v' * (self.verbose - 1) + args.insert(0, '-' + v) + elif self.verbose < 2: + args.insert(0, '-q') + if self.dry_run: + args.insert(0, '-n') + log.info( + "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args) + ) + try: + run_setup(setup_script, args) + except SystemExit: + v = sys.exc_info()[1] + raise DistutilsError("Setup script exited with %s" % (v.args[0],)) + + def build_and_install(self, setup_script, setup_base): + args = ['bdist_egg', '--dist-dir'] + + dist_dir = tempfile.mkdtemp( + prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) + ) + try: + self._set_fetcher_options(os.path.dirname(setup_script)) + args.append(dist_dir) + + self.run_setup(setup_script, setup_base, args) + all_eggs = Environment([dist_dir]) + eggs = [] + for key in all_eggs: + for dist in all_eggs[key]: + eggs.append(self.install_egg(dist.location, setup_base)) + if not eggs and not self.dry_run: + log.warn("No eggs found in %s (setup script problem?)", + dist_dir) + return eggs + finally: + rmtree(dist_dir) + log.set_verbosity(self.verbose) # restore our log verbosity + + def _set_fetcher_options(self, base): + """ + When easy_install is about to run bdist_egg on a source dist, that + source dist might have 'setup_requires' directives, requiring + additional fetching. Ensure the fetcher options given to easy_install + are available to that command as well. + """ + # find the fetch options from easy_install and write them out + # to the setup.cfg file. 
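+        # Illustration only (hypothetical values, not from any real
+        # invocation): had easy_install been run with
+        # --find-links=/tmp/wheels --index-url=https://example.org/simple,
+        # the edit_config() call below would leave the source dist with
+        # roughly this in its setup.cfg:
+        #
+        #     [easy_install]
+        #     find-links = /tmp/wheels
+        #     index-url = https://example.org/simple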
+        ei_opts = self.distribution.get_option_dict('easy_install').copy()
+        fetch_directives = (
+            'find_links', 'site_dirs', 'index_url', 'optimize',
+            'allow_hosts',
+        )
+        fetch_options = {}
+        for key, val in ei_opts.items():
+            if key not in fetch_directives:
+                continue
+            fetch_options[key.replace('_', '-')] = val[1]
+        # create a settings dictionary suitable for `edit_config`
+        settings = dict(easy_install=fetch_options)
+        cfg_filename = os.path.join(base, 'setup.cfg')
+        setopt.edit_config(cfg_filename, settings)
+
+    def update_pth(self, dist):
+        if self.pth_file is None:
+            return
+
+        for d in self.pth_file[dist.key]:  # drop old entries
+            if self.multi_version or d.location != dist.location:
+                log.info("Removing %s from easy-install.pth file", d)
+                self.pth_file.remove(d)
+                if d.location in self.shadow_path:
+                    self.shadow_path.remove(d.location)
+
+        if not self.multi_version:
+            if dist.location in self.pth_file.paths:
+                log.info(
+                    "%s is already the active version in easy-install.pth",
+                    dist
+                )
+            else:
+                log.info("Adding %s to easy-install.pth file", dist)
+                self.pth_file.add(dist)  # add new entry
+                if dist.location not in self.shadow_path:
+                    self.shadow_path.append(dist.location)
+
+        if not self.dry_run:
+            self.pth_file.save()
+
+            if dist.key == 'setuptools':
+                # Ensure that setuptools itself never becomes unavailable!
+                # XXX should this check for latest version?
+                filename = os.path.join(self.install_dir, 'setuptools.pth')
+                if os.path.islink(filename):
+                    os.unlink(filename)
+                f = open(filename, 'wt')
+                f.write(self.pth_file.make_relative(dist.location) + '\n')
+                f.close()
+
+    def unpack_progress(self, src, dst):
+        # Progress filter for unpacking
+        log.debug("Unpacking %s to %s", src, dst)
+        return dst  # only unpack-and-compile skips files for dry run
+
+    def unpack_and_compile(self, egg_path, destination):
+        to_compile = []
+        to_chmod = []
+
+        def pf(src, dst):
+            if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
+                to_compile.append(dst)
+            elif dst.endswith('.dll') or dst.endswith('.so'):
+                to_chmod.append(dst)
+            self.unpack_progress(src, dst)
+            return not self.dry_run and dst or None
+
+        unpack_archive(egg_path, destination, pf)
+        self.byte_compile(to_compile)
+        if not self.dry_run:
+            for f in to_chmod:
+                mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
+                chmod(f, mode)
+
+    def byte_compile(self, to_compile):
+        if _dont_write_bytecode:
+            self.warn('byte-compiling is disabled, skipping.')
+            return
+
+        from distutils.util import byte_compile
+
+        try:
+            # try to make the byte compile messages quieter
+            log.set_verbosity(self.verbose - 1)
+
+            byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
+            if self.optimize:
+                byte_compile(
+                    to_compile, optimize=self.optimize, force=1,
+                    dry_run=self.dry_run
+                )
+        finally:
+            log.set_verbosity(self.verbose)  # restore original verbosity
+
+    def no_default_version_msg(self):
+        template = """bad install directory or PYTHONPATH
+
+You are attempting to install a package to a directory that is not
+on PYTHONPATH and which Python does not read ".pth" files from. The
+installation directory you specified (via --install-dir, --prefix, or
+the distutils default setting) was:
+
+    %s
+
+and your PYTHONPATH environment variable currently contains:
+
+    %r
+
+Here are some of your options for correcting the problem:
+
+* You can choose a different installation directory, i.e., one that is
+  on PYTHONPATH or supports .pth files
+
+* You can add the installation directory to the PYTHONPATH environment
+  variable.
(It must then also be on PYTHONPATH whenever you run + Python and want to use the package(s) you are installing.) + +* You can set up the installation directory to support ".pth" files by + using one of the approaches described here: + + https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations + +Please make the appropriate changes for your system and try again.""" + return template % (self.install_dir, os.environ.get('PYTHONPATH', '')) + + def install_site_py(self): + """Make sure there's a site.py in the target dir, if needed""" + + if self.sitepy_installed: + return # already did it, or don't need to + + sitepy = os.path.join(self.install_dir, "site.py") + source = resource_string("setuptools", "site-patch.py") + current = "" + + if os.path.exists(sitepy): + log.debug("Checking existing site.py in %s", self.install_dir) + f = open(sitepy, 'rb') + current = f.read() + # we want str, not bytes + if PY3: + current = current.decode() + + f.close() + if not current.startswith('def __boot():'): + raise DistutilsError( + "%s is not a setuptools-generated site.py; please" + " remove it." % sitepy + ) + + if current != source: + log.info("Creating %s", sitepy) + if not self.dry_run: + ensure_directory(sitepy) + f = open(sitepy, 'wb') + f.write(source) + f.close() + self.byte_compile([sitepy]) + + self.sitepy_installed = True + + def create_home_path(self): + """Create directories under ~.""" + if not self.user: + return + home = convert_path(os.path.expanduser("~")) + for name, path in iteritems(self.config_vars): + if path.startswith(home) and not os.path.isdir(path): + self.debug_print("os.makedirs('%s', 0o700)" % path) + os.makedirs(path, 0o700) + + if sys.version[:3] in ('2.3', '2.4', '2.5') or 'real_prefix' in sys.__dict__: + sitedir_name = 'site-packages' + else: + sitedir_name = 'dist-packages' + + INSTALL_SCHEMES = dict( + posix=dict( + install_dir='$base/lib/python$py_version_short/site-packages', + script_dir='$base/bin', + ), + unix_local = dict( + install_dir = '$base/local/lib/python$py_version_short/%s' % sitedir_name, + script_dir = '$base/local/bin', + ), + posix_local = dict( + install_dir = '$base/local/lib/python$py_version_short/%s' % sitedir_name, + script_dir = '$base/local/bin', + ), + deb_system = dict( + install_dir = '$base/lib/python3/%s' % sitedir_name, + script_dir = '$base/bin', + ), + ) + + DEFAULT_SCHEME = dict( + install_dir='$base/Lib/site-packages', + script_dir='$base/Scripts', + ) + + def _expand(self, *attrs): + config_vars = self.get_finalized_command('install').config_vars + + if self.prefix or self.install_layout: + if self.install_layout and self.install_layout.lower() in ['deb']: + scheme_name = "deb_system" + self.prefix = '/usr' + elif self.prefix or 'real_prefix' in sys.__dict__: + scheme_name = os.name + else: + scheme_name = "posix_local" + # Set default install_dir/scripts from --prefix + config_vars = config_vars.copy() + config_vars['base'] = self.prefix + scheme = self.INSTALL_SCHEMES.get(scheme_name,self.DEFAULT_SCHEME) + for attr, val in scheme.items(): + if getattr(self, attr, None) is None: + setattr(self, attr, val) + + from distutils.util import subst_vars + + for attr in attrs: + val = getattr(self, attr) + if val is not None: + val = subst_vars(val, config_vars) + if os.name == 'posix': + val = os.path.expanduser(val) + setattr(self, attr, val) + + +def get_site_dirs(): + # return a list of 'site' dirs + sitedirs = [_f for _f in os.environ.get('PYTHONPATH', + '').split(os.pathsep) if _f] + prefixes = 
[sys.prefix] + if sys.exec_prefix != sys.prefix: + prefixes.append(sys.exec_prefix) + for prefix in prefixes: + if prefix: + if sys.platform in ('os2emx', 'riscos'): + sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) + elif os.sep == '/': + sitedirs.extend([os.path.join(prefix, + "lib", + "python" + sys.version[:3], + "site-packages"), + os.path.join(prefix, "lib", "site-python")]) + else: + if sys.version[:3] in ('2.3', '2.4', '2.5'): + sdir = "site-packages" + else: + sdir = "dist-packages" + sitedirs.extend( + [os.path.join(prefix, "local/lib", "python" + sys.version[:3], sdir), + os.path.join(prefix, "lib", "python" + sys.version[:3], sdir)] + ) + if sys.platform == 'darwin': + # for framework builds *only* we add the standard Apple + # locations. Currently only per-user, but /Library and + # /Network/Library could be added too + if 'Python.framework' in prefix: + home = os.environ.get('HOME') + if home: + sitedirs.append( + os.path.join(home, + 'Library', + 'Python', + sys.version[:3], + 'site-packages')) + lib_paths = get_path('purelib'), get_path('platlib') + for site_lib in lib_paths: + if site_lib not in sitedirs: + sitedirs.append(site_lib) + + if site.ENABLE_USER_SITE: + sitedirs.append(site.USER_SITE) + + sitedirs = list(map(normalize_path, sitedirs)) + + return sitedirs + + +def expand_paths(inputs): + """Yield sys.path directories that might contain "old-style" packages""" + + seen = {} + + for dirname in inputs: + dirname = normalize_path(dirname) + if dirname in seen: + continue + + seen[dirname] = 1 + if not os.path.isdir(dirname): + continue + + files = os.listdir(dirname) + yield dirname, files + + for name in files: + if not name.endswith('.pth'): + # We only care about the .pth files + continue + if name in ('easy-install.pth', 'setuptools.pth'): + # Ignore .pth files that we control + continue + + # Read the .pth file + f = open(os.path.join(dirname, name)) + lines = list(yield_lines(f)) + f.close() + + # Yield existing non-dupe, non-import directory lines from it + for line in lines: + if not line.startswith("import"): + line = normalize_path(line.rstrip()) + if line not in seen: + seen[line] = 1 + if not os.path.isdir(line): + continue + yield line, os.listdir(line) + + +def extract_wininst_cfg(dist_filename): + """Extract configuration data from a bdist_wininst .exe + + Returns a ConfigParser.RawConfigParser, or None + """ + f = open(dist_filename, 'rb') + try: + endrec = zipfile._EndRecData(f) + if endrec is None: + return None + + prepended = (endrec[9] - endrec[5]) - endrec[6] + if prepended < 12: # no wininst data here + return None + f.seek(prepended - 12) + + from setuptools.compat import StringIO, ConfigParser + import struct + + tag, cfglen, bmlen = struct.unpack("<iii", f.read(12)) + if tag not in (0x1234567A, 0x1234567B): + return None # not a valid tag + + f.seek(prepended - (12 + cfglen)) + cfg = ConfigParser.RawConfigParser( + {'version': '', 'target_version': ''}) + try: + part = f.read(cfglen) + # part is in bytes, but we need to read up to the first null + # byte. + if sys.version_info >= (2, 6): + null_byte = bytes([0]) + else: + null_byte = chr(0) + config = part.split(null_byte, 1)[0] + # Now the config is in bytes, but for RawConfigParser, it should + # be text, so decode it. 
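+            # (A best-effort assumption: the filesystem encoding is used
+            # because the .exe records no explicit encoding for this block.)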
+ config = config.decode(sys.getfilesystemencoding()) + cfg.readfp(StringIO(config)) + except ConfigParser.Error: + return None + if not cfg.has_section('metadata') or not cfg.has_section('Setup'): + return None + return cfg + + finally: + f.close() + + +def get_exe_prefixes(exe_filename): + """Get exe->egg path translations for a given .exe file""" + + prefixes = [ + ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''), + ('PLATLIB/', ''), + ('SCRIPTS/', 'EGG-INFO/scripts/'), + ('DATA/lib/site-packages', ''), + ] + z = zipfile.ZipFile(exe_filename) + try: + for info in z.infolist(): + name = info.filename + parts = name.split('/') + if len(parts) == 3 and parts[2] == 'PKG-INFO': + if parts[1].endswith('.egg-info'): + prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/')) + break + if len(parts) != 2 or not name.endswith('.pth'): + continue + if name.endswith('-nspkg.pth'): + continue + if parts[0].upper() in ('PURELIB', 'PLATLIB'): + contents = z.read(name) + if PY3: + contents = contents.decode() + for pth in yield_lines(contents): + pth = pth.strip().replace('\\', '/') + if not pth.startswith('import'): + prefixes.append((('%s/%s/' % (parts[0], pth)), '')) + finally: + z.close() + prefixes = [(x.lower(), y) for x, y in prefixes] + prefixes.sort() + prefixes.reverse() + return prefixes + + +def parse_requirement_arg(spec): + try: + return Requirement.parse(spec) + except ValueError: + raise DistutilsError( + "Not a URL, existing file, or requirement spec: %r" % (spec,) + ) + + +class PthDistributions(Environment): + """A .pth file with Distribution paths in it""" + + dirty = False + + def __init__(self, filename, sitedirs=()): + self.filename = filename + self.sitedirs = list(map(normalize_path, sitedirs)) + self.basedir = normalize_path(os.path.dirname(self.filename)) + self._load() + Environment.__init__(self, [], None, None) + for path in yield_lines(self.paths): + list(map(self.add, find_distributions(path, True))) + + def _load(self): + self.paths = [] + saw_import = False + seen = dict.fromkeys(self.sitedirs) + if os.path.isfile(self.filename): + f = open(self.filename, 'rt') + for line in f: + if line.startswith('import'): + saw_import = True + continue + path = line.rstrip() + self.paths.append(path) + if not path.strip() or path.strip().startswith('#'): + continue + # skip non-existent paths, in case somebody deleted a package + # manually, and duplicate paths as well + path = self.paths[-1] = normalize_path( + os.path.join(self.basedir, path) + ) + if not os.path.exists(path) or path in seen: + self.paths.pop() # skip it + self.dirty = True # we cleaned up, so we're dirty now :) + continue + seen[path] = 1 + f.close() + + if self.paths and not saw_import: + self.dirty = True # ensure anything we touch has import wrappers + while self.paths and not self.paths[-1].strip(): + self.paths.pop() + + def save(self): + """Write changed .pth file back to disk""" + if not self.dirty: + return + + data = '\n'.join(map(self.make_relative, self.paths)) + if data: + log.debug("Saving %s", self.filename) + data = ( + "import sys; sys.__plen = len(sys.path)\n" + "%s\n" + "import sys; new=sys.path[sys.__plen:];" + " del sys.path[sys.__plen:];" + " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;" + " sys.__egginsert = p+len(new)\n" + ) % data + + if os.path.islink(self.filename): + os.unlink(self.filename) + f = open(self.filename, 'wt') + f.write(data) + f.close() + + elif os.path.exists(self.filename): + log.debug("Deleting empty %s", self.filename) + os.unlink(self.filename) + + self.dirty = 
False
+
+    def add(self, dist):
+        """Add `dist` to the distribution map"""
+        if (dist.location not in self.paths and (
+            dist.location not in self.sitedirs or
+            dist.location == os.getcwd()  # account for '.' being in PYTHONPATH
+        )):
+            self.paths.append(dist.location)
+            self.dirty = True
+        Environment.add(self, dist)
+
+    def remove(self, dist):
+        """Remove `dist` from the distribution map"""
+        while dist.location in self.paths:
+            self.paths.remove(dist.location)
+            self.dirty = True
+        Environment.remove(self, dist)
+
+    def make_relative(self, path):
+        npath, last = os.path.split(normalize_path(path))
+        baselen = len(self.basedir)
+        parts = [last]
+        sep = os.altsep == '/' and '/' or os.sep
+        while len(npath) >= baselen:
+            if npath == self.basedir:
+                parts.append(os.curdir)
+                parts.reverse()
+                return sep.join(parts)
+            npath, last = os.path.split(npath)
+            parts.append(last)
+        else:
+            return path
+
+
+def _first_line_re():
+    """
+    Return a regular expression based on first_line_re suitable for matching
+    strings.
+    """
+    if isinstance(first_line_re.pattern, str):
+        return first_line_re
+
+    # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
+    return re.compile(first_line_re.pattern.decode())
+
+
+def get_script_header(script_text, executable=sys_executable, wininst=False):
+    """Create a #! line, getting options (if any) from script_text"""
+    first = (script_text + '\n').splitlines()[0]
+    match = _first_line_re().match(first)
+    options = ''
+    if match:
+        options = match.group(1) or ''
+        if options:
+            options = ' ' + options
+    if wininst:
+        executable = "python.exe"
+    else:
+        executable = nt_quote_arg(executable)
+    hdr = "#!%(executable)s%(options)s\n" % locals()
+    if not isascii(hdr):
+        # Non-ascii path to sys.executable, use -x to prevent warnings
+        if options:
+            if options.strip().startswith('-'):
+                options = ' -x' + options.strip()[1:]
+            # else: punt, we can't do it, let the warning happen anyway
+        else:
+            options = ' -x'
+    executable = fix_jython_executable(executable, options)
+    hdr = "#!%(executable)s%(options)s\n" % locals()
+    return hdr
+
+
+def auto_chmod(func, arg, exc):
+    if func is os.remove and os.name == 'nt':
+        chmod(arg, stat.S_IWRITE)
+        return func(arg)
+    et, ev, _ = sys.exc_info()
+    reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg))))
+
+
+def update_dist_caches(dist_path, fix_zipimporter_caches):
+    """
+    Fix any globally cached `dist_path` related data
+
+    `dist_path` should be a path of a newly installed egg distribution (zipped
+    or unzipped).
+
+    sys.path_importer_cache contains finder objects that have been cached when
+    importing data from the original distribution. Any such finders need to be
+    cleared since the replacement distribution might be packaged differently,
+    e.g. a zipped egg distribution might get replaced with an unzipped egg
+    folder or vice versa. Having the old finders cached may then cause Python
+    to attempt loading modules from the replacement distribution using an
+    incorrect loader.
+
+    zipimport.zipimporter objects are Python loaders charged with importing
+    data packaged inside zip archives. If stale loaders referencing the
+    original distribution are left behind, they can fail to load modules from
+    the replacement distribution. E.g. if an old zipimport.zipimporter instance
+    is used to load data from a new zipped egg archive, it may cause the
+    operation to attempt to locate the requested data in the wrong location -
+    one indicated by the original distribution's zip archive directory
+    information. Such an operation may then fail outright, e.g. report having
+    read a 'bad local file header', or, even worse, it may fail silently and
+    return invalid data.
+
+    zipimport._zip_directory_cache contains cached zip archive directory
+    information for all existing zipimport.zipimporter instances and all such
+    instances connected to the same archive share the same cached directory
+    information.
+
+    If asked, and the underlying Python implementation allows it, we can fix
+    all existing zipimport.zipimporter instances instead of having to track
+    them down and remove them one by one, by updating their shared cached zip
+    archive directory information. This, of course, assumes that the
+    replacement distribution is packaged as a zipped egg.
+
+    If not asked to fix existing zipimport.zipimporter instances, we still do
+    our best to clear any remaining zipimport.zipimporter related cached data
+    that might somehow later get used when attempting to load data from the new
+    distribution and thus cause such load operations to fail. Note that when
+    tracking down such remaining stale data, we cannot catch every conceivable
+    usage from here, and we clear only those that we know of and have found to
+    cause problems if left alive. Any remaining caches should be updated by
+    whoever is in charge of maintaining them, i.e. they should be ready to
+    handle us replacing their zip archives with new distributions at runtime.
+
+    """
+    # There are several other known sources of stale zipimport.zipimporter
+    # instances that we do not clear here, but might if ever given a reason to
+    # do so:
+    # * Global setuptools pkg_resources.working_set (a.k.a. 'master working
+    #   set') may contain distributions which may in turn contain their
+    #   zipimport.zipimporter loaders.
+    # * Several zipimport.zipimporter loaders held by local variables further
+    #   up the function call stack when running the setuptools installation.
+    # * Already loaded modules may have their __loader__ attribute set to the
+    #   exact loader instance used when importing them. Python 3.4 docs state
+    #   that this information is intended mostly for introspection and so is
+    #   not expected to cause us problems.
+    normalized_path = normalize_path(dist_path)
+    _uncache(normalized_path, sys.path_importer_cache)
+    if fix_zipimporter_caches:
+        _replace_zip_directory_cache_data(normalized_path)
+    else:
+        # Here, even though we do not want to fix existing and now stale
+        # zipimporter cache information, we still want to remove it. Related to
+        # Python's zip archive directory information cache, we clear each of
+        # its stale entries in two phases:
+        #   1. Clear the entry so attempting to access zip archive information
+        #      via any existing stale zipimport.zipimporter instances fails.
+        #   2. Remove the entry from the cache so any newly constructed
+        #      zipimport.zipimporter instances do not end up using old stale
+        #      zip archive directory information.
+        # This whole stale data removal step does not seem strictly necessary,
+        # but has been left in because it was done before we started replacing
+        # the zip archive directory information cache content if possible, and
+        # there are no relevant unit tests that we can depend on to tell us if
+        # this is really needed.
+        _remove_and_clear_zip_directory_cache_data(normalized_path)
+
+
+def _collect_zipimporter_cache_entries(normalized_path, cache):
+    """
+    Return zipimporter cache entry keys related to a given normalized path.
+
+    Alternative path spellings (e.g.
those using different character case or + those using alternative path separators) related to the same path are + included. Any sub-path entries are included as well, i.e. those + corresponding to zip archives embedded in other zip archives. + + """ + result = [] + prefix_len = len(normalized_path) + for p in cache: + np = normalize_path(p) + if (np.startswith(normalized_path) and + np[prefix_len:prefix_len + 1] in (os.sep, '')): + result.append(p) + return result + + +def _update_zipimporter_cache(normalized_path, cache, updater=None): + """ + Update zipimporter cache data for a given normalized path. + + Any sub-path entries are processed as well, i.e. those corresponding to zip + archives embedded in other zip archives. + + Given updater is a callable taking a cache entry key and the original entry + (after already removing the entry from the cache), and expected to update + the entry and possibly return a new one to be inserted in its place. + Returning None indicates that the entry should not be replaced with a new + one. If no updater is given, the cache entries are simply removed without + any additional processing, the same as if the updater simply returned None. + + """ + for p in _collect_zipimporter_cache_entries(normalized_path, cache): + # N.B. pypy's custom zipimport._zip_directory_cache implementation does + # not support the complete dict interface: + # * Does not support item assignment, thus not allowing this function + # to be used only for removing existing cache entries. + # * Does not support the dict.pop() method, forcing us to use the + # get/del patterns instead. For more detailed information see the + # following links: + # https://bitbucket.org/pypa/setuptools/issue/202/more-robust-zipimporter-cache-invalidation#comment-10495960 + # https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99 + old_entry = cache[p] + del cache[p] + new_entry = updater and updater(p, old_entry) + if new_entry is not None: + cache[p] = new_entry + + +def _uncache(normalized_path, cache): + _update_zipimporter_cache(normalized_path, cache) + + +def _remove_and_clear_zip_directory_cache_data(normalized_path): + def clear_and_remove_cached_zip_archive_directory_data(path, old_entry): + old_entry.clear() + + _update_zipimporter_cache( + normalized_path, zipimport._zip_directory_cache, + updater=clear_and_remove_cached_zip_archive_directory_data) + +# PyPy Python implementation does not allow directly writing to the +# zipimport._zip_directory_cache and so prevents us from attempting to correct +# its content. The best we can do there is clear the problematic cache content +# and have PyPy repopulate it as needed. The downside is that if there are any +# stale zipimport.zipimporter instances laying around, attempting to use them +# will fail due to not having its zip archive directory information available +# instead of being automatically corrected to use the new correct zip archive +# directory information. +if '__pypy__' in sys.builtin_module_names: + _replace_zip_directory_cache_data = \ + _remove_and_clear_zip_directory_cache_data +else: + def _replace_zip_directory_cache_data(normalized_path): + def replace_cached_zip_archive_directory_data(path, old_entry): + # N.B. 
In theory, we could load the zip directory information just + # once for all updated path spellings, and then copy it locally and + # update its contained path strings to contain the correct + # spelling, but that seems like a way too invasive move (this cache + # structure is not officially documented anywhere and could in + # theory change with new Python releases) for no significant + # benefit. + old_entry.clear() + zipimport.zipimporter(path) + old_entry.update(zipimport._zip_directory_cache[path]) + return old_entry + + _update_zipimporter_cache( + normalized_path, zipimport._zip_directory_cache, + updater=replace_cached_zip_archive_directory_data) + + +def is_python(text, filename='<string>'): + "Is this string a valid Python script?" + try: + compile(text, filename, 'exec') + except (SyntaxError, TypeError): + return False + else: + return True + + +def is_sh(executable): + """Determine if the specified executable is a .sh (contains a #! line)""" + try: + fp = open(executable) + magic = fp.read(2) + fp.close() + except (OSError, IOError): + return executable + return magic == '#!' + + +def nt_quote_arg(arg): + """Quote a command line argument according to Windows parsing rules""" + + result = [] + needquote = False + nb = 0 + + needquote = (" " in arg) or ("\t" in arg) + if needquote: + result.append('"') + + for c in arg: + if c == '\\': + nb += 1 + elif c == '"': + # double preceding backslashes, then add a \" + result.append('\\' * (nb * 2) + '\\"') + nb = 0 + else: + if nb: + result.append('\\' * nb) + nb = 0 + result.append(c) + + if nb: + result.append('\\' * nb) + + if needquote: + result.append('\\' * nb) # double the trailing backslashes + result.append('"') + + return ''.join(result) + + +def is_python_script(script_text, filename): + """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. + """ + if filename.endswith('.py') or filename.endswith('.pyw'): + return True # extension says it's Python + if is_python(script_text, filename): + return True # it's syntactically valid Python + if script_text.startswith('#!'): + # It begins with a '#!' line, so check if 'python' is in it somewhere + return 'python' in script_text.splitlines()[0].lower() + + return False # Not any Python I can recognize + + +try: + from os import chmod as _chmod +except ImportError: + # Jython compatibility + def _chmod(*args): + pass + + +def chmod(path, mode): + log.debug("changing mode of %s to %o", path, mode) + try: + _chmod(path, mode) + except os.error: + e = sys.exc_info()[1] + log.debug("chmod failed: %s", e) + + +def fix_jython_executable(executable, options): + if sys.platform.startswith('java') and is_sh(executable): + # Workaround for Jython is not needed on Linux systems. + import java + + if java.lang.System.getProperty("os.name") == "Linux": + return executable + + # Workaround Jython's sys.executable being a .sh (an invalid + # shebang line interpreter) + if options: + # Can't apply the workaround, leave it broken + log.warn( + "WARNING: Unable to adapt shebang line for Jython," + " the following script is NOT executable\n" + " see http://bugs.jython.org/issue1112 for" + " more information.") + else: + return '/usr/bin/env %s' % executable + return executable + + +class ScriptWriter(object): + """ + Encapsulates behavior around writing entry point scripts for console and + gui apps. 
+    """
+
+    template = textwrap.dedent("""
+        # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
+        __requires__ = %(spec)r
+        import sys
+        from pkg_resources import load_entry_point
+
+        if __name__ == '__main__':
+            sys.exit(
+                load_entry_point(%(spec)r, %(group)r, %(name)r)()
+            )
+    """).lstrip()
+
+    @classmethod
+    def get_script_args(cls, dist, executable=sys_executable, wininst=False):
+        """
+        Yield write_script() argument tuples for a distribution's entrypoints
+        """
+        gen_class = cls.get_writer(wininst)
+        spec = str(dist.as_requirement())
+        header = get_script_header("", executable, wininst)
+        for type_ in 'console', 'gui':
+            group = type_ + '_scripts'
+            for name, ep in dist.get_entry_map(group).items():
+                script_text = gen_class.template % locals()
+                for res in gen_class._get_script_args(type_, name, header,
+                                                      script_text):
+                    yield res
+
+    @classmethod
+    def get_writer(cls, force_windows):
+        if force_windows or sys.platform == 'win32':
+            return WindowsScriptWriter.get_writer()
+        return cls
+
+    @classmethod
+    def _get_script_args(cls, type_, name, header, script_text):
+        # Simply write the stub with no extension.
+        yield (name, header + script_text)
+
+
+class WindowsScriptWriter(ScriptWriter):
+    @classmethod
+    def get_writer(cls):
+        """
+        Get a script writer suitable for Windows
+        """
+        writer_lookup = dict(
+            executable=WindowsExecutableLauncherWriter,
+            natural=cls,
+        )
+        # for compatibility, use the executable launcher by default
+        launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
+        return writer_lookup[launcher]
+
+    @classmethod
+    def _get_script_args(cls, type_, name, header, script_text):
+        "For Windows, add a .py extension"
+        ext = dict(console='.pya', gui='.pyw')[type_]
+        if ext not in os.environ['PATHEXT'].lower().split(';'):
+            warnings.warn("%s not listed in PATHEXT; scripts will not be "
+                          "recognized as executables." % ext, UserWarning)
+        old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
+        old.remove(ext)
+        header = cls._adjust_header(type_, header)
+        blockers = [name + x for x in old]
+        yield name + ext, header + script_text, 't', blockers
+
+    @staticmethod
+    def _adjust_header(type_, orig_header):
+        """
+        Make sure 'pythonw' is used for gui and 'python' is used for
+        console (regardless of what sys.executable is).
+        """
+        pattern = 'pythonw.exe'
+        repl = 'python.exe'
+        if type_ == 'gui':
+            pattern, repl = repl, pattern
+        pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
+        new_header = pattern_ob.sub(string=orig_header, repl=repl)
+        clean_header = new_header[2:-1].strip('"')
+        if sys.platform == 'win32' and not os.path.exists(clean_header):
+            # the adjusted version doesn't exist, so return the original
+            return orig_header
+        return new_header
+
+
+class WindowsExecutableLauncherWriter(WindowsScriptWriter):
+    @classmethod
+    def _get_script_args(cls, type_, name, header, script_text):
+        """
+        For Windows, add a .py extension and an .exe launcher
+        """
+        if type_ == 'gui':
+            launcher_type = 'gui'
+            ext = '-script.pyw'
+            old = ['.pyw']
+        else:
+            launcher_type = 'cli'
+            ext = '-script.py'
+            old = ['.py', '.pyc', '.pyo']
+        hdr = cls._adjust_header(type_, header)
+        blockers = [name + x for x in old]
+        yield (name + ext, hdr + script_text, 't', blockers)
+        yield (
+            name + '.exe', get_win_launcher(launcher_type),
+            'b'  # write in binary mode
+        )
+        if not is_64bit():
+            # install a manifest for the launcher to prevent Windows
+            # from detecting it as an installer (which it will for
+            # launchers like easy_install.exe).
Consider only + # adding a manifest for launchers detected as installers. + # See Distribute #143 for details. + m_name = name + '.exe.manifest' + yield (m_name, load_launcher_manifest(name), 't') + + +# for backward-compatibility +get_script_args = ScriptWriter.get_script_args + + +def get_win_launcher(type): + """ + Load the Windows launcher (executable) suitable for launching a script. + + `type` should be either 'cli' or 'gui' + + Returns the executable as a byte string. + """ + launcher_fn = '%s.exe' % type + if platform.machine().lower() == 'arm': + launcher_fn = launcher_fn.replace(".", "-arm.") + if is_64bit(): + launcher_fn = launcher_fn.replace(".", "-64.") + else: + launcher_fn = launcher_fn.replace(".", "-32.") + return resource_string('setuptools', launcher_fn) + + +def load_launcher_manifest(name): + manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml') + if PY2: + return manifest % vars() + else: + return manifest.decode('utf-8') % vars() + + +def rmtree(path, ignore_errors=False, onerror=auto_chmod): + """Recursively delete a directory tree. + + This code is taken from the Python 2.4 version of 'shutil', because + the 2.3 version doesn't really work right. + """ + if ignore_errors: + def onerror(*args): + pass + elif onerror is None: + def onerror(*args): + raise + names = [] + try: + names = os.listdir(path) + except os.error: + onerror(os.listdir, path, sys.exc_info()) + for name in names: + fullname = os.path.join(path, name) + try: + mode = os.lstat(fullname).st_mode + except os.error: + mode = 0 + if stat.S_ISDIR(mode): + rmtree(fullname, ignore_errors, onerror) + else: + try: + os.remove(fullname) + except os.error: + onerror(os.remove, fullname, sys.exc_info()) + try: + os.rmdir(path) + except os.error: + onerror(os.rmdir, path, sys.exc_info()) + + +def current_umask(): + tmp = os.umask(0o022) + os.umask(tmp) + return tmp + + +def bootstrap(): + # This function is called when setuptools*.egg is run using /bin/sh + import setuptools + + argv0 = os.path.dirname(setuptools.__path__[0]) + sys.argv[0] = argv0 + sys.argv.append(argv0) + main() + + +def main(argv=None, **kw): + from setuptools import setup + from setuptools.dist import Distribution + import distutils.core + + USAGE = """\ +usage: %(script)s [options] requirement_or_url ... 
+ or: %(script)s --help +""" + + def gen_usage(script_name): + return USAGE % dict( + script=os.path.basename(script_name), + ) + + def with_ei_usage(f): + old_gen_usage = distutils.core.gen_usage + try: + distutils.core.gen_usage = gen_usage + return f() + finally: + distutils.core.gen_usage = old_gen_usage + + class DistributionWithoutHelpCommands(Distribution): + common_usage = "" + + def _show_help(self, *args, **kw): + with_ei_usage(lambda: Distribution._show_help(self, *args, **kw)) + + if argv is None: + argv = sys.argv[1:] + + with_ei_usage( + lambda: setup( + script_args=['-q', 'easy_install', '-v'] + argv, + script_name=sys.argv[0] or 'easy_install', + distclass=DistributionWithoutHelpCommands, **kw + ) + ) diff --git a/lib/python3.4/site-packages/setuptools/command/egg_info.py b/lib/python3.4/site-packages/setuptools/command/egg_info.py new file mode 100644 index 0000000..72493d0 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/egg_info.py @@ -0,0 +1,435 @@ +"""setuptools.command.egg_info + +Create a distribution's .egg-info directory and contents""" + +from distutils.filelist import FileList as _FileList +from distutils.util import convert_path +from distutils import log +import distutils.errors +import os +import re +import sys + +from setuptools import Command +from setuptools.command.sdist import sdist +from setuptools.compat import basestring, PY3, StringIO +from setuptools import svn_utils +from setuptools.command.sdist import walk_revctrl +from pkg_resources import ( + parse_requirements, safe_name, parse_version, + safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) +import setuptools.unicode_utils as unicode_utils + + +class egg_info(Command): + description = "create a distribution's .egg-info directory" + + user_options = [ + ('egg-base=', 'e', "directory containing .egg-info directories" + " (default: top of the source tree)"), + ('tag-svn-revision', 'r', + "Add subversion revision ID to version number"), + ('tag-date', 'd', "Add date stamp (e.g. 
20050528) to version number"), + ('tag-build=', 'b', "Specify explicit tag to add to version number"), + ('no-svn-revision', 'R', + "Don't add subversion revision ID [default]"), + ('no-date', 'D', "Don't include date stamp [default]"), + ] + + boolean_options = ['tag-date', 'tag-svn-revision'] + negative_opt = {'no-svn-revision': 'tag-svn-revision', + 'no-date': 'tag-date'} + + def initialize_options(self): + self.egg_name = None + self.egg_version = None + self.egg_base = None + self.egg_info = None + self.tag_build = None + self.tag_svn_revision = 0 + self.tag_date = 0 + self.broken_egg_info = False + self.vtags = None + + def save_version_info(self, filename): + from setuptools.command.setopt import edit_config + + values = dict( + egg_info=dict( + tag_svn_revision=0, + tag_date=0, + tag_build=self.tags(), + ) + ) + edit_config(filename, values) + + def finalize_options(self): + self.egg_name = safe_name(self.distribution.get_name()) + self.vtags = self.tags() + self.egg_version = self.tagged_version() + + try: + list( + parse_requirements('%s==%s' % (self.egg_name, + self.egg_version)) + ) + except ValueError: + raise distutils.errors.DistutilsOptionError( + "Invalid distribution name or version syntax: %s-%s" % + (self.egg_name, self.egg_version) + ) + + if self.egg_base is None: + dirs = self.distribution.package_dir + self.egg_base = (dirs or {}).get('', os.curdir) + + self.ensure_dirname('egg_base') + self.egg_info = to_filename(self.egg_name) + '.egg-info' + if self.egg_base != os.curdir: + self.egg_info = os.path.join(self.egg_base, self.egg_info) + if '-' in self.egg_name: + self.check_broken_egg_info() + + # Set package version for the benefit of dumber commands + # (e.g. sdist, bdist_wininst, etc.) + # + self.distribution.metadata.version = self.egg_version + + # If we bootstrapped around the lack of a PKG-INFO, as might be the + # case in a fresh checkout, make sure that any special tags get added + # to the version info + # + pd = self.distribution._patched_dist + if pd is not None and pd.key == self.egg_name.lower(): + pd._version = self.egg_version + pd._parsed_version = parse_version(self.egg_version) + self.distribution._patched_dist = None + + def write_or_delete_file(self, what, filename, data, force=False): + """Write `data` to `filename` or delete if empty + + If `data` is non-empty, this routine is the same as ``write_file()``. + If `data` is empty but not ``None``, this is the same as calling + ``delete_file(filename)`. If `data` is ``None``, then this is a no-op + unless `filename` exists, in which case a warning is issued about the + orphaned file (if `force` is false), or deleted (if `force` is true). + """ + if data: + self.write_file(what, filename, data) + elif os.path.exists(filename): + if data is None and not force: + log.warn( + "%s not set in setup(), but %s exists", what, filename + ) + return + else: + self.delete_file(filename) + + def write_file(self, what, filename, data): + """Write `data` to `filename` (if not a dry run) after announcing it + + `what` is used in a log message to identify what is being written + to the file. 
+ """ + log.info("writing %s to %s", what, filename) + if PY3: + data = data.encode("utf-8") + if not self.dry_run: + f = open(filename, 'wb') + f.write(data) + f.close() + + def delete_file(self, filename): + """Delete `filename` (if not a dry run) after announcing it""" + log.info("deleting %s", filename) + if not self.dry_run: + os.unlink(filename) + + def tagged_version(self): + version = self.distribution.get_version() + # egg_info may be called more than once for a distribution, + # in which case the version string already contains all tags. + if self.vtags and version.endswith(self.vtags): + return safe_version(version) + return safe_version(version + self.vtags) + + def run(self): + self.mkpath(self.egg_info) + installer = self.distribution.fetch_build_egg + for ep in iter_entry_points('egg_info.writers'): + writer = ep.load(installer=installer) + writer(self, ep.name, os.path.join(self.egg_info, ep.name)) + + # Get rid of native_libs.txt if it was put there by older bdist_egg + nl = os.path.join(self.egg_info, "native_libs.txt") + if os.path.exists(nl): + self.delete_file(nl) + + self.find_sources() + + def tags(self): + version = '' + if self.tag_build: + version += self.tag_build + if self.tag_svn_revision: + rev = self.get_svn_revision() + if rev: # is 0 if it's not an svn working copy + version += '-r%s' % rev + if self.tag_date: + import time + + version += time.strftime("-%Y%m%d") + return version + + @staticmethod + def get_svn_revision(): + return str(svn_utils.SvnInfo.load(os.curdir).get_revision()) + + def find_sources(self): + """Generate SOURCES.txt manifest file""" + manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") + mm = manifest_maker(self.distribution) + mm.manifest = manifest_filename + mm.run() + self.filelist = mm.filelist + + def check_broken_egg_info(self): + bei = self.egg_name + '.egg-info' + if self.egg_base != os.curdir: + bei = os.path.join(self.egg_base, bei) + if os.path.exists(bei): + log.warn( + "-" * 78 + '\n' + "Note: Your current .egg-info directory has a '-' in its name;" + '\nthis will not work correctly with "setup.py develop".\n\n' + 'Please rename %s to %s to correct this problem.\n' + '-' * 78, + bei, self.egg_info + ) + self.broken_egg_info = self.egg_info + self.egg_info = bei # make it work for now + + +class FileList(_FileList): + """File list that accepts only existing, platform-independent paths""" + + def append(self, item): + if item.endswith('\r'): # Fix older sdists built on Windows + item = item[:-1] + path = convert_path(item) + + if self._safe_path(path): + self.files.append(path) + + def extend(self, paths): + self.files.extend(filter(self._safe_path, paths)) + + def _repair(self): + """ + Replace self.files with only safe paths + + Because some owners of FileList manipulate the underlying + ``files`` attribute directly, this method must be called to + repair those paths. 
+        """
+        self.files = list(filter(self._safe_path, self.files))
+
+    def _safe_path(self, path):
+        enc_warn = "'%s' not %s encodable -- skipping"
+
+        # To avoid accidental transcoding errors, decode to unicode first
+        u_path = unicode_utils.filesys_decode(path)
+        if u_path is None:
+            log.warn("'%s' in unexpected encoding -- skipping" % path)
+            return False
+
+        # Must ensure utf-8 encodability
+        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
+        if utf8_path is None:
+            log.warn(enc_warn, path, 'utf-8')
+            return False
+
+        try:
+            # accept the path if either spelling checks out on disk
+            if os.path.exists(u_path) or os.path.exists(utf8_path):
+                return True
+        # this catches any encode errors raised while checking u_path
+        except UnicodeEncodeError:
+            log.warn(enc_warn, path, sys.getfilesystemencoding())
+
+
+class manifest_maker(sdist):
+    template = "MANIFEST.in"
+
+    def initialize_options(self):
+        self.use_defaults = 1
+        self.prune = 1
+        self.manifest_only = 1
+        self.force_manifest = 1
+
+    def finalize_options(self):
+        pass
+
+    def run(self):
+        self.filelist = FileList()
+        if not os.path.exists(self.manifest):
+            self.write_manifest()  # it must exist so it'll get in the list
+        self.filelist.findall()
+        self.add_defaults()
+        if os.path.exists(self.template):
+            self.read_template()
+        self.prune_file_list()
+        self.filelist.sort()
+        self.filelist.remove_duplicates()
+        self.write_manifest()
+
+    def _manifest_normalize(self, path):
+        path = unicode_utils.filesys_decode(path)
+        return path.replace(os.sep, '/')
+
+    def write_manifest(self):
+        """
+        Write the file list in 'self.filelist' to the manifest file
+        named by 'self.manifest'.
+        """
+        self.filelist._repair()
+
+        # _repair() ensured encodability; _manifest_normalize() handles
+        # the decode to unicode and the path separators
+        files = [self._manifest_normalize(f) for f in self.filelist.files]
+        msg = "writing manifest file '%s'" % self.manifest
+        self.execute(write_file, (self.manifest, files), msg)
+
+    def warn(self, msg):  # suppress missing-file warnings from sdist
+        if not msg.startswith("standard file not found:"):
+            sdist.warn(self, msg)
+
+    def add_defaults(self):
+        sdist.add_defaults(self)
+        self.filelist.append(self.template)
+        self.filelist.append(self.manifest)
+        rcfiles = list(walk_revctrl())
+        if rcfiles:
+            self.filelist.extend(rcfiles)
+        elif os.path.exists(self.manifest):
+            self.read_manifest()
+        ei_cmd = self.get_finalized_command('egg_info')
+        self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
+
+    def prune_file_list(self):
+        build = self.get_finalized_command('build')
+        base_dir = self.distribution.get_fullname()
+        self.filelist.exclude_pattern(None, prefix=build.build_base)
+        self.filelist.exclude_pattern(None, prefix=base_dir)
+        sep = re.escape(os.sep)
+        self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
+                                      is_regex=1)
+
+
+def write_file(filename, contents):
+    """Create a file with the specified name and write 'contents' (a
+    sequence of strings without line terminators) to it.
+ """ + contents = "\n".join(contents) + + # assuming the contents has been vetted for utf-8 encoding + contents = contents.encode("utf-8") + + with open(filename, "wb") as f: # always write POSIX-style manifest + f.write(contents) + + +def write_pkg_info(cmd, basename, filename): + log.info("writing %s", filename) + if not cmd.dry_run: + metadata = cmd.distribution.metadata + metadata.version, oldver = cmd.egg_version, metadata.version + metadata.name, oldname = cmd.egg_name, metadata.name + try: + # write unescaped data to PKG-INFO, so older pkg_resources + # can still parse it + metadata.write_pkg_info(cmd.egg_info) + finally: + metadata.name, metadata.version = oldname, oldver + + safe = getattr(cmd.distribution, 'zip_safe', None) + from setuptools.command import bdist_egg + + bdist_egg.write_safety_flag(cmd.egg_info, safe) + + +def warn_depends_obsolete(cmd, basename, filename): + if os.path.exists(filename): + log.warn( + "WARNING: 'depends.txt' is not used by setuptools 0.6!\n" + "Use the install_requires/extras_require setup() args instead." + ) + + +def _write_requirements(stream, reqs): + lines = yield_lines(reqs or ()) + append_cr = lambda line: line + '\n' + lines = map(append_cr, lines) + stream.writelines(lines) + + +def write_requirements(cmd, basename, filename): + dist = cmd.distribution + data = StringIO() + _write_requirements(data, dist.install_requires) + extras_require = dist.extras_require or {} + for extra in sorted(extras_require): + data.write('\n[{extra}]\n'.format(**vars())) + _write_requirements(data, extras_require[extra]) + cmd.write_or_delete_file("requirements", filename, data.getvalue()) + + +def write_toplevel_names(cmd, basename, filename): + pkgs = dict.fromkeys( + [ + k.split('.', 1)[0] + for k in cmd.distribution.iter_distribution_names() + ] + ) + cmd.write_file("top-level names", filename, '\n'.join(pkgs) + '\n') + + +def overwrite_arg(cmd, basename, filename): + write_arg(cmd, basename, filename, True) + + +def write_arg(cmd, basename, filename, force=False): + argname = os.path.splitext(basename)[0] + value = getattr(cmd.distribution, argname, None) + if value is not None: + value = '\n'.join(value) + '\n' + cmd.write_or_delete_file(argname, filename, value, force) + + +def write_entries(cmd, basename, filename): + ep = cmd.distribution.entry_points + + if isinstance(ep, basestring) or ep is None: + data = ep + elif ep is not None: + data = [] + for section, contents in sorted(ep.items()): + if not isinstance(contents, basestring): + contents = EntryPoint.parse_group(section, contents) + contents = '\n'.join(sorted(map(str, contents.values()))) + data.append('[%s]\n%s\n\n' % (section, contents)) + data = ''.join(data) + + cmd.write_or_delete_file('entry points', filename, data, True) + + +def get_pkg_info_revision(): + # See if we can get a -r### off of PKG-INFO, in case this is an sdist of + # a subversion revision + # + if os.path.exists('PKG-INFO'): + f = open('PKG-INFO', 'rU') + for line in f: + match = re.match(r"Version:.*-r(\d+)\s*$", line) + if match: + return int(match.group(1)) + f.close() + return 0 diff --git a/lib/python3.4/site-packages/setuptools/command/install.py b/lib/python3.4/site-packages/setuptools/command/install.py new file mode 100644 index 0000000..d2bca2e --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/install.py @@ -0,0 +1,125 @@ +from distutils.errors import DistutilsArgError +import inspect +import glob +import warnings +import platform +import distutils.command.install as orig + +import setuptools 
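+
+# Usage sketch (illustrative assumption, not part of the vendored module):
+# the install command defined below switches between an egg-based install
+# and the legacy distutils behaviour depending on how it was invoked.
+# System packagers typically force the legacy path with something like:
+#
+#     python setup.py install --single-version-externally-managed \
+#         --record=installed-files.txt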
+
+# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
+# now. See https://bitbucket.org/pypa/setuptools/issue/199/
+_install = orig.install
+
+
+class install(orig.install):
+    """Use easy_install to install the package, w/dependencies"""
+
+    user_options = orig.install.user_options + [
+        ('old-and-unmanageable', None, "Try not to use this!"),
+        ('single-version-externally-managed', None,
+         "used by system package builders to create 'flat' eggs"),
+    ]
+    boolean_options = orig.install.boolean_options + [
+        'old-and-unmanageable', 'single-version-externally-managed',
+    ]
+    new_commands = [
+        ('install_egg_info', lambda self: True),
+        ('install_scripts', lambda self: True),
+    ]
+    _nc = dict(new_commands)
+
+    def initialize_options(self):
+        orig.install.initialize_options(self)
+        self.old_and_unmanageable = None
+        self.single_version_externally_managed = None
+
+    def finalize_options(self):
+        orig.install.finalize_options(self)
+        if self.root:
+            self.single_version_externally_managed = True
+        elif self.single_version_externally_managed:
+            if not self.root and not self.record:
+                raise DistutilsArgError(
+                    "You must specify --record or --root when building system"
+                    " packages"
+                )
+
+    def handle_extra_path(self):
+        if self.root or self.single_version_externally_managed:
+            # explicit backward-compatibility mode, allow extra_path to work
+            return orig.install.handle_extra_path(self)
+
+        # Ignore extra_path when installing an egg (or being run by another
+        # command without --root or --single-version-externally-managed)
+        self.path_file = None
+        self.extra_dirs = ''
+
+    def run(self):
+        # Explicit request for old-style install? Just do it
+        if self.old_and_unmanageable or self.single_version_externally_managed:
+            return orig.install.run(self)
+
+        if not self._called_from_setup(inspect.currentframe()):
+            # Run in backward-compatibility mode to support bdist_* commands.
+            orig.install.run(self)
+        else:
+            self.do_egg_install()
+
+    @staticmethod
+    def _called_from_setup(run_frame):
+        """
+        Attempt to detect whether run() was called from setup() or by another
+        command. If called by setup(), the parent caller will be the
+        'run_command' method in 'distutils.dist', and *its* caller will be
+        the 'run_commands' method. If called any other way, the
+        immediate caller *might* be 'run_command', but it won't have been
+        called by 'run_commands'. Return True in that case or if a call stack
+        is unavailable. Return False otherwise.
+        """
+        if run_frame is None:
+            msg = "Call stack not available. bdist_* commands may fail."
+            warnings.warn(msg)
+            if platform.python_implementation() == 'IronPython':
+                msg = "For best results, pass -X:Frames to enable call stack."
+                warnings.warn(msg)
+            return True
+        res = inspect.getouterframes(run_frame)[2]
+        caller, = res[:1]
+        info = inspect.getframeinfo(caller)
+        caller_module = caller.f_globals.get('__name__', '')
+        return (
+            caller_module == 'distutils.dist'
+            and info.function == 'run_commands'
+        )
+
+    def do_egg_install(self):
+
+        easy_install = self.distribution.get_command_class('easy_install')
+
+        cmd = easy_install(
+            self.distribution, args="x", root=self.root, record=self.record,
+        )
+        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
+        cmd.always_copy_from = '.'  # make sure local-dir eggs get installed
+
+        # pick up setup-dir .egg files only: no .egg-info
+        cmd.package_index.scan(glob.glob('*.egg'))
+
+        self.run_command('bdist_egg')
+        args = [self.distribution.get_command_obj('bdist_egg').egg_output]
+
+        if setuptools.bootstrap_install_from:
+            # Bootstrap self-installation of setuptools
+            args.insert(0, setuptools.bootstrap_install_from)
+
+        cmd.args = args
+        cmd.run()
+        setuptools.bootstrap_install_from = None
+
+
+# XXX Python 3.1 doesn't see _nc if this is inside the class
+install.sub_commands = (
+    [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc]
+    + install.new_commands
+)
diff --git a/lib/python3.4/site-packages/setuptools/command/install_egg_info.py b/lib/python3.4/site-packages/setuptools/command/install_egg_info.py
new file mode 100644
index 0000000..f430616
--- /dev/null
+++ b/lib/python3.4/site-packages/setuptools/command/install_egg_info.py
@@ -0,0 +1,133 @@
+from distutils import log, dir_util
+from distutils.errors import DistutilsOptionError
+import os, sys
+
+from setuptools import Command
+from setuptools.archive_util import unpack_archive
+import pkg_resources
+
+
+class install_egg_info(Command):
+    """Install an .egg-info directory for the package"""
+
+    description = "Install an .egg-info directory for the package"
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install to"),
+    ]
+
+    def initialize_options(self):
+        self.install_dir = None
+        self.install_layout = None
+        self.prefix_option = None
+
+    def finalize_options(self):
+        self.set_undefined_options('install_lib',
+                                   ('install_dir', 'install_dir'))
+        self.set_undefined_options('install',('install_layout','install_layout'))
+        if sys.hexversion > 0x2060000:
+            self.set_undefined_options('install',('prefix_option','prefix_option'))
+        ei_cmd = self.get_finalized_command("egg_info")
+        basename = pkg_resources.Distribution(
+            None, None, ei_cmd.egg_name, ei_cmd.egg_version
+        ).egg_name() + '.egg-info'
+
+        if self.install_layout:
+            if not self.install_layout.lower() in ['deb']:
+                raise DistutilsOptionError(
+                    "unknown value for --install-layout")
+            basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '')
+        elif self.prefix_option or 'real_prefix' in sys.__dict__:
+            # don't modify for virtualenv
+            pass
+        else:
+            basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '')
+
+        self.source = ei_cmd.egg_info
+        self.target = os.path.join(self.install_dir, basename)
+        self.outputs = [self.target]
+
+    def run(self):
+        self.run_command('egg_info')
+        if os.path.isdir(self.target) and not os.path.islink(self.target):
+            dir_util.remove_tree(self.target, dry_run=self.dry_run)
+        elif os.path.exists(self.target):
+            self.execute(os.unlink, (self.target,), "Removing " + self.target)
+        if not self.dry_run:
+            pkg_resources.ensure_directory(self.target)
+        self.execute(
+            self.copytree, (), "Copying %s to %s" % (self.source, self.target)
+        )
+        self.install_namespaces()
+
+    def get_outputs(self):
+        return self.outputs
+
+    def copytree(self):
+        # Copy the .egg-info tree to site-packages
+        def skimmer(src, dst):
+            # filter out source-control directories; note that 'src' is always
+            # a '/'-separated path, regardless of platform. 'dst' is a
+            # platform-specific path.
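+            # For example, a src of 'foo/.svn/entries' contains '/.svn/' and
+            # comes back as None (skipped); any other file falls through, is
+            # recorded in self.outputs, and is copied to dst.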
+ for skip in '.svn/', 'CVS/': + if src.startswith(skip) or '/' + skip in src: + return None + self.outputs.append(dst) + log.debug("Copying %s to %s", src, dst) + return dst + + unpack_archive(self.source, self.target, skimmer) + + def install_namespaces(self): + nsp = self._get_all_ns_packages() + if not nsp: + return + filename, ext = os.path.splitext(self.target) + filename += '-nspkg.pth' + self.outputs.append(filename) + log.info("Installing %s", filename) + lines = map(self._gen_nspkg_line, nsp) + + if self.dry_run: + # always generate the lines, even in dry run + list(lines) + return + + with open(filename, 'wt') as f: + f.writelines(lines) + + _nspkg_tmpl = ( + "import sys, types, os", + "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)", + "ie = os.path.exists(os.path.join(p,'__init__.py'))", + "m = not ie and " + "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))", + "mp = (m or []) and m.__dict__.setdefault('__path__',[])", + "(p not in mp) and mp.append(p)", + ) + "lines for the namespace installer" + + _nspkg_tmpl_multi = ( + 'm and setattr(sys.modules[%(parent)r], %(child)r, m)', + ) + "additional line(s) when a parent package is indicated" + + @classmethod + def _gen_nspkg_line(cls, pkg): + # ensure pkg is not a unicode string under Python 2.7 + pkg = str(pkg) + pth = tuple(pkg.split('.')) + tmpl_lines = cls._nspkg_tmpl + parent, sep, child = pkg.rpartition('.') + if parent: + tmpl_lines += cls._nspkg_tmpl_multi + return ';'.join(tmpl_lines) % locals() + '\n' + + def _get_all_ns_packages(self): + """Return sorted list of all package namespaces""" + nsp = set() + for pkg in self.distribution.namespace_packages or []: + pkg = pkg.split('.') + while pkg: + nsp.add('.'.join(pkg)) + pkg.pop() + return sorted(nsp) diff --git a/lib/python3.4/site-packages/setuptools/command/install_lib.py b/lib/python3.4/site-packages/setuptools/command/install_lib.py new file mode 100644 index 0000000..c16cd0a --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/install_lib.py @@ -0,0 +1,89 @@ +import distutils.command.install_lib as orig +import os +import sys + + +class install_lib(orig.install_lib): + """Don't add compiled flags to filenames of non-Python files""" + + def initialize_options(self): + orig.install_lib.initialize_options(self) + self.multiarch = None + self.install_layout = None + + def finalize_options(self): + orig.install_lib.finalize_options(self) + self.set_undefined_options('install',('install_layout','install_layout')) + if self.install_layout == 'deb' and sys.version_info[:2] >= (3, 3): + import sysconfig + self.multiarch = sysconfig.get_config_var('MULTIARCH') + + def run(self): + self.build() + outfiles = self.install() + if outfiles is not None: + # always compile, in case we have any extension stubs to deal with + self.byte_compile(outfiles) + + def get_exclusions(self): + exclude = {} + nsp = self.distribution.namespace_packages + svem = (nsp and self.get_finalized_command('install') + .single_version_externally_managed) + if svem: + for pkg in nsp: + parts = pkg.split('.') + while parts: + pkgdir = os.path.join(self.install_dir, *parts) + for f in '__init__.py', '__init__.pyc', '__init__.pyo': + exclude[os.path.join(pkgdir, f)] = 1 + parts.pop() + return exclude + + def copy_tree( + self, infile, outfile, + preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 + ): + assert preserve_mode and preserve_times and not preserve_symlinks + exclude = self.get_exclusions() + + if not exclude: + import distutils.dir_util + 
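+            # Assumption: a Debian-patched distutils reads _multiarch while
+            # copying and renames extension modules to carry the multiarch
+            # triplet, as pf() below does explicitly; stock distutils simply
+            # ignores this module attribute.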
distutils.dir_util._multiarch = self.multiarch + return orig.install_lib.copy_tree(self, infile, outfile) + + # Exclude namespace package __init__.py* files from the output + + from setuptools.archive_util import unpack_directory + from distutils import log + + outfiles = [] + + if self.multiarch: + import sysconfig + ext_suffix = sysconfig.get_config_var ('EXT_SUFFIX') + new_suffix = "%s-%s%s" % (ext_suffix[:-3], self.multiarch, ext_suffix[-3:]) + + def pf(src, dst): + if dst in exclude: + log.warn("Skipping installation of %s (namespace package)", + dst) + return False + + if self.multiarch and dst.endswith(ext_suffix) and not dst.endswith(new_suffix): + dst = dst.replace(ext_suffix, new_suffix) + log.info("renaming extension to %s", os.path.basename(dst)) + + log.info("copying %s -> %s", src, os.path.dirname(dst)) + outfiles.append(dst) + return dst + + unpack_directory(infile, outfile, pf) + return outfiles + + def get_outputs(self): + outputs = orig.install_lib.get_outputs(self) + exclude = self.get_exclusions() + if exclude: + return [f for f in outputs if f not in exclude] + return outputs diff --git a/lib/python3.4/site-packages/setuptools/command/install_scripts.py b/lib/python3.4/site-packages/setuptools/command/install_scripts.py new file mode 100644 index 0000000..eb79fa3 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/install_scripts.py @@ -0,0 +1,55 @@ +from distutils import log +import distutils.command.install_scripts as orig +import os + +from pkg_resources import Distribution, PathMetadata, ensure_directory + + +class install_scripts(orig.install_scripts): + """Do normal script install, plus any egg_info wrapper scripts""" + + def initialize_options(self): + orig.install_scripts.initialize_options(self) + self.no_ep = False + + def run(self): + from setuptools.command.easy_install import get_script_args + from setuptools.command.easy_install import sys_executable + + self.run_command("egg_info") + if self.distribution.scripts: + orig.install_scripts.run(self) # run first to set up self.outfiles + else: + self.outfiles = [] + if self.no_ep: + # don't install entry point scripts into .egg file! 
+ return + + ei_cmd = self.get_finalized_command("egg_info") + dist = Distribution( + ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), + ei_cmd.egg_name, ei_cmd.egg_version, + ) + bs_cmd = self.get_finalized_command('build_scripts') + executable = getattr(bs_cmd, 'executable', sys_executable) + is_wininst = getattr( + self.get_finalized_command("bdist_wininst"), '_is_running', False + ) + for args in get_script_args(dist, executable, is_wininst): + self.write_script(*args) + + def write_script(self, script_name, contents, mode="t", *ignored): + """Write an executable file to the scripts directory""" + from setuptools.command.easy_install import chmod, current_umask + + log.info("Installing %s script to %s", script_name, self.install_dir) + target = os.path.join(self.install_dir, script_name) + self.outfiles.append(target) + + mask = current_umask() + if not self.dry_run: + ensure_directory(target) + f = open(target, "w" + mode) + f.write(contents) + f.close() + chmod(target, 0o777 - mask) diff --git a/lib/python3.4/site-packages/setuptools/command/register.py b/lib/python3.4/site-packages/setuptools/command/register.py new file mode 100644 index 0000000..8d6336a --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/register.py @@ -0,0 +1,10 @@ +import distutils.command.register as orig + + +class register(orig.register): + __doc__ = orig.register.__doc__ + + def run(self): + # Make sure that we are using valid current name/version info + self.run_command('egg_info') + orig.register.run(self) diff --git a/lib/python3.4/site-packages/setuptools/command/rotate.py b/lib/python3.4/site-packages/setuptools/command/rotate.py new file mode 100644 index 0000000..1b07362 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/rotate.py @@ -0,0 +1,61 @@ +from distutils.util import convert_path +from distutils import log +from distutils.errors import DistutilsOptionError +import os + +from setuptools import Command +from setuptools.compat import basestring + + +class rotate(Command): + """Delete older distributions""" + + description = "delete older distributions, keeping N newest files" + user_options = [ + ('match=', 'm', "patterns to match (required)"), + ('dist-dir=', 'd', "directory where the distributions are"), + ('keep=', 'k', "number of matching distributions to keep"), + ] + + boolean_options = [] + + def initialize_options(self): + self.match = None + self.dist_dir = None + self.keep = None + + def finalize_options(self): + if self.match is None: + raise DistutilsOptionError( + "Must specify one or more (comma-separated) match patterns " + "(e.g. 
'.zip' or '.egg')" + ) + if self.keep is None: + raise DistutilsOptionError("Must specify number of files to keep") + try: + self.keep = int(self.keep) + except ValueError: + raise DistutilsOptionError("--keep must be an integer") + if isinstance(self.match, basestring): + self.match = [ + convert_path(p.strip()) for p in self.match.split(',') + ] + self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) + + def run(self): + self.run_command("egg_info") + from glob import glob + + for pattern in self.match: + pattern = self.distribution.get_name() + '*' + pattern + files = glob(os.path.join(self.dist_dir, pattern)) + files = [(os.path.getmtime(f), f) for f in files] + files.sort() + files.reverse() + + log.info("%d file(s) matching %s", len(files), pattern) + files = files[self.keep:] + for (t, f) in files: + log.info("Deleting %s", f) + if not self.dry_run: + os.unlink(f) diff --git a/lib/python3.4/site-packages/setuptools/command/saveopts.py b/lib/python3.4/site-packages/setuptools/command/saveopts.py new file mode 100644 index 0000000..611cec5 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/saveopts.py @@ -0,0 +1,22 @@ +from setuptools.command.setopt import edit_config, option_base + + +class saveopts(option_base): + """Save command-line options to a file""" + + description = "save supplied options to setup.cfg or other config file" + + def run(self): + dist = self.distribution + settings = {} + + for cmd in dist.command_options: + + if cmd == 'saveopts': + continue # don't save our own options! + + for opt, (src, val) in dist.get_option_dict(cmd).items(): + if src == "command line": + settings.setdefault(cmd, {})[opt] = val + + edit_config(self.filename, settings, self.dry_run) diff --git a/lib/python3.4/site-packages/setuptools/command/sdist.py b/lib/python3.4/site-packages/setuptools/command/sdist.py new file mode 100644 index 0000000..2aa1ee2 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/sdist.py @@ -0,0 +1,250 @@ +from glob import glob +from distutils.util import convert_path +from distutils import log +import distutils.command.sdist as orig +import os +import re +import sys + +from setuptools import svn_utils +from setuptools.compat import PY3 +import pkg_resources + +READMES = ('README', 'README.rst', 'README.txt') + + +def walk_revctrl(dirname=''): + """Find all files under revision control""" + for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): + for item in ep.load()(dirname): + yield item + + +# TODO will need test case +class re_finder(object): + """ + Finder that locates files based on entries in a file matched by a + regular expression. 
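+
+    For example, the CVS finder registered below,
+    re_finder('CVS/Entries', re.compile(r"^\w?/([^/]+)/", re.M)),
+    reads a CVS/Entries file and yields the name captured by group(1) on
+    each matching line, joined onto the directory being searched.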
+ """ + + def __init__(self, path, pattern, postproc=lambda x: x): + self.pattern = pattern + self.postproc = postproc + self.entries_path = convert_path(path) + + def _finder(self, dirname, filename): + f = open(filename, 'rU') + try: + data = f.read() + finally: + f.close() + for match in self.pattern.finditer(data): + path = match.group(1) + # postproc was formerly used when the svn finder + # was an re_finder for calling unescape + path = self.postproc(path) + yield svn_utils.joinpath(dirname, path) + + def find(self, dirname=''): + path = svn_utils.joinpath(dirname, self.entries_path) + + if not os.path.isfile(path): + # entries file doesn't exist + return + for path in self._finder(dirname, path): + if os.path.isfile(path): + yield path + elif os.path.isdir(path): + for item in self.find(path): + yield item + + __call__ = find + + +def _default_revctrl(dirname=''): + 'Primary svn_cvs entry point' + for finder in finders: + for item in finder(dirname): + yield item + + +finders = [ + re_finder('CVS/Entries', re.compile(r"^\w?/([^/]+)/", re.M)), + svn_utils.svn_finder, +] + + +class sdist(orig.sdist): + """Smart sdist that finds anything supported by revision control""" + + user_options = [ + ('formats=', None, + "formats for source distribution (comma-separated list)"), + ('keep-temp', 'k', + "keep the distribution tree around after creating " + + "archive file(s)"), + ('dist-dir=', 'd', + "directory to put the source distribution archive(s) in " + "[default: dist]"), + ] + + negative_opt = {} + + def run(self): + self.run_command('egg_info') + ei_cmd = self.get_finalized_command('egg_info') + self.filelist = ei_cmd.filelist + self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt')) + self.check_readme() + + # Run sub commands + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + # Call check_metadata only if no 'check' command + # (distutils <= 2.6) + import distutils.command + + if 'check' not in distutils.command.__all__: + self.check_metadata() + + self.make_distribution() + + dist_files = getattr(self.distribution, 'dist_files', []) + for file in self.archive_files: + data = ('sdist', '', file) + if data not in dist_files: + dist_files.append(data) + + def __read_template_hack(self): + # This grody hack closes the template file (MANIFEST.in) if an + # exception occurs during read_template. + # Doing so prevents an error when easy_install attempts to delete the + # file. + try: + orig.sdist.read_template(self) + except: + sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close() + raise + + # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle + # has been fixed, so only override the method if we're using an earlier + # Python. 
+ has_leaky_handle = ( + sys.version_info < (2, 7, 2) + or (3, 0) <= sys.version_info < (3, 1, 4) + or (3, 2) <= sys.version_info < (3, 2, 1) + ) + if has_leaky_handle: + read_template = __read_template_hack + + def add_defaults(self): + standards = [READMES, + self.distribution.script_name] + for fn in standards: + if isinstance(fn, tuple): + alts = fn + got_it = 0 + for fn in alts: + if os.path.exists(fn): + got_it = 1 + self.filelist.append(fn) + break + + if not got_it: + self.warn("standard file not found: should have one of " + + ', '.join(alts)) + else: + if os.path.exists(fn): + self.filelist.append(fn) + else: + self.warn("standard file '%s' not found" % fn) + + optional = ['test/test*.py', 'setup.cfg'] + for pattern in optional: + files = list(filter(os.path.isfile, glob(pattern))) + if files: + self.filelist.extend(files) + + # getting python files + if self.distribution.has_pure_modules(): + build_py = self.get_finalized_command('build_py') + self.filelist.extend(build_py.get_source_files()) + # This functionality is incompatible with include_package_data, and + # will in fact create an infinite recursion if include_package_data + # is True. Use of include_package_data will imply that + # distutils-style automatic handling of package_data is disabled + if not self.distribution.include_package_data: + for _, src_dir, _, filenames in build_py.data_files: + self.filelist.extend([os.path.join(src_dir, filename) + for filename in filenames]) + + if self.distribution.has_ext_modules(): + build_ext = self.get_finalized_command('build_ext') + self.filelist.extend(build_ext.get_source_files()) + + if self.distribution.has_c_libraries(): + build_clib = self.get_finalized_command('build_clib') + self.filelist.extend(build_clib.get_source_files()) + + if self.distribution.has_scripts(): + build_scripts = self.get_finalized_command('build_scripts') + self.filelist.extend(build_scripts.get_source_files()) + + def check_readme(self): + for f in READMES: + if os.path.exists(f): + return + else: + self.warn( + "standard file not found: should have one of " + + ', '.join(READMES) + ) + + def make_release_tree(self, base_dir, files): + orig.sdist.make_release_tree(self, base_dir, files) + + # Save any egg_info command line options used to create this sdist + dest = os.path.join(base_dir, 'setup.cfg') + if hasattr(os, 'link') and os.path.exists(dest): + # unlink and re-copy, since it might be hard-linked, and + # we don't want to change the source version + os.unlink(dest) + self.copy_file('setup.cfg', dest) + + self.get_finalized_command('egg_info').save_version_info(dest) + + def _manifest_is_not_generated(self): + # check for special comment used in 2.7.1 and higher + if not os.path.isfile(self.manifest): + return False + + fp = open(self.manifest, 'rbU') + try: + first_line = fp.readline() + finally: + fp.close() + return (first_line != + '# file GENERATED by distutils, do NOT edit\n'.encode()) + + def read_manifest(self): + """Read the manifest file (named by 'self.manifest') and use it to + fill in 'self.filelist', the list of files to include in the source + distribution. + """ + log.info("reading manifest file '%s'", self.manifest) + manifest = open(self.manifest, 'rbU') + for line in manifest: + # The manifest must contain UTF-8. See #303. 
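+            # (On Python 2 the raw bytes double as str, so only Python 3
+            # needs the explicit decode below; lines that fail to decode
+            # are warned about and skipped.)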
+ if PY3: + try: + line = line.decode('UTF-8') + except UnicodeDecodeError: + log.warn("%r not UTF-8 decodable -- skipping" % line) + continue + # ignore comments and blank lines + line = line.strip() + if line.startswith('#') or not line: + continue + self.filelist.append(line) + manifest.close() diff --git a/lib/python3.4/site-packages/setuptools/command/setopt.py b/lib/python3.4/site-packages/setuptools/command/setopt.py new file mode 100644 index 0000000..a04d603 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/setopt.py @@ -0,0 +1,150 @@ +from distutils.util import convert_path +from distutils import log +from distutils.errors import DistutilsOptionError +import distutils +import os + +from setuptools import Command + + +__all__ = ['config_file', 'edit_config', 'option_base', 'setopt'] + + +def config_file(kind="local"): + """Get the filename of the distutils, local, global, or per-user config + + `kind` must be one of "local", "global", or "user" + """ + if kind == 'local': + return 'setup.cfg' + if kind == 'global': + return os.path.join( + os.path.dirname(distutils.__file__), 'distutils.cfg' + ) + if kind == 'user': + dot = os.name == 'posix' and '.' or '' + return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot)) + raise ValueError( + "config_file() type must be 'local', 'global', or 'user'", kind + ) + + +def edit_config(filename, settings, dry_run=False): + """Edit a configuration file to include `settings` + + `settings` is a dictionary of dictionaries or ``None`` values, keyed by + command/section name. A ``None`` value means to delete the entire section, + while a dictionary lists settings to be changed or deleted in that section. + A setting of ``None`` means to delete that setting. + """ + from setuptools.compat import ConfigParser + + log.debug("Reading configuration from %s", filename) + opts = ConfigParser.RawConfigParser() + opts.read([filename]) + for section, options in settings.items(): + if options is None: + log.info("Deleting section [%s] from %s", section, filename) + opts.remove_section(section) + else: + if not opts.has_section(section): + log.debug("Adding new section [%s] to %s", section, filename) + opts.add_section(section) + for option, value in options.items(): + if value is None: + log.debug( + "Deleting %s.%s from %s", + section, option, filename + ) + opts.remove_option(section, option) + if not opts.options(section): + log.info("Deleting empty [%s] section from %s", + section, filename) + opts.remove_section(section) + else: + log.debug( + "Setting %s.%s to %r in %s", + section, option, value, filename + ) + opts.set(section, option, value) + + log.info("Writing %s", filename) + if not dry_run: + with open(filename, 'w') as f: + opts.write(f) + + +class option_base(Command): + """Abstract base class for commands that mess with config files""" + + user_options = [ + ('global-config', 'g', + "save options to the site-wide distutils.cfg file"), + ('user-config', 'u', + "save options to the current user's pydistutils.cfg file"), + ('filename=', 'f', + "configuration file to use (default=setup.cfg)"), + ] + + boolean_options = [ + 'global-config', 'user-config', + ] + + def initialize_options(self): + self.global_config = None + self.user_config = None + self.filename = None + + def finalize_options(self): + filenames = [] + if self.global_config: + filenames.append(config_file('global')) + if self.user_config: + filenames.append(config_file('user')) + if self.filename is not None: + filenames.append(self.filename) + if not 
filenames: + filenames.append(config_file('local')) + if len(filenames) > 1: + raise DistutilsOptionError( + "Must specify only one configuration file option", + filenames + ) + self.filename, = filenames + + +class setopt(option_base): + """Save command-line options to a file""" + + description = "set an option in setup.cfg or another config file" + + user_options = [ + ('command=', 'c', 'command to set an option for'), + ('option=', 'o', 'option to set'), + ('set-value=', 's', 'value of the option'), + ('remove', 'r', 'remove (unset) the value'), + ] + option_base.user_options + + boolean_options = option_base.boolean_options + ['remove'] + + def initialize_options(self): + option_base.initialize_options(self) + self.command = None + self.option = None + self.set_value = None + self.remove = None + + def finalize_options(self): + option_base.finalize_options(self) + if self.command is None or self.option is None: + raise DistutilsOptionError("Must specify --command *and* --option") + if self.set_value is None and not self.remove: + raise DistutilsOptionError("Must specify --set-value or --remove") + + def run(self): + edit_config( + self.filename, { + self.command: {self.option.replace('-', '_'): self.set_value} + }, + self.dry_run + ) diff --git a/lib/python3.4/site-packages/setuptools/command/test.py b/lib/python3.4/site-packages/setuptools/command/test.py new file mode 100644 index 0000000..1038da7 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/test.py @@ -0,0 +1,175 @@ +from distutils.errors import DistutilsOptionError +from unittest import TestLoader +import unittest +import sys + +from pkg_resources import (resource_listdir, resource_exists, normalize_path, + working_set, _namespace_packages, + add_activation_listener, require, EntryPoint) +from setuptools import Command +from setuptools.compat import PY3 +from setuptools.py31compat import unittest_main + + +class ScanningLoader(TestLoader): + def loadTestsFromModule(self, module): + """Return a suite of all tests cases contained in the given module + + If the module is a package, load tests from all the modules in it. + If the module has an ``additional_tests`` function, call it and add + the return value to the tests. + """ + tests = [] + tests.append(TestLoader.loadTestsFromModule(self, module)) + + if hasattr(module, "additional_tests"): + tests.append(module.additional_tests()) + + if hasattr(module, '__path__'): + for file in resource_listdir(module.__name__, ''): + if file.endswith('.py') and file != '__init__.py': + submodule = module.__name__ + '.' + file[:-3] + else: + if resource_exists(module.__name__, file + '/__init__.py'): + submodule = module.__name__ + '.' + file + else: + continue + tests.append(self.loadTestsFromName(submodule)) + + if len(tests) != 1: + return self.suiteClass(tests) + else: + return tests[0] # don't create a nested suite for only one return + + +class test(Command): + """Command to run unit tests after in-place build""" + + description = "run unit tests after in-place build" + + user_options = [ + ('test-module=', 'm', "Run 'test_suite' in specified module"), + ('test-suite=', 's', + "Test suite to run (e.g. 
'some_module.test_suite')"),
+        ('test-runner=', 'r', "Test runner to use"),
+    ]
+
+    def initialize_options(self):
+        self.test_suite = None
+        self.test_module = None
+        self.test_loader = None
+        self.test_runner = None
+
+    def finalize_options(self):
+
+        if self.test_suite is None:
+            if self.test_module is None:
+                self.test_suite = self.distribution.test_suite
+            else:
+                self.test_suite = self.test_module + ".test_suite"
+        elif self.test_module:
+            raise DistutilsOptionError(
+                "You may specify a module or a suite, but not both"
+            )
+
+        self.test_args = [self.test_suite]
+
+        if self.verbose:
+            self.test_args.insert(0, '--verbose')
+        if self.test_loader is None:
+            self.test_loader = getattr(self.distribution, 'test_loader', None)
+        if self.test_loader is None:
+            self.test_loader = "setuptools.command.test:ScanningLoader"
+        if self.test_runner is None:
+            self.test_runner = getattr(self.distribution, 'test_runner', None)
+
+    def with_project_on_sys_path(self, func):
+        with_2to3 = PY3 and getattr(self.distribution, 'use_2to3', False)
+
+        if with_2to3:
+            # If we run 2to3 we can not do this inplace:
+
+            # Ensure metadata is up-to-date
+            self.reinitialize_command('build_py', inplace=0)
+            self.run_command('build_py')
+            bpy_cmd = self.get_finalized_command("build_py")
+            build_path = normalize_path(bpy_cmd.build_lib)
+
+            # Build extensions
+            self.reinitialize_command('egg_info', egg_base=build_path)
+            self.run_command('egg_info')
+
+            self.reinitialize_command('build_ext', inplace=0)
+            self.run_command('build_ext')
+        else:
+            # Without 2to3 inplace works fine:
+            self.run_command('egg_info')
+
+            # Build extensions in-place
+            self.reinitialize_command('build_ext', inplace=1)
+            self.run_command('build_ext')
+
+        ei_cmd = self.get_finalized_command("egg_info")
+
+        old_path = sys.path[:]
+        old_modules = sys.modules.copy()
+
+        try:
+            sys.path.insert(0, normalize_path(ei_cmd.egg_base))
+            working_set.__init__()
+            add_activation_listener(lambda dist: dist.activate())
+            require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
+            func()
+        finally:
+            sys.path[:] = old_path
+            sys.modules.clear()
+            sys.modules.update(old_modules)
+            working_set.__init__()
+
+    def run(self):
+        if self.distribution.install_requires:
+            self.distribution.fetch_build_eggs(
+                self.distribution.install_requires)
+        if self.distribution.tests_require:
+            self.distribution.fetch_build_eggs(self.distribution.tests_require)
+
+        if self.test_suite:
+            cmd = ' '.join(self.test_args)
+            if self.dry_run:
+                self.announce('skipping "unittest %s" (dry run)' % cmd)
+            else:
+                self.announce('running "unittest %s"' % cmd)
+                self.with_project_on_sys_path(self.run_tests)
+
+    def run_tests(self):
+        # Purge modules under test from sys.modules. The test loader will
+        # re-import them from the build location. Required when 2to3 is used
+        # with namespace packages.
+        if PY3 and getattr(self.distribution, 'use_2to3', False):
+            module = self.test_args[-1].split('.')[0]
+            if module in _namespace_packages:
+                del_modules = []
+                if module in sys.modules:
+                    del_modules.append(module)
+                module += '.'
+                for name in sys.modules:
+                    if name.startswith(module):
+                        del_modules.append(name)
+                list(map(sys.modules.__delitem__, del_modules))
+
+        unittest_main(
+            None, None, [unittest.__file__] + self.test_args,
+            testLoader=self._resolve_as_ep(self.test_loader),
+            testRunner=self._resolve_as_ep(self.test_runner),
+        )
+
+    @staticmethod
+    def _resolve_as_ep(val):
+        """
+        Load the indicated attribute value, called, as if it were
+        specified as an entry point.
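+
+        For example, a val of 'some.module:loader' is parsed like the
+        entry point 'x = some.module:loader', imported, and then called,
+        so passing a class or factory yields the constructed object.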
+ """ + if val is None: + return + parsed = EntryPoint.parse("x=" + val) + return parsed.load(require=False)() diff --git a/lib/python3.4/site-packages/setuptools/command/upload_docs.py b/lib/python3.4/site-packages/setuptools/command/upload_docs.py new file mode 100644 index 0000000..cd6c300 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/upload_docs.py @@ -0,0 +1,191 @@ +# -*- coding: utf-8 -*- +"""upload_docs + +Implements a Distutils 'upload_docs' subcommand (upload documentation to +PyPI's pythonhosted.org). +""" + +from base64 import standard_b64encode +from distutils import log +from distutils.errors import DistutilsOptionError +from distutils.command.upload import upload +import os +import socket +import zipfile +import tempfile +import sys +import shutil + +from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3 +from pkg_resources import iter_entry_points + + +errors = 'surrogateescape' if PY3 else 'strict' + + +# This is not just a replacement for byte literals +# but works as a general purpose encoder +def b(s, encoding='utf-8'): + if isinstance(s, unicode): + return s.encode(encoding, errors) + return s + + +class upload_docs(upload): + description = 'Upload documentation to PyPI' + + user_options = [ + ('repository=', 'r', + "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY), + ('show-response', None, + 'display full response text from server'), + ('upload-dir=', None, 'directory to upload'), + ] + boolean_options = upload.boolean_options + + def has_sphinx(self): + if self.upload_dir is None: + for ep in iter_entry_points('distutils.commands', 'build_sphinx'): + return True + + sub_commands = [('build_sphinx', has_sphinx)] + + def initialize_options(self): + upload.initialize_options(self) + self.upload_dir = None + self.target_dir = None + + def finalize_options(self): + upload.finalize_options(self) + if self.upload_dir is None: + if self.has_sphinx(): + build_sphinx = self.get_finalized_command('build_sphinx') + self.target_dir = build_sphinx.builder_target_dir + else: + build = self.get_finalized_command('build') + self.target_dir = os.path.join(build.build_base, 'docs') + else: + self.ensure_dirname('upload_dir') + self.target_dir = self.upload_dir + self.announce('Using upload directory %s' % self.target_dir) + + def create_zipfile(self, filename): + zip_file = zipfile.ZipFile(filename, "w") + try: + self.mkpath(self.target_dir) # just in case + for root, dirs, files in os.walk(self.target_dir): + if root == self.target_dir and not files: + raise DistutilsOptionError( + "no files found in upload directory '%s'" + % self.target_dir) + for name in files: + full = os.path.join(root, name) + relative = root[len(self.target_dir):].lstrip(os.path.sep) + dest = os.path.join(relative, name) + zip_file.write(full, dest) + finally: + zip_file.close() + + def run(self): + # Run sub commands + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + tmp_dir = tempfile.mkdtemp() + name = self.distribution.metadata.get_name() + zip_file = os.path.join(tmp_dir, "%s.zip" % name) + try: + self.create_zipfile(zip_file) + self.upload_file(zip_file) + finally: + shutil.rmtree(tmp_dir) + + def upload_file(self, filename): + f = open(filename, 'rb') + content = f.read() + f.close() + meta = self.distribution.metadata + data = { + ':action': 'doc_upload', + 'name': meta.get_name(), + 'content': (os.path.basename(filename), content), + } + # set up the authentication + credentials = b(self.username + ':' + self.password) + 
credentials = standard_b64encode(credentials) + if PY3: + credentials = credentials.decode('ascii') + auth = "Basic " + credentials + + # Build up the MIME payload for the POST data + boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' + sep_boundary = b('\n--') + b(boundary) + end_boundary = sep_boundary + b('--') + body = [] + for key, values in iteritems(data): + title = '\nContent-Disposition: form-data; name="%s"' % key + # handle multiple entries for the same name + if not isinstance(values, list): + values = [values] + for value in values: + if type(value) is tuple: + title += '; filename="%s"' % value[0] + value = value[1] + else: + value = b(value) + body.append(sep_boundary) + body.append(b(title)) + body.append(b("\n\n")) + body.append(value) + if value and value[-1:] == b('\r'): + body.append(b('\n')) # write an extra newline (lurve Macs) + body.append(end_boundary) + body.append(b("\n")) + body = b('').join(body) + + self.announce("Submitting documentation to %s" % (self.repository), + log.INFO) + + # build the Request + # We can't use urllib2 since we need to send the Basic + # auth right with the first request + schema, netloc, url, params, query, fragments = \ + urlparse(self.repository) + assert not params and not query and not fragments + if schema == 'http': + conn = httplib.HTTPConnection(netloc) + elif schema == 'https': + conn = httplib.HTTPSConnection(netloc) + else: + raise AssertionError("unsupported schema " + schema) + + data = '' + try: + conn.connect() + conn.putrequest("POST", url) + content_type = 'multipart/form-data; boundary=%s' % boundary + conn.putheader('Content-type', content_type) + conn.putheader('Content-length', str(len(body))) + conn.putheader('Authorization', auth) + conn.endheaders() + conn.send(body) + except socket.error: + e = sys.exc_info()[1] + self.announce(str(e), log.ERROR) + return + + r = conn.getresponse() + if r.status == 200: + self.announce('Server response (%s): %s' % (r.status, r.reason), + log.INFO) + elif r.status == 301: + location = r.getheader('Location') + if location is None: + location = 'https://pythonhosted.org/%s/' % meta.get_name() + self.announce('Upload successful. 
Visit %s' % location, + log.INFO) + else: + self.announce('Upload failed (%s): %s' % (r.status, r.reason), + log.ERROR) + if self.show_response: + print('-' * 75, r.read(), '-' * 75) diff --git a/lib/python3.4/site-packages/setuptools/compat.py b/lib/python3.4/site-packages/setuptools/compat.py new file mode 100644 index 0000000..73e6e4a --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/compat.py @@ -0,0 +1,66 @@ +import sys +import itertools + +PY3 = sys.version_info >= (3,) +PY2 = not PY3 + +if PY2: + basestring = basestring + import __builtin__ as builtins + import ConfigParser + from StringIO import StringIO + BytesIO = StringIO + func_code = lambda o: o.func_code + func_globals = lambda o: o.func_globals + im_func = lambda o: o.im_func + from htmlentitydefs import name2codepoint + import httplib + from BaseHTTPServer import HTTPServer + from SimpleHTTPServer import SimpleHTTPRequestHandler + from BaseHTTPServer import BaseHTTPRequestHandler + iteritems = lambda o: o.iteritems() + long_type = long + maxsize = sys.maxint + unichr = unichr + unicode = unicode + bytes = str + from urllib import url2pathname, splittag, pathname2url + import urllib2 + from urllib2 import urlopen, HTTPError, URLError, unquote, splituser + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit + filterfalse = itertools.ifilterfalse + + exec("""def reraise(tp, value, tb=None): + raise tp, value, tb""") + +if PY3: + basestring = str + import builtins + import configparser as ConfigParser + from io import StringIO, BytesIO + func_code = lambda o: o.__code__ + func_globals = lambda o: o.__globals__ + im_func = lambda o: o.__func__ + from html.entities import name2codepoint + import http.client as httplib + from http.server import HTTPServer, SimpleHTTPRequestHandler + from http.server import BaseHTTPRequestHandler + iteritems = lambda o: o.items() + long_type = int + maxsize = sys.maxsize + unichr = chr + unicode = str + bytes = bytes + from urllib.error import HTTPError, URLError + import urllib.request as urllib2 + from urllib.request import urlopen, url2pathname, pathname2url + from urllib.parse import ( + urlparse, urlunparse, unquote, splituser, urljoin, urlsplit, + urlunsplit, splittag, + ) + filterfalse = itertools.filterfalse + + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value diff --git a/lib/python3.4/site-packages/setuptools/depends.py b/lib/python3.4/site-packages/setuptools/depends.py new file mode 100644 index 0000000..e87ef3f --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/depends.py @@ -0,0 +1,215 @@ +import sys +import imp +import marshal +from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN +from distutils.version import StrictVersion +from setuptools import compat + +__all__ = [ + 'Require', 'find_module', 'get_module_constant', 'extract_constant' +] + +class Require: + """A prerequisite to building or installing a distribution""" + + def __init__(self, name, requested_version, module, homepage='', + attribute=None, format=None): + + if format is None and requested_version is not None: + format = StrictVersion + + if format is not None: + requested_version = format(requested_version) + if attribute is None: + attribute = '__version__' + + self.__dict__.update(locals()) + del self.self + + def full_name(self): + """Return full package/distribution name, w/version""" + if self.requested_version is not None: + return '%s-%s' % (self.name,self.requested_version) + return self.name + 
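+
+    # Usage sketch (hypothetical project/module names):
+    #   req = Require('Cheetah', '2.4', 'Cheetah')
+    #   req.full_name()   -> 'Cheetah-2.4'
+    #   req.is_present()  -> True if the Cheetah module can be located
+    #   req.is_current()  -> also checks its __version__ against '2.4'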
+ def version_ok(self, version): + """Is 'version' sufficiently up-to-date?""" + return self.attribute is None or self.format is None or \ + str(version) != "unknown" and version >= self.requested_version + + def get_version(self, paths=None, default="unknown"): + + """Get version number of installed module, 'None', or 'default' + + Search 'paths' for module. If not found, return 'None'. If found, + return the extracted version attribute, or 'default' if no version + attribute was specified, or the value cannot be determined without + importing the module. The version is formatted according to the + requirement's version format (if any), unless it is 'None' or the + supplied 'default'. + """ + + if self.attribute is None: + try: + f,p,i = find_module(self.module,paths) + if f: f.close() + return default + except ImportError: + return None + + v = get_module_constant(self.module, self.attribute, default, paths) + + if v is not None and v is not default and self.format is not None: + return self.format(v) + + return v + + def is_present(self, paths=None): + """Return true if dependency is present on 'paths'""" + return self.get_version(paths) is not None + + def is_current(self, paths=None): + """Return true if dependency is present and up-to-date on 'paths'""" + version = self.get_version(paths) + if version is None: + return False + return self.version_ok(version) + + +def _iter_code(code): + + """Yield '(op,arg)' pair for each operation in code object 'code'""" + + from array import array + from dis import HAVE_ARGUMENT, EXTENDED_ARG + + bytes = array('b',code.co_code) + eof = len(code.co_code) + + ptr = 0 + extended_arg = 0 + + while ptr<eof: + + op = bytes[ptr] + + if op>=HAVE_ARGUMENT: + + arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg + ptr += 3 + + if op==EXTENDED_ARG: + extended_arg = arg * compat.long_type(65536) + continue + + else: + arg = None + ptr += 1 + + yield op,arg + + +def find_module(module, paths=None): + """Just like 'imp.find_module()', but with package support""" + + parts = module.split('.') + + while parts: + part = parts.pop(0) + f, path, (suffix,mode,kind) = info = imp.find_module(part, paths) + + if kind==PKG_DIRECTORY: + parts = parts or ['__init__'] + paths = [path] + + elif parts: + raise ImportError("Can't find %r in %s" % (parts,module)) + + return info + + +def get_module_constant(module, symbol, default=-1, paths=None): + + """Find 'module' by searching 'paths', and extract 'symbol' + + Return 'None' if 'module' does not exist on 'paths', or it does not define + 'symbol'. If the module defines 'symbol' as a constant, return the + constant. Otherwise, return 'default'.""" + + try: + f, path, (suffix, mode, kind) = find_module(module, paths) + except ImportError: + # Module doesn't exist + return None + + try: + if kind==PY_COMPILED: + f.read(8) # skip magic & date + code = marshal.load(f) + elif kind==PY_FROZEN: + code = imp.get_frozen_object(module) + elif kind==PY_SOURCE: + code = compile(f.read(), path, 'exec') + else: + # Not something we can parse; we'll have to import it. :( + if module not in sys.modules: + imp.load_module(module, f, path, (suffix, mode, kind)) + return getattr(sys.modules[module], symbol, None) + + finally: + if f: + f.close() + + return extract_constant(code, symbol, default) + + +def extract_constant(code, symbol, default=-1): + """Extract the constant value of 'symbol' from 'code' + + If the name 'symbol' is bound to a constant value by the Python code + object 'code', return that value. 
If 'symbol' is bound to an expression, + return 'default'. Otherwise, return 'None'. + + Return value is based on the first assignment to 'symbol'. 'symbol' must + be a global, or at least a non-"fast" local in the code block. That is, + only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol' + must be present in 'code.co_names'. + """ + + if symbol not in code.co_names: + # name's not there, can't possibly be an assigment + return None + + name_idx = list(code.co_names).index(symbol) + + STORE_NAME = 90 + STORE_GLOBAL = 97 + LOAD_CONST = 100 + + const = default + + for op, arg in _iter_code(code): + + if op==LOAD_CONST: + const = code.co_consts[arg] + elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL): + return const + else: + const = default + + +def _update_globals(): + """ + Patch the globals to remove the objects not available on some platforms. + + XXX it'd be better to test assertions about bytecode instead. + """ + + if not sys.platform.startswith('java') and sys.platform != 'cli': + return + incompatible = 'extract_constant', 'get_module_constant' + for name in incompatible: + del globals()[name] + __all__.remove(name) + +_update_globals() diff --git a/lib/python3.4/site-packages/setuptools/dist.py b/lib/python3.4/site-packages/setuptools/dist.py new file mode 100644 index 0000000..8b36f67 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/dist.py @@ -0,0 +1,821 @@ +__all__ = ['Distribution'] + +import re +import os +import sys +import warnings +import numbers +import distutils.log +import distutils.core +import distutils.cmd +import distutils.dist +from distutils.core import Distribution as _Distribution +from distutils.errors import (DistutilsOptionError, DistutilsPlatformError, + DistutilsSetupError) + +from setuptools.depends import Require +from setuptools.compat import basestring, PY2 +import pkg_resources + +def _get_unpatched(cls): + """Protect against re-patching the distutils if reloaded + + Also ensures that no other distutils extension monkeypatched the distutils + first. + """ + while cls.__module__.startswith('setuptools'): + cls, = cls.__bases__ + if not cls.__module__.startswith('distutils'): + raise AssertionError( + "distutils has already been patched by %r" % cls + ) + return cls + +_Distribution = _get_unpatched(_Distribution) + +def _patch_distribution_metadata_write_pkg_info(): + """ + Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local + encoding to save the pkg_info. Monkey-patch its write_pkg_info method to + correct this undesirable behavior. + """ + environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2) + if not environment_local: + return + + # from Python 3.4 + def write_pkg_info(self, base_dir): + """Write the PKG-INFO file into the release tree. 
+ """ + with open(os.path.join(base_dir, 'PKG-INFO'), 'w', + encoding='UTF-8') as pkg_info: + self.write_pkg_file(pkg_info) + + distutils.dist.DistributionMetadata.write_pkg_info = write_pkg_info +_patch_distribution_metadata_write_pkg_info() + +sequence = tuple, list + +def check_importable(dist, attr, value): + try: + ep = pkg_resources.EntryPoint.parse('x='+value) + assert not ep.extras + except (TypeError,ValueError,AttributeError,AssertionError): + raise DistutilsSetupError( + "%r must be importable 'module:attrs' string (got %r)" + % (attr,value) + ) + + +def assert_string_list(dist, attr, value): + """Verify that value is a string list or None""" + try: + assert ''.join(value)!=value + except (TypeError,ValueError,AttributeError,AssertionError): + raise DistutilsSetupError( + "%r must be a list of strings (got %r)" % (attr,value) + ) +def check_nsp(dist, attr, value): + """Verify that namespace packages are valid""" + assert_string_list(dist,attr,value) + for nsp in value: + if not dist.has_contents_for(nsp): + raise DistutilsSetupError( + "Distribution contains no modules or packages for " + + "namespace package %r" % nsp + ) + if '.' in nsp: + parent = '.'.join(nsp.split('.')[:-1]) + if parent not in value: + distutils.log.warn( + "WARNING: %r is declared as a package namespace, but %r" + " is not: please correct this in setup.py", nsp, parent + ) + +def check_extras(dist, attr, value): + """Verify that extras_require mapping is valid""" + try: + for k,v in value.items(): + if ':' in k: + k,m = k.split(':',1) + if pkg_resources.invalid_marker(m): + raise DistutilsSetupError("Invalid environment marker: "+m) + list(pkg_resources.parse_requirements(v)) + except (TypeError,ValueError,AttributeError): + raise DistutilsSetupError( + "'extras_require' must be a dictionary whose values are " + "strings or lists of strings containing valid project/version " + "requirement specifiers." 
+ ) + +def assert_bool(dist, attr, value): + """Verify that value is True, False, 0, or 1""" + if bool(value) != value: + raise DistutilsSetupError( + "%r must be a boolean value (got %r)" % (attr,value) + ) +def check_requirements(dist, attr, value): + """Verify that install_requires is a valid requirements list""" + try: + list(pkg_resources.parse_requirements(value)) + except (TypeError,ValueError): + raise DistutilsSetupError( + "%r must be a string or list of strings " + "containing valid project/version requirement specifiers" % (attr,) + ) +def check_entry_points(dist, attr, value): + """Verify that entry_points map is parseable""" + try: + pkg_resources.EntryPoint.parse_map(value) + except ValueError: + e = sys.exc_info()[1] + raise DistutilsSetupError(e) + +def check_test_suite(dist, attr, value): + if not isinstance(value,basestring): + raise DistutilsSetupError("test_suite must be a string") + +def check_package_data(dist, attr, value): + """Verify that value is a dictionary of package names to glob lists""" + if isinstance(value,dict): + for k,v in value.items(): + if not isinstance(k,str): break + try: iter(v) + except TypeError: + break + else: + return + raise DistutilsSetupError( + attr+" must be a dictionary mapping package names to lists of " + "wildcard patterns" + ) + +def check_packages(dist, attr, value): + for pkgname in value: + if not re.match(r'\w+(\.\w+)*', pkgname): + distutils.log.warn( + "WARNING: %r not a valid package name; please use only" + ".-separated package names in setup.py", pkgname + ) + + +class Distribution(_Distribution): + """Distribution with support for features, tests, and package data + + This is an enhanced version of 'distutils.dist.Distribution' that + effectively adds the following new optional keyword arguments to 'setup()': + + 'install_requires' -- a string or sequence of strings specifying project + versions that the distribution requires when installed, in the format + used by 'pkg_resources.require()'. They will be installed + automatically when the package is installed. If you wish to use + packages that are not available in PyPI, or want to give your users an + alternate download location, you can add a 'find_links' option to the + '[easy_install]' section of your project's 'setup.cfg' file, and then + setuptools will scan the listed web pages for links that satisfy the + requirements. + + 'extras_require' -- a dictionary mapping names of optional "extras" to the + additional requirement(s) that using those extras incurs. For example, + this:: + + extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) + + indicates that the distribution can optionally provide an extra + capability called "reST", but it can only be used if docutils and + reSTedit are installed. If the user installs your package using + EasyInstall and requests one of your extras, the corresponding + additional requirements will be installed if needed. + + 'features' **deprecated** -- a dictionary mapping option names to + 'setuptools.Feature' + objects. Features are a portion of the distribution that can be + included or excluded based on user options, inter-feature dependencies, + and availability on the current system. Excluded features are omitted + from all setup commands, including source and binary distributions, so + you can create multiple distributions from the same source tree. + Feature names should be valid Python identifiers, except that they may + contain the '-' (minus) sign. 
Features can be included or excluded + via the command line options '--with-X' and '--without-X', where 'X' is + the name of the feature. Whether a feature is included by default, and + whether you are allowed to control this from the command line, is + determined by the Feature object. See the 'Feature' class for more + information. + + 'test_suite' -- the name of a test suite to run for the 'test' command. + If the user runs 'python setup.py test', the package will be installed, + and the named test suite will be run. The format is the same as + would be used on a 'unittest.py' command line. That is, it is the + dotted name of an object to import and call to generate a test suite. + + 'package_data' -- a dictionary mapping package names to lists of filenames + or globs to use to find data files contained in the named packages. + If the dictionary has filenames or globs listed under '""' (the empty + string), those names will be searched for in every package, in addition + to any names for the specific package. Data files found using these + names/globs will be installed along with the package, in the same + location as the package. Note that globs are allowed to reference + the contents of non-package subdirectories, as long as you use '/' as + a path separator. (Globs are automatically converted to + platform-specific paths at runtime.) + + In addition to these new keywords, this class also has several new methods + for manipulating the distribution's contents. For example, the 'include()' + and 'exclude()' methods can be thought of as in-place add and subtract + commands that add or remove packages, modules, extensions, and so on from + the distribution. They are used by the feature subsystem to configure the + distribution for the included and excluded features. + """ + + _patched_dist = None + + def patch_missing_pkg_info(self, attrs): + # Fake up a replacement for the data that would normally come from + # PKG-INFO, but which might not yet be built if this is a fresh + # checkout. 
+ # + if not attrs or 'name' not in attrs or 'version' not in attrs: + return + key = pkg_resources.safe_name(str(attrs['name'])).lower() + dist = pkg_resources.working_set.by_key.get(key) + if dist is not None and not dist.has_metadata('PKG-INFO'): + dist._version = pkg_resources.safe_version(str(attrs['version'])) + self._patched_dist = dist + + def __init__(self, attrs=None): + have_package_data = hasattr(self, "package_data") + if not have_package_data: + self.package_data = {} + _attrs_dict = attrs or {} + if 'features' in _attrs_dict or 'require_features' in _attrs_dict: + Feature.warn_deprecated() + self.require_features = [] + self.features = {} + self.dist_files = [] + self.src_root = attrs and attrs.pop("src_root", None) + self.patch_missing_pkg_info(attrs) + # Make sure we have any eggs needed to interpret 'attrs' + if attrs is not None: + self.dependency_links = attrs.pop('dependency_links', []) + assert_string_list(self,'dependency_links',self.dependency_links) + if attrs and 'setup_requires' in attrs: + self.fetch_build_eggs(attrs['setup_requires']) + for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): + if not hasattr(self,ep.name): + setattr(self,ep.name,None) + _Distribution.__init__(self,attrs) + if isinstance(self.metadata.version, numbers.Number): + # Some people apparently take "version number" too literally :) + self.metadata.version = str(self.metadata.version) + + def parse_command_line(self): + """Process features after parsing command line options""" + result = _Distribution.parse_command_line(self) + if self.features: + self._finalize_features() + return result + + def _feature_attrname(self,name): + """Convert feature name to corresponding option attribute name""" + return 'with_'+name.replace('-','_') + + def fetch_build_eggs(self, requires): + """Resolve pre-setup requirements""" + resolved_dists = pkg_resources.working_set.resolve( + pkg_resources.parse_requirements(requires), + installer=self.fetch_build_egg, + replace_conflicting=True, + ) + for dist in resolved_dists: + pkg_resources.working_set.add(dist, replace=True) + + def finalize_options(self): + _Distribution.finalize_options(self) + if self.features: + self._set_global_opts_from_features() + + for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): + value = getattr(self,ep.name,None) + if value is not None: + ep.require(installer=self.fetch_build_egg) + ep.load()(self, ep.name, value) + if getattr(self, 'convert_2to3_doctests', None): + # XXX may convert to set here when we can rely on set being builtin + self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests] + else: + self.convert_2to3_doctests = [] + + def fetch_build_egg(self, req): + """Fetch an egg needed for building""" + + try: + cmd = self._egg_fetcher + cmd.package_index.to_scan = [] + except AttributeError: + from setuptools.command.easy_install import easy_install + dist = self.__class__({'script_args':['easy_install']}) + dist.parse_config_files() + opts = dist.get_option_dict('easy_install') + keep = ( + 'find_links', 'site_dirs', 'index_url', 'optimize', + 'site_dirs', 'allow_hosts' + ) + for key in list(opts): + if key not in keep: + del opts[key] # don't use any other settings + if self.dependency_links: + links = self.dependency_links[:] + if 'find_links' in opts: + links = opts['find_links'][1].split() + links + opts['find_links'] = ('setup', links) + cmd = easy_install( + dist, args=["x"], install_dir=os.curdir, exclude_scripts=True, + always_copy=False, 
build_directory=None, editable=False, + upgrade=False, multi_version=True, no_report=True, user=False + ) + cmd.ensure_finalized() + self._egg_fetcher = cmd + return cmd.easy_install(req) + + def _set_global_opts_from_features(self): + """Add --with-X/--without-X options based on optional features""" + + go = [] + no = self.negative_opt.copy() + + for name,feature in self.features.items(): + self._set_feature(name,None) + feature.validate(self) + + if feature.optional: + descr = feature.description + incdef = ' (default)' + excdef='' + if not feature.include_by_default(): + excdef, incdef = incdef, excdef + + go.append(('with-'+name, None, 'include '+descr+incdef)) + go.append(('without-'+name, None, 'exclude '+descr+excdef)) + no['without-'+name] = 'with-'+name + + self.global_options = self.feature_options = go + self.global_options + self.negative_opt = self.feature_negopt = no + + def _finalize_features(self): + """Add/remove features and resolve dependencies between them""" + + # First, flag all the enabled items (and thus their dependencies) + for name,feature in self.features.items(): + enabled = self.feature_is_included(name) + if enabled or (enabled is None and feature.include_by_default()): + feature.include_in(self) + self._set_feature(name,1) + + # Then disable the rest, so that off-by-default features don't + # get flagged as errors when they're required by an enabled feature + for name,feature in self.features.items(): + if not self.feature_is_included(name): + feature.exclude_from(self) + self._set_feature(name,0) + + def get_command_class(self, command): + """Pluggable version of get_command_class()""" + if command in self.cmdclass: + return self.cmdclass[command] + + for ep in pkg_resources.iter_entry_points('distutils.commands',command): + ep.require(installer=self.fetch_build_egg) + self.cmdclass[command] = cmdclass = ep.load() + return cmdclass + else: + return _Distribution.get_command_class(self, command) + + def print_commands(self): + for ep in pkg_resources.iter_entry_points('distutils.commands'): + if ep.name not in self.cmdclass: + cmdclass = ep.load(False) # don't require extras, we're not running + self.cmdclass[ep.name] = cmdclass + return _Distribution.print_commands(self) + + def _set_feature(self,name,status): + """Set feature's inclusion status""" + setattr(self,self._feature_attrname(name),status) + + def feature_is_included(self,name): + """Return 1 if feature is included, 0 if excluded, 'None' if unknown""" + return getattr(self,self._feature_attrname(name)) + + def include_feature(self,name): + """Request inclusion of feature named 'name'""" + + if self.feature_is_included(name)==0: + descr = self.features[name].description + raise DistutilsOptionError( + descr + " is required, but was excluded or is not available" + ) + self.features[name].include_in(self) + self._set_feature(name,1) + + def include(self,**attrs): + """Add items to distribution that are named in keyword arguments + + For example, 'dist.include(py_modules=["x"])' would add 'x' to + the distribution's 'py_modules' attribute, if it was not already + there. + + Currently, this method only supports inclusion for attributes that are + lists or tuples. If you need to add support for adding to other + attributes in this or a subclass, you can add an '_include_X' method, + where 'X' is the name of the attribute. The method will be called with + the value passed to 'include()'.
So, 'dist.include(foo={"bar":"baz"})' + will try to call 'dist._include_foo({"bar":"baz"})', which can then + handle whatever special inclusion logic is needed. + """ + for k,v in attrs.items(): + include = getattr(self, '_include_'+k, None) + if include: + include(v) + else: + self._include_misc(k,v) + + def exclude_package(self,package): + """Remove packages, modules, and extensions in named package""" + + pfx = package+'.' + if self.packages: + self.packages = [ + p for p in self.packages + if p != package and not p.startswith(pfx) + ] + + if self.py_modules: + self.py_modules = [ + p for p in self.py_modules + if p != package and not p.startswith(pfx) + ] + + if self.ext_modules: + self.ext_modules = [ + p for p in self.ext_modules + if p.name != package and not p.name.startswith(pfx) + ] + + def has_contents_for(self,package): + """Return true if 'exclude_package(package)' would do something""" + + pfx = package+'.' + + for p in self.iter_distribution_names(): + if p==package or p.startswith(pfx): + return True + + def _exclude_misc(self,name,value): + """Handle 'exclude()' for list/tuple attrs without a special handler""" + if not isinstance(value,sequence): + raise DistutilsSetupError( + "%s: setting must be a list or tuple (%r)" % (name, value) + ) + try: + old = getattr(self,name) + except AttributeError: + raise DistutilsSetupError( + "%s: No such distribution setting" % name + ) + if old is not None and not isinstance(old,sequence): + raise DistutilsSetupError( + name+": this setting cannot be changed via include/exclude" + ) + elif old: + setattr(self,name,[item for item in old if item not in value]) + + def _include_misc(self,name,value): + """Handle 'include()' for list/tuple attrs without a special handler""" + + if not isinstance(value,sequence): + raise DistutilsSetupError( + "%s: setting must be a list (%r)" % (name, value) + ) + try: + old = getattr(self,name) + except AttributeError: + raise DistutilsSetupError( + "%s: No such distribution setting" % name + ) + if old is None: + setattr(self,name,value) + elif not isinstance(old,sequence): + raise DistutilsSetupError( + name+": this setting cannot be changed via include/exclude" + ) + else: + setattr(self,name,old+[item for item in value if item not in old]) + + def exclude(self,**attrs): + """Remove items from distribution that are named in keyword arguments + + For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from + the distribution's 'py_modules' attribute. Excluding packages uses + the 'exclude_package()' method, so all of the package's contained + packages, modules, and extensions are also excluded. + + Currently, this method only supports exclusion from attributes that are + lists or tuples. If you need to add support for excluding from other + attributes in this or a subclass, you can add an '_exclude_X' method, + where 'X' is the name of the attribute. The method will be called with + the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})' + will try to call 'dist._exclude_foo({"bar":"baz"})', which can then + handle whatever special exclusion logic is needed. 
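+ + A short sketch with hypothetical attribute values:: + + dist.py_modules = ["x", "y"] + dist.exclude(py_modules=["x"]) # py_modules is now ["y"] + dist.exclude(packages=["p"]) # also drops p.* via exclude_package()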
+ """ + for k,v in attrs.items(): + exclude = getattr(self, '_exclude_'+k, None) + if exclude: + exclude(v) + else: + self._exclude_misc(k,v) + + def _exclude_packages(self,packages): + if not isinstance(packages,sequence): + raise DistutilsSetupError( + "packages: setting must be a list or tuple (%r)" % (packages,) + ) + list(map(self.exclude_package, packages)) + + def _parse_command_opts(self, parser, args): + # Remove --with-X/--without-X options when processing command args + self.global_options = self.__class__.global_options + self.negative_opt = self.__class__.negative_opt + + # First, expand any aliases + command = args[0] + aliases = self.get_option_dict('aliases') + while command in aliases: + src,alias = aliases[command] + del aliases[command] # ensure each alias can expand only once! + import shlex + args[:1] = shlex.split(alias,True) + command = args[0] + + nargs = _Distribution._parse_command_opts(self, parser, args) + + # Handle commands that want to consume all remaining arguments + cmd_class = self.get_command_class(command) + if getattr(cmd_class,'command_consumes_arguments',None): + self.get_option_dict(command)['args'] = ("command line", nargs) + if nargs is not None: + return [] + + return nargs + + def get_cmdline_options(self): + """Return a '{cmd: {opt:val}}' map of all command-line options + + Option names are all long, but do not include the leading '--', and + contain dashes rather than underscores. If the option doesn't take + an argument (e.g. '--quiet'), the 'val' is 'None'. + + Note that options provided by config files are intentionally excluded. + """ + + d = {} + + for cmd,opts in self.command_options.items(): + + for opt,(src,val) in opts.items(): + + if src != "command line": + continue + + opt = opt.replace('_','-') + + if val==0: + cmdobj = self.get_command_obj(cmd) + neg_opt = self.negative_opt.copy() + neg_opt.update(getattr(cmdobj,'negative_opt',{})) + for neg,pos in neg_opt.items(): + if pos==opt: + opt=neg + val=None + break + else: + raise AssertionError("Shouldn't be able to get here") + + elif val==1: + val = None + + d.setdefault(cmd,{})[opt] = val + + return d + + def iter_distribution_names(self): + """Yield all packages, modules, and extension names in distribution""" + + for pkg in self.packages or (): + yield pkg + + for module in self.py_modules or (): + yield module + + for ext in self.ext_modules or (): + if isinstance(ext,tuple): + name, buildinfo = ext + else: + name = ext.name + if name.endswith('module'): + name = name[:-6] + yield name + + def handle_display_options(self, option_order): + """If there were any non-global "display-only" options + (--help-commands or the metadata display options) on the command + line, display the requested info and return true; else return + false. + """ + import sys + + if PY2 or self.help_commands: + return _Distribution.handle_display_options(self, option_order) + + # Stdout may be StringIO (e.g. in tests) + import io + if not isinstance(sys.stdout, io.TextIOWrapper): + return _Distribution.handle_display_options(self, option_order) + + # Don't wrap stdout if utf-8 is already the encoding. Provides + # workaround for #334. 
+ if sys.stdout.encoding.lower() in ('utf-8', 'utf8'): + return _Distribution.handle_display_options(self, option_order) + + # Print metadata in UTF-8 no matter the platform + encoding = sys.stdout.encoding + errors = sys.stdout.errors + newline = sys.platform != 'win32' and '\n' or None + line_buffering = sys.stdout.line_buffering + + sys.stdout = io.TextIOWrapper( + sys.stdout.detach(), 'utf-8', errors, newline, line_buffering) + try: + return _Distribution.handle_display_options(self, option_order) + finally: + sys.stdout = io.TextIOWrapper( + sys.stdout.detach(), encoding, errors, newline, line_buffering) + + +# Install it throughout the distutils +for module in distutils.dist, distutils.core, distutils.cmd: + module.Distribution = Distribution + + +class Feature: + """ + **deprecated** -- The `Feature` facility was never completely implemented + or supported, `has reported issues + <https://bitbucket.org/pypa/setuptools/issue/58>`_ and will be removed in + a future version. + + A subset of the distribution that can be excluded if unneeded/unwanted + + Features are created using these keyword arguments: + + 'description' -- a short, human readable description of the feature, to + be used in error messages, and option help messages. + + 'standard' -- if true, the feature is included by default if it is + available on the current system. Otherwise, the feature is only + included if requested via a command line '--with-X' option, or if + another included feature requires it. The default setting is 'False'. + + 'available' -- if true, the feature is available for installation on the + current system. The default setting is 'True'. + + 'optional' -- if true, the feature's inclusion can be controlled from the + command line, using the '--with-X' or '--without-X' options. If + false, the feature's inclusion status is determined automatically, + based on 'available', 'standard', and whether any other feature + requires it. The default setting is 'True'. + + 'require_features' -- a string or sequence of strings naming features + that should also be included if this feature is included. Defaults to + empty list. May also contain 'Require' objects that should be + added/removed from the distribution. + + 'remove' -- a string or list of strings naming packages to be removed + from the distribution if this feature is *not* included. If the + feature *is* included, this argument is ignored. This argument exists + to support removing features that "crosscut" a distribution, such as + defining a 'tests' feature that removes all the 'tests' subpackages + provided by other features. The default for this argument is an empty + list. (Note: the named package(s) or modules must exist in the base + distribution when the 'setup()' function is initially called.) + + other keywords -- any other keyword arguments are saved, and passed to + the distribution's 'include()' and 'exclude()' methods when the + feature is included or excluded, respectively. So, for example, you + could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be + added or removed from the distribution as appropriate. + + A feature must include at least one 'require_features', 'remove', or other + keyword argument. Otherwise, it can't affect the distribution in any way. + Note also that you can subclass 'Feature' to create your own specialized + feature types that modify the distribution in other ways when included or + excluded. See the docstrings for the various methods here for more detail.
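+ + A hypothetical sketch (names invented for illustration): registering a + feature under the key 'tests' lets users say '--without-tests':: + + setup( + # ..., + features={'tests': Feature("test suite", standard=True, + remove=["mypkg.tests"])}, + ) +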
+ Aside from the methods, the only feature attributes that distributions look + at are 'description' and 'optional'. + """ + + @staticmethod + def warn_deprecated(): + warnings.warn( + "Features are deprecated and will be removed in a future " + "version. See http://bitbucket.org/pypa/setuptools/65.", + DeprecationWarning, + stacklevel=3, + ) + + def __init__(self, description, standard=False, available=True, + optional=True, require_features=(), remove=(), **extras): + self.warn_deprecated() + + self.description = description + self.standard = standard + self.available = available + self.optional = optional + if isinstance(require_features,(str,Require)): + require_features = require_features, + + self.require_features = [ + r for r in require_features if isinstance(r,str) + ] + er = [r for r in require_features if not isinstance(r,str)] + if er: extras['require_features'] = er + + if isinstance(remove,str): + remove = remove, + self.remove = remove + self.extras = extras + + if not remove and not require_features and not extras: + raise DistutilsSetupError( + "Feature %s: must define 'require_features', 'remove', or at least one" + " of 'packages', 'py_modules', etc." % self.description + ) + + def include_by_default(self): + """Should this feature be included by default?""" + return self.available and self.standard + + def include_in(self,dist): + + """Ensure feature and its requirements are included in distribution + + You may override this in a subclass to perform additional operations on + the distribution. Note that this method may be called more than once + per feature, and so should be idempotent. + + """ + + if not self.available: + raise DistutilsPlatformError( + self.description+" is required, " + "but is not available on this platform" + ) + + dist.include(**self.extras) + + for f in self.require_features: + dist.include_feature(f) + + def exclude_from(self,dist): + + """Ensure feature is excluded from distribution + + You may override this in a subclass to perform additional operations on + the distribution. This method will be called at most once per + feature, and only after all included features have been asked to + include themselves. + """ + + dist.exclude(**self.extras) + + if self.remove: + for item in self.remove: + dist.exclude_package(item) + + def validate(self,dist): + + """Verify that feature makes sense in context of distribution + + This method is called by the distribution just before it parses its + command line. It checks to ensure that the 'remove' attribute, if any, + contains only valid package/module names that are present in the base + distribution when 'setup()' is called. You may override it in a + subclass to perform any other required validation of the feature + against a target distribution. + """ + + for item in self.remove: + if not dist.has_contents_for(item): + raise DistutilsSetupError( + "%s wants to be able to remove %s, but the distribution" + " doesn't contain any packages or modules under %s" + % (self.description, item, item) + ) diff --git a/lib/python3.4/site-packages/setuptools/extension.py b/lib/python3.4/site-packages/setuptools/extension.py new file mode 100644 index 0000000..ab5908d --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/extension.py @@ -0,0 +1,53 @@ +import sys +import re +import functools +import distutils.core +import distutils.extension + +from setuptools.dist import _get_unpatched + +_Extension = _get_unpatched(distutils.core.Extension) + +def have_pyrex(): + """ + Return True if Cython or Pyrex can be imported.
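+ + A smoke-test doctest (the result depends on what is installed, so it + only checks the type of the answer):: + + >>> have_pyrex() in (True, False) + True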
+ """ + pyrex_impls = 'Cython.Distutils.build_ext', 'Pyrex.Distutils.build_ext' + for pyrex_impl in pyrex_impls: + try: + # from (pyrex_impl) import build_ext + __import__(pyrex_impl, fromlist=['build_ext']).build_ext + return True + except Exception: + pass + return False + + +class Extension(_Extension): + """Extension that uses '.c' files in place of '.pyx' files""" + + def __init__(self, *args, **kw): + _Extension.__init__(self, *args, **kw) + self._convert_pyx_sources_to_lang() + + def _convert_pyx_sources_to_lang(self): + """ + Replace sources with .pyx extensions to sources with the target + language extension. This mechanism allows language authors to supply + pre-converted sources but to prefer the .pyx sources. + """ + if have_pyrex(): + # the build has Cython, so allow it to compile the .pyx files + return + lang = self.language or '' + target_ext = '.cpp' if lang.lower() == 'c++' else '.c' + sub = functools.partial(re.sub, '.pyx$', target_ext) + self.sources = list(map(sub, self.sources)) + +class Library(Extension): + """Just like a regular Extension, but built as a library instead""" + +distutils.core.Extension = Extension +distutils.extension.Extension = Extension +if 'distutils.command.build_ext' in sys.modules: + sys.modules['distutils.command.build_ext'].Extension = Extension diff --git a/lib/python3.4/site-packages/setuptools/lib2to3_ex.py b/lib/python3.4/site-packages/setuptools/lib2to3_ex.py new file mode 100644 index 0000000..feef591 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/lib2to3_ex.py @@ -0,0 +1,58 @@ +""" +Customized Mixin2to3 support: + + - adds support for converting doctests + + +This module raises an ImportError on Python 2. +""" + +from distutils.util import Mixin2to3 as _Mixin2to3 +from distutils import log +from lib2to3.refactor import RefactoringTool, get_fixers_from_package +import setuptools + +class DistutilsRefactoringTool(RefactoringTool): + def log_error(self, msg, *args, **kw): + log.error(msg, *args) + + def log_message(self, msg, *args): + log.info(msg, *args) + + def log_debug(self, msg, *args): + log.debug(msg, *args) + +class Mixin2to3(_Mixin2to3): + def run_2to3(self, files, doctests = False): + # See of the distribution option has been set, otherwise check the + # setuptools default. 
+ if self.distribution.use_2to3 is not True: + return + if not files: + return + log.info("Fixing "+" ".join(files)) + self.__build_fixer_names() + self.__exclude_fixers() + if doctests: + if setuptools.run_2to3_on_doctests: + r = DistutilsRefactoringTool(self.fixer_names) + r.refactor(files, write=True, doctests_only=True) + else: + _Mixin2to3.run_2to3(self, files) + + def __build_fixer_names(self): + if self.fixer_names: return + self.fixer_names = [] + for p in setuptools.lib2to3_fixer_packages: + self.fixer_names.extend(get_fixers_from_package(p)) + if self.distribution.use_2to3_fixers is not None: + for p in self.distribution.use_2to3_fixers: + self.fixer_names.extend(get_fixers_from_package(p)) + + def __exclude_fixers(self): + excluded_fixers = getattr(self, 'exclude_fixers', []) + if self.distribution.use_2to3_exclude_fixers is not None: + excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers) + for fixer_name in excluded_fixers: + if fixer_name in self.fixer_names: + self.fixer_names.remove(fixer_name) diff --git a/lib/python3.4/site-packages/setuptools/package_index.py b/lib/python3.4/site-packages/setuptools/package_index.py new file mode 100644 index 0000000..58572ce --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/package_index.py @@ -0,0 +1,1055 @@ +"""PyPI and direct package downloading""" +import sys +import os +import re +import shutil +import socket +import base64 +import hashlib +from functools import wraps + +from pkg_resources import ( + CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST, + require, Environment, find_distributions, safe_name, safe_version, + to_filename, Requirement, DEVELOP_DIST, +) +from setuptools import ssl_support +from distutils import log +from distutils.errors import DistutilsError +from setuptools.compat import (urllib2, httplib, StringIO, HTTPError, + urlparse, urlunparse, unquote, splituser, + url2pathname, name2codepoint, + unichr, urljoin, urlsplit, urlunsplit, + ConfigParser) +from setuptools.compat import filterfalse +from fnmatch import translate +from setuptools.py26compat import strip_fragment +from setuptools.py27compat import get_all_headers + +EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$') +HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I) +# this is here to fix emacs' cruddy broken syntax highlighting +PYPI_MD5 = re.compile( + '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)' + 'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\\)' +) +URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match +EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split() + +__all__ = [ + 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst', + 'interpret_distro_name', +] + +_SOCKET_TIMEOUT = 15 + +def parse_bdist_wininst(name): + """Return (base,pyversion) or (None,None) for possible .exe name""" + + lower = name.lower() + base, py_ver, plat = None, None, None + + if lower.endswith('.exe'): + if lower.endswith('.win32.exe'): + base = name[:-10] + plat = 'win32' + elif lower.startswith('.win32-py',-16): + py_ver = name[-7:-4] + base = name[:-16] + plat = 'win32' + elif lower.endswith('.win-amd64.exe'): + base = name[:-14] + plat = 'win-amd64' + elif lower.startswith('.win-amd64-py',-20): + py_ver = name[-7:-4] + base = name[:-20] + plat = 'win-amd64' + return base,py_ver,plat + + +def egg_info_for_url(url): + scheme, server, path, parameters, query, fragment = urlparse(url) + base = unquote(path.split('/')[-1]) + if server=='sourceforge.net' and base=='download': # 
XXX Yuck + base = unquote(path.split('/')[-2]) + if '#' in base: base, fragment = base.split('#',1) + return base,fragment + +def distros_for_url(url, metadata=None): + """Yield egg or source distribution objects that might be found at a URL""" + base, fragment = egg_info_for_url(url) + for dist in distros_for_location(url, base, metadata): yield dist + if fragment: + match = EGG_FRAGMENT.match(fragment) + if match: + for dist in interpret_distro_name( + url, match.group(1), metadata, precedence = CHECKOUT_DIST + ): + yield dist + +def distros_for_location(location, basename, metadata=None): + """Yield egg or source distribution objects based on basename""" + if basename.endswith('.egg.zip'): + basename = basename[:-4] # strip the .zip + if basename.endswith('.egg') and '-' in basename: + # only one, unambiguous interpretation + return [Distribution.from_location(location, basename, metadata)] + if basename.endswith('.exe'): + win_base, py_ver, platform = parse_bdist_wininst(basename) + if win_base is not None: + return interpret_distro_name( + location, win_base, metadata, py_ver, BINARY_DIST, platform + ) + # Try source distro extensions (.zip, .tgz, etc.) + # + for ext in EXTENSIONS: + if basename.endswith(ext): + basename = basename[:-len(ext)] + return interpret_distro_name(location, basename, metadata) + return [] # no extension matched + +def distros_for_filename(filename, metadata=None): + """Yield possible egg or source distribution objects based on a filename""" + return distros_for_location( + normalize_path(filename), os.path.basename(filename), metadata + ) + + +def interpret_distro_name( + location, basename, metadata, py_version=None, precedence=SOURCE_DIST, + platform=None + ): + """Generate alternative interpretations of a source distro name + + Note: if `location` is a filesystem filename, you should call + ``pkg_resources.normalize_path()`` on it before passing it to this + routine! + """ + # Generate alternative interpretations of a source distro name + # Because some packages are ambiguous as to name/versions split + # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. + # So, we generate each possible interpretation (e.g. "adns, python-1.1.0" + # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice, + # the spurious interpretations should be ignored, because in the event + # there's also an "adns" package, the spurious "python-1.1.0" version will + # compare lower than any numeric version number, and is therefore unlikely + # to match a request for it. It's still a potential problem, though, and + # in the long run PyPI and the distutils should go for "safe" names and + # versions in distribution archive names (sdist and bdist). + + parts = basename.split('-') + if not py_version: + for i,p in enumerate(parts[2:]): + if len(p)==5 and p.startswith('py2.'): + return # It's a bdist_dumb, not an sdist -- bail out + + for p in range(1,len(parts)+1): + yield Distribution( + location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), + py_version=py_version, precedence = precedence, + platform = platform + ) + +# From Python 2.7 docs +def unique_everseen(iterable, key=None): + "List unique elements, preserving order. Remember all elements ever seen."
+ # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element + +def unique_values(func): + """ + Wrap a function returning an iterable such that the resulting iterable + only ever yields unique items. + """ + @wraps(func) + def wrapper(*args, **kwargs): + return unique_everseen(func(*args, **kwargs)) + return wrapper + +REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I) +# this line is here to fix emacs' cruddy broken syntax highlighting + +@unique_values +def find_external_links(url, page): + """Find rel="homepage" and rel="download" links in `page`, yielding URLs""" + + for match in REL.finditer(page): + tag, rel = match.groups() + rels = set(map(str.strip, rel.lower().split(','))) + if 'homepage' in rels or 'download' in rels: + for match in HREF.finditer(tag): + yield urljoin(url, htmldecode(match.group(1))) + + for tag in ("<th>Home Page", "<th>Download URL"): + pos = page.find(tag) + if pos!=-1: + match = HREF.search(page,pos) + if match: + yield urljoin(url, htmldecode(match.group(1))) + +user_agent = "Python-urllib/%s setuptools/%s" % ( + sys.version[:3], require('setuptools')[0].version +) + +class ContentChecker(object): + """ + A null content checker that defines the interface for checking content + """ + def feed(self, block): + """ + Feed a block of data to the hash. + """ + return + + def is_valid(self): + """ + Check the hash. Return False if validation fails. + """ + return True + + def report(self, reporter, template): + """ + Call reporter with information about the checker (hash name) + substituted into the template. 
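+ + A tiny doctest for this null checker, Python 3 shown (HashChecker + overrides report() to pass its hash name to the reporter):: + + >>> ContentChecker().report(print, 'checking %s') is None + True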
+ """ + return + +class HashChecker(ContentChecker): + pattern = re.compile( + r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)=' + r'(?P<expected>[a-f0-9]+)' + ) + + def __init__(self, hash_name, expected): + self.hash_name = hash_name + self.hash = hashlib.new(hash_name) + self.expected = expected + + @classmethod + def from_url(cls, url): + "Construct a (possibly null) ContentChecker from a URL" + fragment = urlparse(url)[-1] + if not fragment: + return ContentChecker() + match = cls.pattern.search(fragment) + if not match: + return ContentChecker() + return cls(**match.groupdict()) + + def feed(self, block): + self.hash.update(block) + + def is_valid(self): + return self.hash.hexdigest() == self.expected + + def report(self, reporter, template): + msg = template % self.hash_name + return reporter(msg) + + +class PackageIndex(Environment): + """A distribution index that scans web pages for download URLs""" + + def __init__( + self, index_url="https://pypi.python.org/simple", hosts=('*',), + ca_bundle=None, verify_ssl=True, *args, **kw + ): + Environment.__init__(self,*args,**kw) + self.index_url = index_url + "/"[:not index_url.endswith('/')] + self.scanned_urls = {} + self.fetched_urls = {} + self.package_pages = {} + self.allows = re.compile('|'.join(map(translate,hosts))).match + self.to_scan = [] + if verify_ssl and ssl_support.is_available and (ca_bundle or ssl_support.find_ca_bundle()): + self.opener = ssl_support.opener_for(ca_bundle) + else: self.opener = urllib2.urlopen + + def process_url(self, url, retrieve=False): + """Evaluate a URL as a possible download, and maybe retrieve it""" + if url in self.scanned_urls and not retrieve: + return + self.scanned_urls[url] = True + if not URL_SCHEME(url): + self.process_filename(url) + return + else: + dists = list(distros_for_url(url)) + if dists: + if not self.url_ok(url): + return + self.debug("Found link: %s", url) + + if dists or not retrieve or url in self.fetched_urls: + list(map(self.add, dists)) + return # don't need the actual page + + if not self.url_ok(url): + self.fetched_urls[url] = True + return + + self.info("Reading %s", url) + self.fetched_urls[url] = True # prevent multiple fetch attempts + f = self.open_url(url, "Download error on %s: %%s -- Some packages may not be found!" % url) + if f is None: return + self.fetched_urls[f.url] = True + if 'html' not in f.headers.get('content-type', '').lower(): + f.close() # not html, we can't process it + return + + base = f.url # handle redirects + page = f.read() + if not isinstance(page, str): # We are in Python 3 and got bytes. We want str. 
+ if isinstance(f, HTTPError): + # Errors have no charset, assume latin1: + charset = 'latin-1' + else: + charset = f.headers.get_param('charset') or 'latin-1' + page = page.decode(charset, "ignore") + f.close() + for match in HREF.finditer(page): + link = urljoin(base, htmldecode(match.group(1))) + self.process_url(link) + if url.startswith(self.index_url) and getattr(f,'code',None)!=404: + page = self.process_index(url, page) + + def process_filename(self, fn, nested=False): + # process filenames or directories + if not os.path.exists(fn): + self.warn("Not found: %s", fn) + return + + if os.path.isdir(fn) and not nested: + path = os.path.realpath(fn) + for item in os.listdir(path): + self.process_filename(os.path.join(path,item), True) + + dists = distros_for_filename(fn) + if dists: + self.debug("Found: %s", fn) + list(map(self.add, dists)) + + def url_ok(self, url, fatal=False): + s = URL_SCHEME(url) + if (s and s.group(1).lower()=='file') or self.allows(urlparse(url)[1]): + return True + msg = ("\nNote: Bypassing %s (disallowed host; see " + "http://bit.ly/1dg9ijs for details).\n") + if fatal: + raise DistutilsError(msg % url) + else: + self.warn(msg, url) + + def scan_egg_links(self, search_path): + for item in search_path: + if os.path.isdir(item): + for entry in os.listdir(item): + if entry.endswith('.egg-link'): + self.scan_egg_link(item, entry) + + def scan_egg_link(self, path, entry): + lines = [_f for _f in map(str.strip, + open(os.path.join(path, entry))) if _f] + if len(lines)==2: + for dist in find_distributions(os.path.join(path, lines[0])): + dist.location = os.path.join(path, *lines) + dist.precedence = SOURCE_DIST + self.add(dist) + + def process_index(self,url,page): + """Process the contents of a PyPI page""" + def scan(link): + # Process a URL to see if it's for a package page + if link.startswith(self.index_url): + parts = list(map( + unquote, link[len(self.index_url):].split('/') + )) + if len(parts)==2 and '#' not in parts[1]: + # it's a package page, sanitize and index it + pkg = safe_name(parts[0]) + ver = safe_version(parts[1]) + self.package_pages.setdefault(pkg.lower(),{})[link] = True + return to_filename(pkg), to_filename(ver) + return None, None + + # process an index page into the package-page index + for match in HREF.finditer(page): + try: + scan(urljoin(url, htmldecode(match.group(1)))) + except ValueError: + pass + + pkg, ver = scan(url) # ensure this page is in the page index + if pkg: + # process individual package page + for new_url in find_external_links(url, page): + # Process the found URL + base, frag = egg_info_for_url(new_url) + if base.endswith('.py') and not frag: + if ver: + new_url+='#egg=%s-%s' % (pkg,ver) + else: + self.need_version_info(url) + self.scan_url(new_url) + + return PYPI_MD5.sub( + lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page + ) + else: + return "" # no sense double-scanning non-package pages + + def need_version_info(self, url): + self.scan_all( + "Page at %s links to .py file(s) without version info; an index " + "scan is required.", url + ) + + def scan_all(self, msg=None, *args): + if self.index_url not in self.fetched_urls: + if msg: self.warn(msg,*args) + self.info( + "Scanning index of all packages (this may take a while)" + ) + self.scan_url(self.index_url) + + def find_packages(self, requirement): + self.scan_url(self.index_url + requirement.unsafe_name+'/') + + if not self.package_pages.get(requirement.key): + # Fall back to safe version of the name + self.scan_url(self.index_url + 
requirement.project_name+'/') + + if not self.package_pages.get(requirement.key): + # We couldn't find the target package, so search the index page too + self.not_found_in_index(requirement) + + for url in list(self.package_pages.get(requirement.key,())): + # scan each page that might be related to the desired package + self.scan_url(url) + + def obtain(self, requirement, installer=None): + self.prescan() + self.find_packages(requirement) + for dist in self[requirement.key]: + if dist in requirement: + return dist + self.debug("%s does not match %s", requirement, dist) + return super(PackageIndex, self).obtain(requirement,installer) + + def check_hash(self, checker, filename, tfp): + """ + checker is a ContentChecker + """ + checker.report(self.debug, + "Validating %%s checksum for %s" % filename) + if not checker.is_valid(): + tfp.close() + os.unlink(filename) + raise DistutilsError( + "%s validation failed for %s; " + "possible download problem?" % ( + checker.hash.name, os.path.basename(filename)) + ) + + def add_find_links(self, urls): + """Add `urls` to the list that will be prescanned for searches""" + for url in urls: + if ( + self.to_scan is None # if we have already "gone online" + or not URL_SCHEME(url) # or it's a local file/directory + or url.startswith('file:') + or list(distros_for_url(url)) # or a direct package link + ): + # then go ahead and process it now + self.scan_url(url) + else: + # otherwise, defer retrieval till later + self.to_scan.append(url) + + def prescan(self): + """Scan urls scheduled for prescanning (e.g. --find-links)""" + if self.to_scan: + list(map(self.scan_url, self.to_scan)) + self.to_scan = None # from now on, go ahead and process immediately + + def not_found_in_index(self, requirement): + if self[requirement.key]: # we've seen at least one distro + meth, msg = self.info, "Couldn't retrieve index page for %r" + else: # no distros seen for this name, might be misspelled + meth, msg = (self.warn, + "Couldn't find index page for %r (maybe misspelled?)") + meth(msg, requirement.unsafe_name) + self.scan_all() + + def download(self, spec, tmpdir): + """Locate and/or download `spec` to `tmpdir`, returning a local path + + `spec` may be a ``Requirement`` object, or a string containing a URL, + an existing local filename, or a project/version requirement spec + (i.e. the string form of a ``Requirement`` object). If it is the URL + of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one + that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is + automatically created alongside the downloaded file. + + If `spec` is a ``Requirement`` object or a string containing a + project/version requirement spec, this method returns the location of + a matching distribution (possibly after downloading it to `tmpdir`). + If `spec` is a locally existing file or directory name, it is simply + returned unchanged. If `spec` is a URL, it is downloaded to a subpath + of `tmpdir`, and the local filename is returned. Various errors may be + raised if a problem occurs during downloading. 
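+ + Illustrative call shapes (assuming 'pi' is a PackageIndex; the specs + and paths are hypothetical):: + + pi.download('setuptools>=5.0', '/tmp/build') # requirement spec + pi.download('https://host/pkg-1.0.tar.gz', '/tmp/build') # URL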
+ """ + if not isinstance(spec,Requirement): + scheme = URL_SCHEME(spec) + if scheme: + # It's a url, download it to tmpdir + found = self._download_url(scheme.group(1), spec, tmpdir) + base, fragment = egg_info_for_url(spec) + if base.endswith('.py'): + found = self.gen_setup(found,fragment,tmpdir) + return found + elif os.path.exists(spec): + # Existing file or directory, just return it + return spec + else: + try: + spec = Requirement.parse(spec) + except ValueError: + raise DistutilsError( + "Not a URL, existing file, or requirement spec: %r" % + (spec,) + ) + return getattr(self.fetch_distribution(spec, tmpdir),'location',None) + + def fetch_distribution( + self, requirement, tmpdir, force_scan=False, source=False, + develop_ok=False, local_index=None + ): + """Obtain a distribution suitable for fulfilling `requirement` + + `requirement` must be a ``pkg_resources.Requirement`` instance. + If necessary, or if the `force_scan` flag is set, the requirement is + searched for in the (online) package index as well as the locally + installed packages. If a distribution matching `requirement` is found, + the returned distribution's ``location`` is the value you would have + gotten from calling the ``download()`` method with the matching + distribution's URL or filename. If no matching distribution is found, + ``None`` is returned. + + If the `source` flag is set, only source distributions and source + checkout links will be considered. Unless the `develop_ok` flag is + set, development and system eggs (i.e., those using the ``.egg-info`` + format) will be ignored. + """ + # process a Requirement + self.info("Searching for %s", requirement) + skipped = {} + dist = None + + def find(req, env=None): + if env is None: + env = self + # Find a matching distribution; may be called more than once + + for dist in env[req.key]: + + if dist.precedence==DEVELOP_DIST and not develop_ok: + if dist not in skipped: + self.warn("Skipping development or system egg: %s",dist) + skipped[dist] = 1 + continue + + if dist in req and (dist.precedence<=SOURCE_DIST or not source): + return dist + + if force_scan: + self.prescan() + self.find_packages(requirement) + dist = find(requirement) + + if local_index is not None: + dist = dist or find(requirement, local_index) + + if dist is None: + if self.to_scan is not None: + self.prescan() + dist = find(requirement) + + if dist is None and not force_scan: + self.find_packages(requirement) + dist = find(requirement) + + if dist is None: + self.warn( + "No local packages or download links found for %s%s", + (source and "a source distribution of " or ""), + requirement, + ) + else: + self.info("Best match: %s", dist) + return dist.clone(location=self.download(dist.location, tmpdir)) + + def fetch(self, requirement, tmpdir, force_scan=False, source=False): + """Obtain a file suitable for fulfilling `requirement` + + DEPRECATED; use the ``fetch_distribution()`` method now instead. For + backward compatibility, this routine is identical but returns the + ``location`` of the downloaded distribution instead of a distribution + object. 
+ """ + dist = self.fetch_distribution(requirement,tmpdir,force_scan,source) + if dist is not None: + return dist.location + return None + + def gen_setup(self, filename, fragment, tmpdir): + match = EGG_FRAGMENT.match(fragment) + dists = match and [ + d for d in + interpret_distro_name(filename, match.group(1), None) if d.version + ] or [] + + if len(dists)==1: # unambiguous ``#egg`` fragment + basename = os.path.basename(filename) + + # Make sure the file has been downloaded to the temp dir. + if os.path.dirname(filename) != tmpdir: + dst = os.path.join(tmpdir, basename) + from setuptools.command.easy_install import samefile + if not samefile(filename, dst): + shutil.copy2(filename, dst) + filename=dst + + with open(os.path.join(tmpdir, 'setup.py'), 'w') as file: + file.write( + "from setuptools import setup\n" + "setup(name=%r, version=%r, py_modules=[%r])\n" + % ( + dists[0].project_name, dists[0].version, + os.path.splitext(basename)[0] + ) + ) + return filename + + elif match: + raise DistutilsError( + "Can't unambiguously interpret project/version identifier %r; " + "any dashes in the name or version should be escaped using " + "underscores. %r" % (fragment,dists) + ) + else: + raise DistutilsError( + "Can't process plain .py files without an '#egg=name-version'" + " suffix to enable automatic setup script generation." + ) + + dl_blocksize = 8192 + def _download_to(self, url, filename): + self.info("Downloading %s", url) + # Download the file + fp, info = None, None + try: + checker = HashChecker.from_url(url) + fp = self.open_url(strip_fragment(url)) + if isinstance(fp, HTTPError): + raise DistutilsError( + "Can't download %s: %s %s" % (url, fp.code,fp.msg) + ) + headers = fp.info() + blocknum = 0 + bs = self.dl_blocksize + size = -1 + if "content-length" in headers: + # Some servers return multiple Content-Length headers :( + sizes = get_all_headers(headers, 'Content-Length') + size = max(map(int, sizes)) + self.reporthook(url, filename, blocknum, bs, size) + with open(filename,'wb') as tfp: + while True: + block = fp.read(bs) + if block: + checker.feed(block) + tfp.write(block) + blocknum += 1 + self.reporthook(url, filename, blocknum, bs, size) + else: + break + self.check_hash(checker, filename, tfp) + return headers + finally: + if fp: fp.close() + + def reporthook(self, url, filename, blocknum, blksize, size): + pass # no-op + + def open_url(self, url, warning=None): + if url.startswith('file:'): + return local_open(url) + try: + return open_with_auth(url, self.opener) + except (ValueError, httplib.InvalidURL): + v = sys.exc_info()[1] + msg = ' '.join([str(arg) for arg in v.args]) + if warning: + self.warn(warning, msg) + else: + raise DistutilsError('%s %s' % (url, msg)) + except urllib2.HTTPError: + v = sys.exc_info()[1] + return v + except urllib2.URLError: + v = sys.exc_info()[1] + if warning: + self.warn(warning, v.reason) + else: + raise DistutilsError("Download error for %s: %s" + % (url, v.reason)) + except httplib.BadStatusLine: + v = sys.exc_info()[1] + if warning: + self.warn(warning, v.line) + else: + raise DistutilsError( + '%s returned a bad status line. The server might be ' + 'down, %s' % + (url, v.line) + ) + except httplib.HTTPException: + v = sys.exc_info()[1] + if warning: + self.warn(warning, v) + else: + raise DistutilsError("Download error for %s: %s" + % (url, v)) + + def _download_url(self, scheme, url, tmpdir): + # Determine download filename + # + name, fragment = egg_info_for_url(url) + if name: + while '..' 
in name: + name = name.replace('..','.').replace('\\','_') + else: + name = "__downloaded__" # default if URL has no path contents + + if name.endswith('.egg.zip'): + name = name[:-4] # strip the extra .zip before download + + filename = os.path.join(tmpdir,name) + + # Download the file + # + if scheme=='svn' or scheme.startswith('svn+'): + return self._download_svn(url, filename) + elif scheme=='git' or scheme.startswith('git+'): + return self._download_git(url, filename) + elif scheme.startswith('hg+'): + return self._download_hg(url, filename) + elif scheme=='file': + return url2pathname(urlparse(url)[2]) + else: + self.url_ok(url, True) # raises error if not allowed + return self._attempt_download(url, filename) + + def scan_url(self, url): + self.process_url(url, True) + + def _attempt_download(self, url, filename): + headers = self._download_to(url, filename) + if 'html' in headers.get('content-type','').lower(): + return self._download_html(url, headers, filename) + else: + return filename + + def _download_html(self, url, headers, filename): + file = open(filename) + for line in file: + if line.strip(): + # Check for a subversion index page + if re.search(r'<title>([^- ]+ - )?Revision \d+:', line): + # it's a subversion index page: + file.close() + os.unlink(filename) + return self._download_svn(url, filename) + break # not an index page + file.close() + os.unlink(filename) + raise DistutilsError("Unexpected HTML page found at "+url) + + def _download_svn(self, url, filename): + url = url.split('#',1)[0] # remove any fragment for svn's sake + creds = '' + if url.lower().startswith('svn:') and '@' in url: + scheme, netloc, path, p, q, f = urlparse(url) + if not netloc and path.startswith('//') and '/' in path[2:]: + netloc, path = path[2:].split('/',1) + auth, host = splituser(netloc) + if auth: + if ':' in auth: + user, pw = auth.split(':',1) + creds = " --username=%s --password=%s" % (user, pw) + else: + creds = " --username="+auth + netloc = host + url = urlunparse((scheme, netloc, path, p, q, f)) + self.info("Doing subversion checkout from %s to %s", url, filename) + os.system("svn checkout%s -q %s %s" % (creds, url, filename)) + return filename + + @staticmethod + def _vcs_split_rev_from_url(url, pop_prefix=False): + scheme, netloc, path, query, frag = urlsplit(url) + + scheme = scheme.split('+', 1)[-1] + + # Some fragment identification fails + path = path.split('#',1)[0] + + rev = None + if '@' in path: + path, rev = path.rsplit('@', 1) + + # Also, discard fragment + url = urlunsplit((scheme, netloc, path, query, '')) + + return url, rev + + def _download_git(self, url, filename): + filename = filename.split('#',1)[0] + url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) + + self.info("Doing git clone from %s to %s", url, filename) + os.system("git clone --quiet %s %s" % (url, filename)) + + if rev is not None: + self.info("Checking out %s", rev) + os.system("(cd %s && git checkout --quiet %s)" % ( + filename, + rev, + )) + + return filename + + def _download_hg(self, url, filename): + filename = filename.split('#',1)[0] + url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) + + self.info("Doing hg clone from %s to %s", url, filename) + os.system("hg clone --quiet %s %s" % (url, filename)) + + if rev is not None: + self.info("Updating to %s", rev) + os.system("(cd %s && hg up -C -r %s >&-)" % ( + filename, + rev, + )) + + return filename + + def debug(self, msg, *args): + log.debug(msg, *args) + + def info(self, msg, *args): + log.info(msg, *args) + + def
warn(self, msg, *args): + log.warn(msg, *args) + +# This pattern matches a character entity reference (a decimal numeric +# reference, a hexadecimal numeric reference, or a named reference). +entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub + +def uchr(c): + if not isinstance(c, int): + return c + if c>255: return unichr(c) + return chr(c) + +def decode_entity(match): + what = match.group(1) + if what.startswith('#x'): + what = int(what[2:], 16) + elif what.startswith('#'): + what = int(what[1:]) + else: + what = name2codepoint.get(what, match.group(0)) + return uchr(what) + +def htmldecode(text): + """Decode HTML entities in the given text.""" + return entity_sub(decode_entity, text) + +def socket_timeout(timeout=15): + def _socket_timeout(func): + def _socket_timeout(*args, **kwargs): + old_timeout = socket.getdefaulttimeout() + socket.setdefaulttimeout(timeout) + try: + return func(*args, **kwargs) + finally: + socket.setdefaulttimeout(old_timeout) + return _socket_timeout + return _socket_timeout + +def _encode_auth(auth): + """ + A function compatible with Python 2.3-3.3 that will encode + auth from a URL suitable for an HTTP header. + >>> str(_encode_auth('username%3Apassword')) + 'dXNlcm5hbWU6cGFzc3dvcmQ=' + + Long auth strings should not cause a newline to be inserted. + >>> long_auth = 'username:' + 'password'*10 + >>> chr(10) in str(_encode_auth(long_auth)) + False + """ + auth_s = unquote(auth) + # convert to bytes + auth_bytes = auth_s.encode() + # use the legacy interface for Python 2.3 support + encoded_bytes = base64.encodestring(auth_bytes) + # convert back to a string + encoded = encoded_bytes.decode() + # strip the trailing newline + return encoded.replace('\n','') + +class Credential(object): + """ + A username/password pair. Use like a namedtuple. + """ + def __init__(self, username, password): + self.username = username + self.password = password + + def __iter__(self): + yield self.username + yield self.password + + def __str__(self): + return '%(username)s:%(password)s' % vars(self) + +class PyPIConfig(ConfigParser.ConfigParser): + + def __init__(self): + """ + Load from ~/.pypirc + """ + defaults = dict.fromkeys(['username', 'password', 'repository'], '') + ConfigParser.ConfigParser.__init__(self, defaults) + + rc = os.path.join(os.path.expanduser('~'), '.pypirc') + if os.path.exists(rc): + self.read(rc) + + @property + def creds_by_repository(self): + sections_with_repositories = [ + section for section in self.sections() + if self.get(section, 'repository').strip() + ] + + return dict(map(self._get_repo_cred, sections_with_repositories)) + + def _get_repo_cred(self, section): + repo = self.get(section, 'repository').strip() + return repo, Credential( + self.get(section, 'username').strip(), + self.get(section, 'password').strip(), + ) + + def find_credential(self, url): + """ + If the URL indicated appears to be a repository defined in this + config, return the credential for that repository. + """ + for repository, cred in self.creds_by_repository.items(): + if url.startswith(repository): + return cred + + +def open_with_auth(url, opener=urllib2.urlopen): + """Open a urllib2 request, handling HTTP authentication""" + + scheme, netloc, path, params, query, frag = urlparse(url) + + # Double scheme does not raise on Mac OS X as revealed by a + # failing test. We would expect "nonnumeric port". Refs #20.
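+    # (Added example: urlparse('http://http://example.org/') parses with a + # netloc of 'http:', which is exactly what the guard below rejects.)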
+ if netloc.endswith(':'): + raise httplib.InvalidURL("nonnumeric port: ''") + + if scheme in ('http', 'https'): + auth, host = splituser(netloc) + else: + auth = None + + if not auth: + cred = PyPIConfig().find_credential(url) + if cred: + auth = str(cred) + info = cred.username, url + log.info('Authenticating as %s for %s (from .pypirc)' % info) + + if auth: + auth = "Basic " + _encode_auth(auth) + new_url = urlunparse((scheme,host,path,params,query,frag)) + request = urllib2.Request(new_url) + request.add_header("Authorization", auth) + else: + request = urllib2.Request(url) + + request.add_header('User-Agent', user_agent) + fp = opener(request) + + if auth: + # Put authentication info back into request URL if same host, + # so that links found on the page will work + s2, h2, path2, param2, query2, frag2 = urlparse(fp.url) + if s2==scheme and h2==host: + fp.url = urlunparse((s2,netloc,path2,param2,query2,frag2)) + + return fp + +# adding a timeout to avoid freezing package_index +open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth) + + +def fix_sf_url(url): + return url # backward compatibility + +def local_open(url): + """Read a local path, with special support for directories""" + scheme, server, path, param, query, frag = urlparse(url) + filename = url2pathname(path) + if os.path.isfile(filename): + return urllib2.urlopen(url) + elif path.endswith('/') and os.path.isdir(filename): + files = [] + for f in os.listdir(filename): + if f=='index.html': + with open(os.path.join(filename,f),'r') as fp: + body = fp.read() + break + elif os.path.isdir(os.path.join(filename,f)): + f+='/' + files.append("<a href=%r>%s</a>" % (f,f)) + else: + body = ("<html><head><title>%s</title>" % url) + \ + "</head><body>%s</body></html>" % '\n'.join(files) + status, message = 200, "OK" + else: + status, message, body = 404, "Path not found", "Not found" + + headers = {'content-type': 'text/html'} + return HTTPError(url, status, message, headers, StringIO(body)) diff --git a/lib/python3.4/site-packages/setuptools/py26compat.py b/lib/python3.4/site-packages/setuptools/py26compat.py new file mode 100644 index 0000000..738b0cc --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/py26compat.py @@ -0,0 +1,19 @@ +""" +Compatibility Support for Python 2.6 and earlier +""" + +import sys + +from setuptools.compat import splittag + +def strip_fragment(url): + """ + In `Python 8280 `_, Python 2.7 and + later was patched to disregard the fragment when making URL requests. + Do the same for Python 2.6 and earlier. + """ + url, fragment = splittag(url) + return url + +if sys.version_info >= (2,7): + strip_fragment = lambda x: x diff --git a/lib/python3.4/site-packages/setuptools/py27compat.py b/lib/python3.4/site-packages/setuptools/py27compat.py new file mode 100644 index 0000000..9d2886d --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/py27compat.py @@ -0,0 +1,15 @@ +""" +Compatibility Support for Python 2.7 and earlier +""" + +import sys + +def get_all_headers(message, key): + """ + Given an HTTPMessage, return all headers matching a given key.
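+ + A doctest using the stdlib email API, which backs HTTPMessage on + Python 3 (Message.__setitem__ appends, so the header repeats):: + + >>> from email.message import Message + >>> msg = Message() + >>> msg['Content-Length'] = '3' + >>> msg['Content-Length'] = '5' + >>> get_all_headers(msg, 'Content-Length') + ['3', '5']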
+ """ + return message.get_all(key) + +if sys.version_info < (3,): + def get_all_headers(message, key): + return message.getheaders(key) diff --git a/lib/python3.4/site-packages/setuptools/py31compat.py b/lib/python3.4/site-packages/setuptools/py31compat.py new file mode 100644 index 0000000..c487ac0 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/py31compat.py @@ -0,0 +1,52 @@ +import sys +import unittest + +__all__ = ['get_config_vars', 'get_path'] + +try: + # Python 2.7 or >=3.2 + from sysconfig import get_config_vars, get_path +except ImportError: + from distutils.sysconfig import get_config_vars, get_python_lib + def get_path(name): + if name not in ('platlib', 'purelib'): + raise ValueError("Name must be purelib or platlib") + return get_python_lib(name=='platlib') + +try: + # Python >=3.2 + from tempfile import TemporaryDirectory +except ImportError: + import shutil + import tempfile + class TemporaryDirectory(object): + """" + Very simple temporary directory context manager. + Will try to delete afterward, but will also ignore OS and similar + errors on deletion. + """ + def __init__(self): + self.name = None # Handle mkdtemp raising an exception + self.name = tempfile.mkdtemp() + + def __enter__(self): + return self.name + + def __exit__(self, exctype, excvalue, exctrace): + try: + shutil.rmtree(self.name, True) + except OSError: #removal errors are not the only possible + pass + self.name = None + + +unittest_main = unittest.main + +_PY31 = (3, 1) <= sys.version_info[:2] < (3, 2) +if _PY31: + # on Python 3.1, translate testRunner==None to TextTestRunner + # for compatibility with Python 2.6, 2.7, and 3.2+ + def unittest_main(*args, **kwargs): + if 'testRunner' in kwargs and kwargs['testRunner'] is None: + kwargs['testRunner'] = unittest.TextTestRunner + return unittest.main(*args, **kwargs) diff --git a/lib/python3.4/site-packages/setuptools/sandbox.py b/lib/python3.4/site-packages/setuptools/sandbox.py new file mode 100644 index 0000000..e79a13a --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/sandbox.py @@ -0,0 +1,336 @@ +import os +import sys +import tempfile +import operator +import functools +import itertools +import re + +import pkg_resources + +if os.name == "java": + import org.python.modules.posix.PosixModule as _os +else: + _os = sys.modules[os.name] +try: + _file = file +except NameError: + _file = None +_open = open +from distutils.errors import DistutilsError +from pkg_resources import working_set + +from setuptools.compat import builtins + +__all__ = [ + "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", +] + +def _execfile(filename, globals, locals=None): + """ + Python 3 implementation of execfile. + """ + mode = 'rb' + # Python 2.6 compile requires LF for newlines, so use deprecated + # Universal newlines support. 
+ if sys.version_info < (2, 7): + mode += 'U' + with open(filename, mode) as stream: + script = stream.read() + if locals is None: + locals = globals + code = compile(script, filename, 'exec') + exec(code, globals, locals) + +def run_setup(setup_script, args): + """Run a distutils setup script, sandboxed in its directory""" + old_dir = os.getcwd() + save_argv = sys.argv[:] + save_path = sys.path[:] + setup_dir = os.path.abspath(os.path.dirname(setup_script)) + temp_dir = os.path.join(setup_dir,'temp') + if not os.path.isdir(temp_dir): os.makedirs(temp_dir) + save_tmp = tempfile.tempdir + save_modules = sys.modules.copy() + pr_state = pkg_resources.__getstate__() + try: + tempfile.tempdir = temp_dir + os.chdir(setup_dir) + try: + sys.argv[:] = [setup_script]+list(args) + sys.path.insert(0, setup_dir) + # reset to include setup dir, w/clean callback list + working_set.__init__() + working_set.callbacks.append(lambda dist:dist.activate()) + def runner(): + ns = dict(__file__=setup_script, __name__='__main__') + _execfile(setup_script, ns) + DirectorySandbox(setup_dir).run(runner) + except SystemExit: + v = sys.exc_info()[1] + if v.args and v.args[0]: + raise + # Normal exit, just return + finally: + pkg_resources.__setstate__(pr_state) + sys.modules.update(save_modules) + # remove any modules imported within the sandbox + del_modules = [ + mod_name for mod_name in sys.modules + if mod_name not in save_modules + # exclude any encodings modules. See #285 + and not mod_name.startswith('encodings.') + ] + list(map(sys.modules.__delitem__, del_modules)) + os.chdir(old_dir) + sys.path[:] = save_path + sys.argv[:] = save_argv + tempfile.tempdir = save_tmp + + +class AbstractSandbox: + """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" + + _active = False + + def __init__(self): + self._attrs = [ + name for name in dir(_os) + if not name.startswith('_') and hasattr(self,name) + ] + + def _copy(self, source): + for name in self._attrs: + setattr(os, name, getattr(source,name)) + + def run(self, func): + """Run 'func' under os sandboxing""" + try: + self._copy(self) + if _file: + builtins.file = self._file + builtins.open = self._open + self._active = True + return func() + finally: + self._active = False + if _file: + builtins.file = _file + builtins.open = _open + self._copy(_os) + + def _mk_dual_path_wrapper(name): + original = getattr(_os,name) + def wrap(self,src,dst,*args,**kw): + if self._active: + src,dst = self._remap_pair(name,src,dst,*args,**kw) + return original(src,dst,*args,**kw) + return wrap + + for name in ["rename", "link", "symlink"]: + if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name) + + def _mk_single_path_wrapper(name, original=None): + original = original or getattr(_os,name) + def wrap(self,path,*args,**kw): + if self._active: + path = self._remap_input(name,path,*args,**kw) + return original(path,*args,**kw) + return wrap + + if _file: + _file = _mk_single_path_wrapper('file', _file) + _open = _mk_single_path_wrapper('open', _open) + for name in [ + "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", + "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", + "startfile", "mkfifo", "mknod", "pathconf", "access" + ]: + if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name) + + def _mk_single_with_return(name): + original = getattr(_os,name) + def wrap(self,path,*args,**kw): + if self._active: + path = self._remap_input(name,path,*args,**kw) + return self._remap_output(name, original(path,*args,**kw)) + 
return original(path,*args,**kw) + return wrap + + for name in ['readlink', 'tempnam']: + if hasattr(_os,name): locals()[name] = _mk_single_with_return(name) + + def _mk_query(name): + original = getattr(_os,name) + def wrap(self,*args,**kw): + retval = original(*args,**kw) + if self._active: + return self._remap_output(name, retval) + return retval + return wrap + + for name in ['getcwd', 'tmpnam']: + if hasattr(_os,name): locals()[name] = _mk_query(name) + + def _validate_path(self,path): + """Called to remap or validate any path, whether input or output""" + return path + + def _remap_input(self,operation,path,*args,**kw): + """Called for path inputs""" + return self._validate_path(path) + + def _remap_output(self,operation,path): + """Called for path outputs""" + return self._validate_path(path) + + def _remap_pair(self,operation,src,dst,*args,**kw): + """Called for path pairs like rename, link, and symlink operations""" + return ( + self._remap_input(operation+'-from',src,*args,**kw), + self._remap_input(operation+'-to',dst,*args,**kw) + ) + + +if hasattr(os, 'devnull'): + _EXCEPTIONS = [os.devnull,] +else: + _EXCEPTIONS = [] + +try: + from win32com.client.gencache import GetGeneratePath + _EXCEPTIONS.append(GetGeneratePath()) + del GetGeneratePath +except ImportError: + # it appears pywin32 is not installed, so no need to exclude. + pass + +class DirectorySandbox(AbstractSandbox): + """Restrict operations to a single subdirectory - pseudo-chroot""" + + write_ops = dict.fromkeys([ + "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", + "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", + ]) + + _exception_patterns = [ + # Allow lib2to3 to attempt to save a pickled grammar object (#121) + '.*lib2to3.*\.pickle$', + ] + "exempt writing to paths that match the pattern" + + def __init__(self, sandbox, exceptions=_EXCEPTIONS): + self._sandbox = os.path.normcase(os.path.realpath(sandbox)) + self._prefix = os.path.join(self._sandbox,'') + self._exceptions = [ + os.path.normcase(os.path.realpath(path)) + for path in exceptions + ] + AbstractSandbox.__init__(self) + + def _violation(self, operation, *args, **kw): + raise SandboxViolation(operation, args, kw) + + if _file: + def _file(self, path, mode='r', *args, **kw): + if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): + self._violation("file", path, mode, *args, **kw) + return _file(path,mode,*args,**kw) + + def _open(self, path, mode='r', *args, **kw): + if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): + self._violation("open", path, mode, *args, **kw) + return _open(path,mode,*args,**kw) + + def tmpnam(self): + self._violation("tmpnam") + + def _ok(self, path): + active = self._active + try: + self._active = False + realpath = os.path.normcase(os.path.realpath(path)) + return ( + self._exempted(realpath) + or realpath == self._sandbox + or realpath.startswith(self._prefix) + ) + finally: + self._active = active + + def _exempted(self, filepath): + start_matches = ( + filepath.startswith(exception) + for exception in self._exceptions + ) + pattern_matches = ( + re.match(pattern, filepath) + for pattern in self._exception_patterns + ) + candidates = itertools.chain(start_matches, pattern_matches) + return any(candidates) + + def _remap_input(self, operation, path, *args, **kw): + """Called for path inputs""" + if operation in self.write_ops and not self._ok(path): + self._violation(operation, os.path.realpath(path), *args, **kw) + return path + + def _remap_pair(self, operation, src, 
dst, *args, **kw): + """Called for path pairs like rename, link, and symlink operations""" + if not self._ok(src) or not self._ok(dst): + self._violation(operation, src, dst, *args, **kw) + return (src,dst) + + def open(self, file, flags, mode=0o777, *args, **kw): + """Called for low-level os.open()""" + if flags & WRITE_FLAGS and not self._ok(file): + self._violation("os.open", file, flags, mode, *args, **kw) + return _os.open(file,flags,mode, *args, **kw) + +WRITE_FLAGS = functools.reduce( + operator.or_, [getattr(_os, a, 0) for a in + "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] +) + +class SandboxViolation(DistutilsError): + """A setup script attempted to modify the filesystem outside the sandbox""" + + def __str__(self): + return """SandboxViolation: %s%r %s + +The package setup script has attempted to modify files on your system +that are not within the EasyInstall build area, and has been aborted. + +This package cannot be safely installed by EasyInstall, and may not +support alternate installation locations even if you run its setup +script by hand. Please inform the package's author and the EasyInstall +maintainers to find out if a fix or workaround is available.""" % self.args + + + + + + + + + + + + + + + + + + + + + + + + + + + +# diff --git a/lib/python3.4/site-packages/setuptools/script (dev).tmpl b/lib/python3.4/site-packages/setuptools/script (dev).tmpl new file mode 100644 index 0000000..d58b1bb --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/script (dev).tmpl @@ -0,0 +1,5 @@ +# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r +__requires__ = %(spec)r +__import__('pkg_resources').require(%(spec)r) +__file__ = %(dev_path)r +exec(compile(open(__file__).read(), __file__, 'exec')) diff --git a/lib/python3.4/site-packages/setuptools/script.tmpl b/lib/python3.4/site-packages/setuptools/script.tmpl new file mode 100644 index 0000000..ff5efbc --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/script.tmpl @@ -0,0 +1,3 @@ +# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r +__requires__ = %(spec)r +__import__('pkg_resources').run_script(%(spec)r, %(script_name)r) diff --git a/lib/python3.4/site-packages/setuptools/site-patch.py b/lib/python3.4/site-packages/setuptools/site-patch.py new file mode 100644 index 0000000..c216801 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/site-patch.py @@ -0,0 +1,76 @@ +def __boot(): + import sys + import os + PYTHONPATH = os.environ.get('PYTHONPATH') + if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH): + PYTHONPATH = [] + else: + PYTHONPATH = PYTHONPATH.split(os.pathsep) + + pic = getattr(sys,'path_importer_cache',{}) + stdpath = sys.path[len(PYTHONPATH):] + mydir = os.path.dirname(__file__) + #print "searching",stdpath,sys.path + + for item in stdpath: + if item==mydir or not item: + continue # skip if current dir. 
on Windows, or my own directory + importer = pic.get(item) + if importer is not None: + loader = importer.find_module('site') + if loader is not None: + # This should actually reload the current module + loader.load_module('site') + break + else: + try: + import imp # Avoid import loop in Python >= 3.3 + stream, path, descr = imp.find_module('site',[item]) + except ImportError: + continue + if stream is None: + continue + try: + # This should actually reload the current module + imp.load_module('site',stream,path,descr) + finally: + stream.close() + break + else: + raise ImportError("Couldn't find the real 'site' module") + + #print "loaded", __file__ + + known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp + + oldpos = getattr(sys,'__egginsert',0) # save old insertion position + sys.__egginsert = 0 # and reset the current one + + for item in PYTHONPATH: + addsitedir(item) + + sys.__egginsert += oldpos # restore effective old position + + d, nd = makepath(stdpath[0]) + insert_at = None + new_path = [] + + for item in sys.path: + p, np = makepath(item) + + if np==nd and insert_at is None: + # We've hit the first 'system' path entry, so added entries go here + insert_at = len(new_path) + + if np in known_paths or insert_at is None: + new_path.append(item) + else: + # new path after the insert point, back-insert it + new_path.insert(insert_at, item) + insert_at += 1 + + sys.path[:] = new_path + +if __name__=='site': + __boot() + del __boot diff --git a/lib/python3.4/site-packages/setuptools/ssl_support.py b/lib/python3.4/site-packages/setuptools/ssl_support.py new file mode 100644 index 0000000..cc7db06 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/ssl_support.py @@ -0,0 +1,241 @@ +import os +import socket +import atexit +import re + +import pkg_resources +from pkg_resources import ResolutionError, ExtractionError +from setuptools.compat import urllib2 + +try: + import ssl +except ImportError: + ssl = None + +__all__ = [ + 'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths', + 'opener_for' +] + +cert_paths = """ +/etc/pki/tls/certs/ca-bundle.crt +/etc/ssl/certs/ca-certificates.crt +/usr/share/ssl/certs/ca-bundle.crt +/usr/local/share/certs/ca-root.crt +/etc/ssl/cert.pem +/System/Library/OpenSSL/certs/cert.pem +""".strip().split() + + +HTTPSHandler = HTTPSConnection = object + +for what, where in ( + ('HTTPSHandler', ['urllib2','urllib.request']), + ('HTTPSConnection', ['httplib', 'http.client']), +): + for module in where: + try: + exec("from %s import %s" % (module, what)) + except ImportError: + pass + +is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection) + + +try: + from ssl import CertificateError, match_hostname +except ImportError: + try: + from backports.ssl_match_hostname import CertificateError + from backports.ssl_match_hostname import match_hostname + except ImportError: + CertificateError = None + match_hostname = None + +if not CertificateError: + class CertificateError(ValueError): + pass + +if not match_hostname: + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + # Ported from python3-syntax: + # leftmost, *remainder = dn.split(r'.') + parts = dn.split(r'.') + leftmost = parts[0] + remainder = parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one 
wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. 
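# --- Editorial aside (illustrative sketch, not the shipped code) ---
# _dnsname_match() above compiles the presented identifier into a regex in
# which a lone '*' matches exactly one non-empty, dotless label. A tiny
# self-contained restatement of that core rule (demo_dnsname_match is a
# hypothetical name; partial wildcards like 'www*' are handled separately
# in the real helper):
import re

def demo_dnsname_match(pattern, hostname):
    pats = ['[^.]+' if lab == '*' else re.escape(lab)
            for lab in pattern.split('.')]
    regex = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
    return regex.match(hostname) is not None

assert demo_dnsname_match('*.example.com', 'www.example.com')
assert not demo_dnsname_match('*.example.com', 'a.b.example.com')  # one label only
assert not demo_dnsname_match('*.example.com', 'example.com')
# --- end aside; as the comment above notes, commonName is only consulted
# when no subjectAltName dNSName entries exist ---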
+ if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +class VerifyingHTTPSHandler(HTTPSHandler): + """Simple verifying handler: no auth, subclasses, timeouts, etc.""" + + def __init__(self, ca_bundle): + self.ca_bundle = ca_bundle + HTTPSHandler.__init__(self) + + def https_open(self, req): + return self.do_open( + lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req + ) + + +class VerifyingHTTPSConn(HTTPSConnection): + """Simple verifying connection: no auth, subclasses, timeouts, etc.""" + def __init__(self, host, ca_bundle, **kw): + HTTPSConnection.__init__(self, host, **kw) + self.ca_bundle = ca_bundle + + def connect(self): + sock = socket.create_connection( + (self.host, self.port), getattr(self, 'source_address', None) + ) + + # Handle the socket if a (proxy) tunnel is present + if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None): + self.sock = sock + self._tunnel() + # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7 + # change self.host to mean the proxy server host when tunneling is + # being used. Adapt, since we are interested in the destination + # host for the match_hostname() comparison. + actual_host = self._tunnel_host + else: + actual_host = self.host + + self.sock = ssl.wrap_socket( + sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle + ) + try: + match_hostname(self.sock.getpeercert(), actual_host) + except CertificateError: + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + +def opener_for(ca_bundle=None): + """Get a urlopen() replacement that uses ca_bundle for verification""" + return urllib2.build_opener( + VerifyingHTTPSHandler(ca_bundle or find_ca_bundle()) + ).open + + +_wincerts = None + +def get_win_certfile(): + global _wincerts + if _wincerts is not None: + return _wincerts.name + + try: + from wincertstore import CertFile + except ImportError: + return None + + class MyCertFile(CertFile): + def __init__(self, stores=(), certs=()): + CertFile.__init__(self) + for store in stores: + self.addstore(store) + self.addcerts(certs) + atexit.register(self.close) + + _wincerts = MyCertFile(stores=['CA', 'ROOT']) + return _wincerts.name + + +def find_ca_bundle(): + """Return an existing CA bundle path, or None""" + if os.name=='nt': + return get_win_certfile() + else: + for cert_path in cert_paths: + if os.path.isfile(cert_path): + return cert_path + try: + return pkg_resources.resource_filename('certifi', 'cacert.pem') + except (ImportError, ResolutionError, ExtractionError): + return None diff --git a/lib/python3.4/site-packages/setuptools/svn_utils.py b/lib/python3.4/site-packages/setuptools/svn_utils.py new file mode 100644 index 0000000..2dcfd89 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/svn_utils.py @@ -0,0 +1,583 @@ +import os +import re +import sys +from distutils import log +import xml.dom.pulldom +import shlex +import locale +import codecs +import unicodedata +import warnings +from setuptools.compat import unicode, PY2 +from setuptools.py31compat import TemporaryDirectory +from xml.sax.saxutils import unescape + +try: + import urlparse +except ImportError: + import 
urllib.parse as urlparse
+
+from subprocess import Popen as _Popen, PIPE as _PIPE
+
+#NOTE: Use of the command line options requires SVN 1.3 or newer (December 2005)
+# and SVN 1.3 hasn't been supported by the developers since mid 2008.
+
+#subprocess is called several times with shell=(sys.platform=='win32')
+#see the following for more information:
+# http://bugs.python.org/issue8557
+# http://stackoverflow.com/questions/5658622/
+# python-subprocess-popen-environment-path
+
+def _run_command(args, stdout=_PIPE, stderr=_PIPE, encoding=None, stream=0):
+    #regarding the shell argument, see: http://bugs.python.org/issue8557
+    try:
+        proc = _Popen(args, stdout=stdout, stderr=stderr,
+                      shell=(sys.platform == 'win32'))
+
+        data = proc.communicate()[stream]
+    except OSError:
+        return 1, ''
+
+    #decode the collected output before handing it back
+    data = decode_as_string(data, encoding)
+
+    #communicate calls wait()
+    return proc.returncode, data
+
+
+def _get_entry_schedule(entry):
+    schedule = entry.getElementsByTagName('schedule')[0]
+    return "".join([t.nodeValue
+                    for t in schedule.childNodes
+                    if t.nodeType == t.TEXT_NODE])
+
+
+def _get_target_property(target):
+    property_text = target.getElementsByTagName('property')[0]
+    return "".join([t.nodeValue
+                    for t in property_text.childNodes
+                    if t.nodeType == t.TEXT_NODE])
+
+
+def _get_xml_data(decoded_str):
+    if PY2:
+        #old versions want an encoded string
+        data = decoded_str.encode('utf-8')
+    else:
+        data = decoded_str
+    return data
+
+
+def joinpath(prefix, *suffix):
+    if not prefix or prefix == '.':
+        return os.path.join(*suffix)
+    return os.path.join(prefix, *suffix)
+
+
+def determine_console_encoding():
+    try:
+        #try for the preferred encoding
+        encoding = locale.getpreferredencoding()
+
+        #see if the locale.getdefaultlocale returns null
+        #some versions of python/platforms return US-ASCII
+        #when it cannot determine an encoding
+        if not encoding or encoding == "US-ASCII":
+            encoding = locale.getdefaultlocale()[1]
+
+        if encoding:
+            codecs.lookup(encoding)  # make sure a lookup error is not made
+
+    except (locale.Error, LookupError):
+        encoding = None
+
+    is_osx = sys.platform == "darwin"
+    if not encoding:
+        return ["US-ASCII", "utf-8"][is_osx]
+    elif encoding.startswith("mac-") and is_osx:
+        #certain versions of python would return mac-roman as the default
+        #on OSX, a leftover of earlier mac versions.
+        return "utf-8"
+    else:
+        return encoding
+
+_console_encoding = determine_console_encoding()
+
+
+def decode_as_string(text, encoding=None):
+    """
+    Decode the console or file output explicitly using getpreferredencoding.
+    The text parameter should be an encoded string; if it is not, no decode
+    occurs.  If no encoding is given, getpreferredencoding is used; if an
+    encoding is specified, that one is used instead.  This is needed for SVN
+    --xml output.  Unicode is explicitly put in composed NFC form.
+
+    --xml output should be UTF-8 (SVN Issue 2938); the discussion on the
+    Subversion DEV list from 2007 seems to indicate the same.
+    """
+    #text should be a byte string
+
+    if encoding is None:
+        encoding = _console_encoding
+
+    if not isinstance(text, unicode):
+        text = text.decode(encoding)
+
+    text = unicodedata.normalize('NFC', text)
+
+    return text
+
+
+def parse_dir_entries(decoded_str):
+    '''Parse the entries from a recursive info xml'''
+    doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str))
+    entries = list()
+
+    for event, node in doc:
+        if event == 'START_ELEMENT' and node.nodeName == 'entry':
+            doc.expandNode(node)
+            if not _get_entry_schedule(node).startswith('delete'):
+                entries.append((node.getAttribute('path'),
+                                node.getAttribute('kind')))
+
+    return entries[1:]  # do not want the root directory
+
+
+def parse_externals_xml(decoded_str, prefix=''):
+    '''Parse a propget svn:externals xml'''
+    prefix = os.path.normpath(prefix)
+    prefix = os.path.normcase(prefix)
+
+    doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str))
+    externals = list()
+
+    for event, node in doc:
+        if event == 'START_ELEMENT' and node.nodeName == 'target':
+            doc.expandNode(node)
+            path = os.path.normpath(node.getAttribute('path'))
+
+            if os.path.normcase(path).startswith(prefix):
+                path = path[len(prefix)+1:]
+
+            data = _get_target_property(node)
+            #data should be decoded already
+            for external in parse_external_prop(data):
+                externals.append(joinpath(path, external))
+
+    return externals
+
+
+def parse_external_prop(lines):
+    """
+    Parse the value of a retrieved svn:externals entry.
+
+    possible token setups (with quoting and backslash escaping in later
+    versions):
+        URL[@#] EXT_FOLDERNAME
+        [-r#] URL EXT_FOLDERNAME
+        EXT_FOLDERNAME [-r#] URL
+    """
+    externals = []
+    for line in lines.splitlines():
+        line = line.lstrip()  # there might be a "\ "
+        if not line:
+            continue
+
+        if PY2:
+            #shlex handles NULLs just fine and shlex in 2.7 tries to encode
+            #as ascii automatically
+            line = line.encode('utf-8')
+        line = shlex.split(line)
+        if PY2:
+            line = [x.decode('utf-8') for x in line]
+
+        #EXT_FOLDERNAME is either the first or last token, depending on
+        #where the URL falls
+        if urlparse.urlsplit(line[-1])[0]:
+            external = line[0]
+        else:
+            external = line[-1]
+
+        external = decode_as_string(external, encoding="utf-8")
+        externals.append(os.path.normpath(external))
+
+    return externals
+
+
+def parse_prop_file(filename, key):
+    found = False
+    f = open(filename, 'rt')
+    data = ''
+    try:
+        for line in iter(f.readline, ''):  # can't use direct iter!
+            parts = line.split()
+            if len(parts) == 2:
+                kind, length = parts
+                data = f.read(int(length))
+                if kind == 'K' and data == key:
+                    found = True
+                elif kind == 'V' and found:
+                    break
+    finally:
+        f.close()
+
+    return data
+class SvnInfo(object):
+    '''
+    Generic svn_info object.  It has little knowledge of how to extract
+    information; use cls.load to instantiate the parser matching the
+    installed svn version.
+
+    Paths are not filesystem encoded.
+    '''
+
+    @staticmethod
+    def get_svn_version():
+        # Temp config directory should be enough to check for repository
+        # This is needed because .svn always creates .subversion and
+        # some operating systems do not handle dot directory correctly.
+        # Real queries in real svn repos will be concerned with its creation
+        with TemporaryDirectory() as tempdir:
+            code, data = _run_command(['svn',
+                                       '--config-dir', tempdir,
+                                       '--version',
+                                       '--quiet'])
+
+        if code == 0 and data:
+            return data.strip()
+        else:
+            return ''
+
+    #svnversion return values (previous implementations return max revision)
+    #  4123:4168     mixed revision working copy
+    #  4168M         modified working copy
+    #  4123S         switched working copy
+    #  4123:4168MS   mixed revision, modified, switched working copy
+    revision_re = re.compile(r'(?:([\-0-9]+):)?(\d+)([a-z]*)\s*$', re.I)
+
+    @classmethod
+    def load(cls, dirname=''):
+        normdir = os.path.normpath(dirname)
+
+        # Temp config directory should be enough to check for repository
+        # This is needed because .svn always creates .subversion and
+        # some operating systems do not handle dot directory correctly.
+        # Real queries in real svn repos will be concerned with its creation
+        with TemporaryDirectory() as tempdir:
+            code, data = _run_command(['svn',
+                                       '--config-dir', tempdir,
+                                       'info', normdir])
+
+        # Must check for some contents, as some use empty directories
+        # in testcases; however, only 'entries' is needed, and the info
+        # command above MUST have worked
+        svn_dir = os.path.join(normdir, '.svn')
+        is_svn_wd = (not code or
+                     os.path.isfile(os.path.join(svn_dir, 'entries')))
+
+        svn_version = tuple(cls.get_svn_version().split('.'))
+
+        try:
+            base_svn_version = tuple(int(x) for x in svn_version[:2])
+        except ValueError:
+            base_svn_version = tuple()
+
+        if not is_svn_wd:
+            #return an instance of this NO-OP class
+            return SvnInfo(dirname)
+
+        if code or not base_svn_version or base_svn_version < (1, 3):
+            warnings.warn(("No SVN 1.3+ command found: falling back "
+                           "on pre 1.7 .svn parsing"), DeprecationWarning)
+            return SvnFileInfo(dirname)
+
+        if base_svn_version < (1, 5):
+            return Svn13Info(dirname)
+
+        return Svn15Info(dirname)
+
+    def __init__(self, path=''):
+        self.path = path
+        self._entries = None
+        self._externals = None
+
+    def get_revision(self):
+        'Retrieve the directory revision information using svnversion'
+        code, data = _run_command(['svnversion', '-c', self.path])
+        if code:
+            log.warn("svnversion failed")
+            return 0
+
+        parsed = self.revision_re.match(data)
+        if parsed:
+            return int(parsed.group(2))
+        else:
+            return 0
+
+    @property
+    def entries(self):
+        if self._entries is None:
+            self._entries = self.get_entries()
+        return self._entries
+
+    @property
+    def externals(self):
+        if self._externals is None:
+            self._externals = self.get_externals()
+        return self._externals
+
+    def iter_externals(self):
+        '''
+        Iterate over the svn:external references in the repository path.
+        '''
+        for item in self.externals:
+            yield item
+
+    def iter_files(self):
+        '''
+        Iterate over the non-deleted file entries in the repository path
+        '''
+        for item, kind in self.entries:
+            if kind.lower() == 'file':
+                yield item
+
+    def iter_dirs(self, include_root=True):
+        '''
+        Iterate over the non-deleted directory entries in the repository path
+        '''
+        if include_root:
+            yield self.path
+        for item, kind in self.entries:
+            if kind.lower() == 'dir':
+                yield item
+
+    def get_entries(self):
+        return []
+
+    def get_externals(self):
+        return []
+
+
+class Svn13Info(SvnInfo):
+    def get_entries(self):
+        code, data = _run_command(['svn', 'info', '-R', '--xml', self.path],
+                                  encoding="utf-8")
+
+        if code:
+            log.debug("svn info failed")
+            return []
+
+        return parse_dir_entries(data)
+
+    def get_externals(self):
+        #Prior to 1.5, --xml was not supported for svn propget, and the -R
+        #output format breaks the shlex compatible semantics.
+        cmd = ['svn', 'propget', 'svn:externals']
+        result = []
+        for folder in self.iter_dirs():
+            code, lines = _run_command(cmd + [folder], encoding="utf-8")
+            if code != 0:
+                log.warn("svn propget failed")
+                return []
+            #lines should be a str
+            for external in parse_external_prop(lines):
+                if folder:
+                    external = os.path.join(folder, external)
+                result.append(os.path.normpath(external))
+
+        return result
+
+
+class Svn15Info(Svn13Info):
+    def get_externals(self):
+        cmd = ['svn', 'propget', 'svn:externals', self.path, '-R', '--xml']
+        code, lines = _run_command(cmd, encoding="utf-8")
+        if code:
+            log.debug("svn propget failed")
+            return []
+        return parse_externals_xml(lines, prefix=os.path.abspath(self.path))
+
+
+class SvnFileInfo(SvnInfo):
+
+    def __init__(self, path=''):
+        super(SvnFileInfo, self).__init__(path)
+        self._directories = None
+        self._revision = None
+
+    def _walk_svn(self, base):
+        entry_file = joinpath(base, '.svn', 'entries')
+        if os.path.isfile(entry_file):
+            entries = SVNEntriesFile.load(base)
+            yield (base, False, entries.parse_revision())
+            for path in entries.get_undeleted_records():
+                path = decode_as_string(path)
+                path = joinpath(base, path)
+                if os.path.isfile(path):
+                    yield (path, True, None)
+                elif os.path.isdir(path):
+                    for item in self._walk_svn(path):
+                        yield item
+
+    def _build_entries(self):
+        entries = list()
+
+        rev = 0
+        for path, isfile, dir_rev in self._walk_svn(self.path):
+            if isfile:
+                entries.append((path, 'file'))
+            else:
+                entries.append((path, 'dir'))
+                rev = max(rev, dir_rev)
+
+        self._entries = entries
+        self._revision = rev
+
+    def get_entries(self):
+        if self._entries is None:
+            self._build_entries()
+        return self._entries
+
+    def get_revision(self):
+        if self._revision is None:
+            self._build_entries()
+        return self._revision
+
+    def get_externals(self):
+        prop_files = [['.svn', 'dir-prop-base'],
+                      ['.svn', 'dir-props']]
+        externals = []
+
+        for dirname in self.iter_dirs():
+            prop_file = None
+            for rel_parts in prop_files:
+                filename = joinpath(dirname, *rel_parts)
+                if os.path.isfile(filename):
+                    prop_file = filename
+
+            if prop_file is not None:
+                ext_prop = parse_prop_file(prop_file, 'svn:externals')
+                #ext_prop should be utf-8 coming from svn:externals
+                ext_prop = decode_as_string(ext_prop, encoding="utf-8")
+                externals.extend(parse_external_prop(ext_prop))
+
+        return externals
+
+
+def svn_finder(dirname=''):
+    #externals and entries are combined here due to the common interface
+    #and the lack of dir_props in svn 1.7
+    info = SvnInfo.load(dirname)
+    for path in info.iter_files():
+        yield path
+
+    for path in info.iter_externals():
+        sub_info = SvnInfo.load(path)
+        for sub_path in sub_info.iter_files():
+            yield sub_path
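# --- Editorial sketch (illustrative only): the SvnInfo.load() dispatch ---
# load() above picks a parsing strategy from the detected svn version and
# whether the directory looks like a working copy. A simplified restatement
# (pick_strategy is a hypothetical name, not the shipped API):
def pick_strategy(base_svn_version, is_svn_wd, code):
    if not is_svn_wd:
        return 'SvnInfo'      # no-op: not a working copy
    if code or not base_svn_version or base_svn_version < (1, 3):
        return 'SvnFileInfo'  # parse .svn/entries files directly
    if base_svn_version < (1, 5):
        return 'Svn13Info'    # svn info --xml, but propget without --xml
    return 'Svn15Info'        # both info and propget support --xml

assert pick_strategy((1, 8), True, 0) == 'Svn15Info'
assert pick_strategy((1, 4), True, 0) == 'Svn13Info'
assert pick_strategy((), True, 1) == 'SvnFileInfo'
assert pick_strategy((1, 8), False, 0) == 'SvnInfo'
# --- end sketch ---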
+
+
+class SVNEntriesFile(object):
+    def __init__(self, data):
+        self.data = data
+
+    @classmethod
+    def load(class_, base):
+        filename = os.path.join(base, '.svn', 'entries')
+        f = open(filename)
+        try:
+            result = SVNEntriesFile.read(f)
+        finally:
+            f.close()
+        return result
+
+    @classmethod
+    def read(class_, fileobj):
+        data = fileobj.read()
+        is_xml = data.startswith('<?xml')
+        class_ = [SVNEntriesFileText, SVNEntriesFileXML][is_xml]
+        return class_(data)
+
+    def parse_revision(self):
+        all_revs = self.parse_revision_numbers() + [0]
+        return max(all_revs)
+
+
+class SVNEntriesFileText(SVNEntriesFile):
+    known_svn_versions = {
+        '1.4.x': 8,
+        '1.5.x': 9,
+        '1.6.x': 10,
+    }
+
+    def is_valid(self):
+        return True
+
+    def get_url(self):
+        return self.get_sections()[0][4]
+
+    def get_sections(self):
+        SECTION_DIVIDER = '\f\n'
+        sections = self.data.split(SECTION_DIVIDER)
+        sections = [x.splitlines() for x in sections]
+        try:
+            # remove the SVN version number from the first section
+            svn_version = int(sections[0].pop(0))
+            if svn_version not in self.known_svn_versions.values():
+                log.warn("Unknown subversion version %d", svn_version)
+        except ValueError:
+            return
+        return sections
+
+    def parse_revision_numbers(self):
+        revision_line_number = 9
+        rev_numbers = [
+            int(section[revision_line_number])
+            for section in self.get_sections()
+            if (len(section) > revision_line_number
+                and section[revision_line_number])
+        ]
+        return rev_numbers
+
+    def get_undeleted_records(self):
+        undeleted = lambda s: s and s[0] and (len(s) < 6 or s[5] != 'delete')
+        result = [
+            section[0]
+            for section in self.get_sections()
+            if undeleted(section)
+        ]
+        return result
+
+
+class SVNEntriesFileXML(SVNEntriesFile):
+    def is_valid(self):
+        return True
+
+    def get_url(self):
+        "Get repository URL"
+        urlre = re.compile('url="([^"]+)"')
+        return urlre.search(self.data).group(1)
+
+    def parse_revision_numbers(self):
+        revre = re.compile(r'committed-rev="(\d+)"')
+        return [
+            int(m.group(1))
+            for m in revre.finditer(self.data)
+        ]
+
+    def get_undeleted_records(self):
+        entries_pattern = \
+            re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I)
+        results = [
+            unescape(match.group(1))
+            for match in entries_pattern.finditer(self.data)
+        ]
+        return results
+
+
+if __name__ == '__main__':
+    for name in svn_finder(sys.argv[1]):
+        print(name)
diff --git a/lib/python3.4/site-packages/setuptools/tests/__init__.py b/lib/python3.4/site-packages/setuptools/tests/__init__.py
new file mode 100644
index 0000000..d6a4542
--- /dev/null
+++ b/lib/python3.4/site-packages/setuptools/tests/__init__.py
@@ -0,0 +1,351 @@
+"""Tests for the 'setuptools' package"""
+import sys
+import os
+import unittest
+import doctest
+import distutils.core
+import distutils.cmd
+from distutils.errors import DistutilsOptionError, DistutilsPlatformError
+from distutils.errors import DistutilsSetupError
+from distutils.core import Extension
+from distutils.version import LooseVersion
+from setuptools.compat import func_code
+
+import setuptools.dist
+import setuptools.depends as dep
+from setuptools import Feature
+from setuptools.depends import Require
+
+def additional_tests():
+    suite = unittest.TestSuite((
+        doctest.DocFileSuite(
+            os.path.join('tests', 'api_tests.txt'),
+            optionflags=doctest.ELLIPSIS, package='pkg_resources',
+        ),
+    ))
+    if sys.platform == 'win32':
+        suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt'))
+    return suite
+
+def makeSetup(**args):
+    """Return distribution from 'setup(**args)', without executing commands"""
+
+    distutils.core._setup_stop_after = "commandline"
+
+    # Don't let system command line leak into tests!
+ args.setdefault('script_args',['install']) + + try: + return setuptools.setup(**args) + finally: + distutils.core._setup_stop_after = None + + +class DependsTests(unittest.TestCase): + + def testExtractConst(self): + if not hasattr(dep, 'extract_constant'): + # skip on non-bytecode platforms + return + + def f1(): + global x, y, z + x = "test" + y = z + + fc = func_code(f1) + # unrecognized name + self.assertEqual(dep.extract_constant(fc,'q', -1), None) + + # constant assigned + self.assertEqual(dep.extract_constant(fc,'x', -1), "test") + + # expression assigned + self.assertEqual(dep.extract_constant(fc,'y', -1), -1) + + # recognized name, not assigned + self.assertEqual(dep.extract_constant(fc,'z', -1), None) + + def testFindModule(self): + self.assertRaises(ImportError, dep.find_module, 'no-such.-thing') + self.assertRaises(ImportError, dep.find_module, 'setuptools.non-existent') + f,p,i = dep.find_module('setuptools.tests') + f.close() + + def testModuleExtract(self): + if not hasattr(dep, 'get_module_constant'): + # skip on non-bytecode platforms + return + + from email import __version__ + self.assertEqual( + dep.get_module_constant('email','__version__'), __version__ + ) + self.assertEqual( + dep.get_module_constant('sys','version'), sys.version + ) + self.assertEqual( + dep.get_module_constant('setuptools.tests','__doc__'),__doc__ + ) + + def testRequire(self): + if not hasattr(dep, 'extract_constant'): + # skip on non-bytecode platformsh + return + + req = Require('Email','1.0.3','email') + + self.assertEqual(req.name, 'Email') + self.assertEqual(req.module, 'email') + self.assertEqual(req.requested_version, '1.0.3') + self.assertEqual(req.attribute, '__version__') + self.assertEqual(req.full_name(), 'Email-1.0.3') + + from email import __version__ + self.assertEqual(req.get_version(), __version__) + self.assertTrue(req.version_ok('1.0.9')) + self.assertTrue(not req.version_ok('0.9.1')) + self.assertTrue(not req.version_ok('unknown')) + + self.assertTrue(req.is_present()) + self.assertTrue(req.is_current()) + + req = Require('Email 3000','03000','email',format=LooseVersion) + self.assertTrue(req.is_present()) + self.assertTrue(not req.is_current()) + self.assertTrue(not req.version_ok('unknown')) + + req = Require('Do-what-I-mean','1.0','d-w-i-m') + self.assertTrue(not req.is_present()) + self.assertTrue(not req.is_current()) + + req = Require('Tests', None, 'tests', homepage="http://example.com") + self.assertEqual(req.format, None) + self.assertEqual(req.attribute, None) + self.assertEqual(req.requested_version, None) + self.assertEqual(req.full_name(), 'Tests') + self.assertEqual(req.homepage, 'http://example.com') + + paths = [os.path.dirname(p) for p in __path__] + self.assertTrue(req.is_present(paths)) + self.assertTrue(req.is_current(paths)) + + +class DistroTests(unittest.TestCase): + + def setUp(self): + self.e1 = Extension('bar.ext',['bar.c']) + self.e2 = Extension('c.y', ['y.c']) + + self.dist = makeSetup( + packages=['a', 'a.b', 'a.b.c', 'b', 'c'], + py_modules=['b.d','x'], + ext_modules = (self.e1, self.e2), + package_dir = {}, + ) + + def testDistroType(self): + self.assertTrue(isinstance(self.dist,setuptools.dist.Distribution)) + + def testExcludePackage(self): + self.dist.exclude_package('a') + self.assertEqual(self.dist.packages, ['b','c']) + + self.dist.exclude_package('b') + self.assertEqual(self.dist.packages, ['c']) + self.assertEqual(self.dist.py_modules, ['x']) + self.assertEqual(self.dist.ext_modules, [self.e1, self.e2]) + + self.dist.exclude_package('c') 
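# --- Editorial aside (illustrative only): what exclude_package() prunes ---
# The assertions below rely on dotted-prefix matching: excluding 'a' removes
# 'a' and everything under 'a.', but never an unrelated 'ab'. A standalone
# model of that rule (excluded() is a hypothetical helper, not the real API):
def excluded(name, package):
    return name == package or name.startswith(package + '.')

assert excluded('a.b.c', 'a')
assert not excluded('ab', 'a')
assert not excluded('x', 'a')
# --- end aside; the real checks on self.dist resume below ---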
+ self.assertEqual(self.dist.packages, []) + self.assertEqual(self.dist.py_modules, ['x']) + self.assertEqual(self.dist.ext_modules, [self.e1]) + + # test removals from unspecified options + makeSetup().exclude_package('x') + + def testIncludeExclude(self): + # remove an extension + self.dist.exclude(ext_modules=[self.e1]) + self.assertEqual(self.dist.ext_modules, [self.e2]) + + # add it back in + self.dist.include(ext_modules=[self.e1]) + self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) + + # should not add duplicate + self.dist.include(ext_modules=[self.e1]) + self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) + + def testExcludePackages(self): + self.dist.exclude(packages=['c','b','a']) + self.assertEqual(self.dist.packages, []) + self.assertEqual(self.dist.py_modules, ['x']) + self.assertEqual(self.dist.ext_modules, [self.e1]) + + def testEmpty(self): + dist = makeSetup() + dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) + dist = makeSetup() + dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) + + def testContents(self): + self.assertTrue(self.dist.has_contents_for('a')) + self.dist.exclude_package('a') + self.assertTrue(not self.dist.has_contents_for('a')) + + self.assertTrue(self.dist.has_contents_for('b')) + self.dist.exclude_package('b') + self.assertTrue(not self.dist.has_contents_for('b')) + + self.assertTrue(self.dist.has_contents_for('c')) + self.dist.exclude_package('c') + self.assertTrue(not self.dist.has_contents_for('c')) + + def testInvalidIncludeExclude(self): + self.assertRaises(DistutilsSetupError, + self.dist.include, nonexistent_option='x' + ) + self.assertRaises(DistutilsSetupError, + self.dist.exclude, nonexistent_option='x' + ) + self.assertRaises(DistutilsSetupError, + self.dist.include, packages={'x':'y'} + ) + self.assertRaises(DistutilsSetupError, + self.dist.exclude, packages={'x':'y'} + ) + self.assertRaises(DistutilsSetupError, + self.dist.include, ext_modules={'x':'y'} + ) + self.assertRaises(DistutilsSetupError, + self.dist.exclude, ext_modules={'x':'y'} + ) + + self.assertRaises(DistutilsSetupError, + self.dist.include, package_dir=['q'] + ) + self.assertRaises(DistutilsSetupError, + self.dist.exclude, package_dir=['q'] + ) + + +class FeatureTests(unittest.TestCase): + + def setUp(self): + self.req = Require('Distutils','1.0.3','distutils') + self.dist = makeSetup( + features={ + 'foo': Feature("foo",standard=True,require_features=['baz',self.req]), + 'bar': Feature("bar", standard=True, packages=['pkg.bar'], + py_modules=['bar_et'], remove=['bar.ext'], + ), + 'baz': Feature( + "baz", optional=False, packages=['pkg.baz'], + scripts = ['scripts/baz_it'], + libraries=[('libfoo','foo/foofoo.c')] + ), + 'dwim': Feature("DWIM", available=False, remove='bazish'), + }, + script_args=['--without-bar', 'install'], + packages = ['pkg.bar', 'pkg.foo'], + py_modules = ['bar_et', 'bazish'], + ext_modules = [Extension('bar.ext',['bar.c'])] + ) + + def testDefaults(self): + self.assertTrue(not + Feature( + "test",standard=True,remove='x',available=False + ).include_by_default() + ) + self.assertTrue( + Feature("test",standard=True,remove='x').include_by_default() + ) + # Feature must have either kwargs, removes, or require_features + self.assertRaises(DistutilsSetupError, Feature, "test") + + def testAvailability(self): + self.assertRaises( + DistutilsPlatformError, + self.dist.features['dwim'].include_in, self.dist + ) + + def testFeatureOptions(self): + dist = self.dist + self.assertTrue( + 
('with-dwim',None,'include DWIM') in dist.feature_options + ) + self.assertTrue( + ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options + ) + self.assertTrue( + ('with-bar',None,'include bar (default)') in dist.feature_options + ) + self.assertTrue( + ('without-bar',None,'exclude bar') in dist.feature_options + ) + self.assertEqual(dist.feature_negopt['without-foo'],'with-foo') + self.assertEqual(dist.feature_negopt['without-bar'],'with-bar') + self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim') + self.assertTrue(not 'without-baz' in dist.feature_negopt) + + def testUseFeatures(self): + dist = self.dist + self.assertEqual(dist.with_foo,1) + self.assertEqual(dist.with_bar,0) + self.assertEqual(dist.with_baz,1) + self.assertTrue(not 'bar_et' in dist.py_modules) + self.assertTrue(not 'pkg.bar' in dist.packages) + self.assertTrue('pkg.baz' in dist.packages) + self.assertTrue('scripts/baz_it' in dist.scripts) + self.assertTrue(('libfoo','foo/foofoo.c') in dist.libraries) + self.assertEqual(dist.ext_modules,[]) + self.assertEqual(dist.require_features, [self.req]) + + # If we ask for bar, it should fail because we explicitly disabled + # it on the command line + self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar') + + def testFeatureWithInvalidRemove(self): + self.assertRaises( + SystemExit, makeSetup, features = {'x':Feature('x', remove='y')} + ) + +class TestCommandTests(unittest.TestCase): + + def testTestIsCommand(self): + test_cmd = makeSetup().get_command_obj('test') + self.assertTrue(isinstance(test_cmd, distutils.cmd.Command)) + + def testLongOptSuiteWNoDefault(self): + ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite']) + ts1 = ts1.get_command_obj('test') + ts1.ensure_finalized() + self.assertEqual(ts1.test_suite, 'foo.tests.suite') + + def testDefaultSuite(self): + ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test') + ts2.ensure_finalized() + self.assertEqual(ts2.test_suite, 'bar.tests.suite') + + def testDefaultWModuleOnCmdLine(self): + ts3 = makeSetup( + test_suite='bar.tests', + script_args=['test','-m','foo.tests'] + ).get_command_obj('test') + ts3.ensure_finalized() + self.assertEqual(ts3.test_module, 'foo.tests') + self.assertEqual(ts3.test_suite, 'foo.tests.test_suite') + + def testConflictingOptions(self): + ts4 = makeSetup( + script_args=['test','-m','bar.tests', '-s','foo.tests.suite'] + ).get_command_obj('test') + self.assertRaises(DistutilsOptionError, ts4.ensure_finalized) + + def testNoSuite(self): + ts5 = makeSetup().get_command_obj('test') + ts5.ensure_finalized() + self.assertEqual(ts5.test_suite, None) diff --git a/lib/python3.4/site-packages/setuptools/tests/environment.py b/lib/python3.4/site-packages/setuptools/tests/environment.py new file mode 100644 index 0000000..476d280 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/environment.py @@ -0,0 +1,165 @@ +import os +import zipfile +import sys +import tempfile +import unittest +import shutil +import stat +import unicodedata + +from subprocess import Popen as _Popen, PIPE as _PIPE + + +def _extract(self, member, path=None, pwd=None): + """for zipfile py2.5 borrowed from cpython""" + if not isinstance(member, zipfile.ZipInfo): + member = self.getinfo(member) + + if path is None: + path = os.getcwd() + + return _extract_member(self, member, path, pwd) + + +def _extract_from_zip(self, name, dest_path): + dest_file = open(dest_path, 'wb') + try: + dest_file.write(self.read(name)) + finally: + dest_file.close() + + +def 
_extract_member(self, member, targetpath, pwd): + """for zipfile py2.5 borrowed from cpython""" + # build the destination pathname, replacing + # forward slashes to platform specific separators. + # Strip trailing path separator, unless it represents the root. + if (targetpath[-1:] in (os.path.sep, os.path.altsep) + and len(os.path.splitdrive(targetpath)[1]) > 1): + targetpath = targetpath[:-1] + + # don't include leading "/" from file name if present + if member.filename[0] == '/': + targetpath = os.path.join(targetpath, member.filename[1:]) + else: + targetpath = os.path.join(targetpath, member.filename) + + targetpath = os.path.normpath(targetpath) + + # Create all upper directories if necessary. + upperdirs = os.path.dirname(targetpath) + if upperdirs and not os.path.exists(upperdirs): + os.makedirs(upperdirs) + + if member.filename[-1] == '/': + if not os.path.isdir(targetpath): + os.mkdir(targetpath) + return targetpath + + _extract_from_zip(self, member.filename, targetpath) + + return targetpath + + +def _remove_dir(target): + + #on windows this seems to a problem + for dir_path, dirs, files in os.walk(target): + os.chmod(dir_path, stat.S_IWRITE) + for filename in files: + os.chmod(os.path.join(dir_path, filename), stat.S_IWRITE) + shutil.rmtree(target) + + +class ZippedEnvironment(unittest.TestCase): + + datafile = None + dataname = None + old_cwd = None + + def setUp(self): + if self.datafile is None or self.dataname is None: + return + + if not os.path.isfile(self.datafile): + self.old_cwd = None + return + + self.old_cwd = os.getcwd() + + self.temp_dir = tempfile.mkdtemp() + zip_file, source, target = [None, None, None] + try: + zip_file = zipfile.ZipFile(self.datafile) + for files in zip_file.namelist(): + _extract(zip_file, files, self.temp_dir) + finally: + if zip_file: + zip_file.close() + del zip_file + + os.chdir(os.path.join(self.temp_dir, self.dataname)) + + def tearDown(self): + #Assume setUp was never completed + if self.dataname is None or self.datafile is None: + return + + try: + if self.old_cwd: + os.chdir(self.old_cwd) + _remove_dir(self.temp_dir) + except OSError: + #sigh? 
+            pass
+
+
+def _which_dirs(cmd):
+    result = set()
+    for path in os.environ.get('PATH', '').split(os.pathsep):
+        filename = os.path.join(path, cmd)
+        if os.access(filename, os.X_OK):
+            result.add(path)
+    return result
+
+
+def run_setup_py(cmd, pypath=None, path=None,
+                 data_stream=0, env=None):
+    """
+    Execute a setup.py command for the tests, kept separate from the calls
+    the code under test makes directly, to prevent accidental behavior
+    issues.
+    """
+    if env is None:
+        env = dict()
+        for envname in os.environ:
+            env[envname] = os.environ[envname]
+
+    #override the python path if needed
+    if pypath is not None:
+        env["PYTHONPATH"] = pypath
+
+    #override the execution path if needed
+    if path is not None:
+        env["PATH"] = path
+    if not env.get("PATH", ""):
+        env["PATH"] = _which_dirs("tar").union(_which_dirs("gzip"))
+        env["PATH"] = os.pathsep.join(env["PATH"])
+
+    cmd = [sys.executable, "setup.py"] + list(cmd)
+
+    #regarding the shell argument, see: http://bugs.python.org/issue8557
+    try:
+        proc = _Popen(cmd, stdout=_PIPE, stderr=_PIPE,
+                      shell=(sys.platform == 'win32'), env=env)
+
+        data = proc.communicate()[data_stream]
+    except OSError:
+        return 1, ''
+
+    #decode the console string if needed
+    if hasattr(data, "decode"):
+        data = data.decode()  # should use the preferred encoding
+        data = unicodedata.normalize('NFC', data)
+
+    #communicate calls wait()
+    return proc.returncode, data
diff --git a/lib/python3.4/site-packages/setuptools/tests/py26compat.py b/lib/python3.4/site-packages/setuptools/tests/py26compat.py
new file mode 100644
index 0000000..d4fb891
--- /dev/null
+++ b/lib/python3.4/site-packages/setuptools/tests/py26compat.py
@@ -0,0 +1,14 @@
+import unittest
+
+try:
+    # provide skipIf for Python 2.4-2.6
+    skipIf = unittest.skipIf
+except AttributeError:
+    def skipIf(condition, reason):
+        def skipper(func):
+            def skip(*args, **kwargs):
+                return
+            if condition:
+                return skip
+            return func
+        return skipper
diff --git a/lib/python3.4/site-packages/setuptools/tests/script-with-bom.py b/lib/python3.4/site-packages/setuptools/tests/script-with-bom.py
new file mode 100644
index 0000000..22dee0d
--- /dev/null
+++ b/lib/python3.4/site-packages/setuptools/tests/script-with-bom.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+
+result = 'passed'
diff --git a/lib/python3.4/site-packages/setuptools/tests/server.py b/lib/python3.4/site-packages/setuptools/tests/server.py
new file mode 100644
index 0000000..ae2381e
--- /dev/null
+++ b/lib/python3.4/site-packages/setuptools/tests/server.py
@@ -0,0 +1,82 @@
+"""Basic http server for tests to simulate PyPI or custom indexes
+"""
+import sys
+import time
+import threading
+from setuptools.compat import BaseHTTPRequestHandler
+from setuptools.compat import (urllib2, URLError, HTTPServer,
+                               SimpleHTTPRequestHandler)
+
+class IndexServer(HTTPServer):
+    """Basic single-threaded http server simulating a package index
+
+    You can use this server in unittest like this::
+        s = IndexServer()
+        s.start()
+        index_url = s.base_url() + 'mytestindex'
+        # do some test requests to the index
+        # The index files should be located in setuptools/tests/indexes
+        s.stop()
+    """
+    def __init__(self, server_address=('', 0),
+                 RequestHandlerClass=SimpleHTTPRequestHandler):
+        HTTPServer.__init__(self, server_address, RequestHandlerClass)
+        self._run = True
+
+    def serve(self):
+        while self._run:
+            self.handle_request()
+
+    def start(self):
+        self.thread = threading.Thread(target=self.serve)
+        self.thread.start()
+
+    def stop(self):
+        "Stop the server"
+
+        # Let the server finish the last request and wait for
a new one. + time.sleep(0.1) + + # self.shutdown is not supported on python < 2.6, so just + # set _run to false, and make a request, causing it to + # terminate. + self._run = False + url = 'http://127.0.0.1:%(server_port)s/' % vars(self) + try: + if sys.version_info >= (2, 6): + urllib2.urlopen(url, timeout=5) + else: + urllib2.urlopen(url) + except URLError: + # ignore any errors; all that's important is the request + pass + self.thread.join() + self.socket.close() + + def base_url(self): + port = self.server_port + return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port + +class RequestRecorder(BaseHTTPRequestHandler): + def do_GET(self): + requests = vars(self.server).setdefault('requests', []) + requests.append(self) + self.send_response(200, 'OK') + +class MockServer(HTTPServer, threading.Thread): + """ + A simple HTTP Server that records the requests made to it. + """ + def __init__(self, server_address=('', 0), + RequestHandlerClass=RequestRecorder): + HTTPServer.__init__(self, server_address, RequestHandlerClass) + threading.Thread.__init__(self) + self.setDaemon(True) + self.requests = [] + + def run(self): + self.serve_forever() + + def url(self): + return 'http://localhost:%(server_port)s/' % vars(self) + url = property(url) diff --git a/lib/python3.4/site-packages/setuptools/tests/test_bdist_egg.py b/lib/python3.4/site-packages/setuptools/tests/test_bdist_egg.py new file mode 100644 index 0000000..cf4bcd1 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/test_bdist_egg.py @@ -0,0 +1,72 @@ +"""develop tests +""" +import os +import re +import shutil +import site +import sys +import tempfile +import unittest + +from distutils.errors import DistutilsError +from setuptools.compat import StringIO +from setuptools.command.bdist_egg import bdist_egg +from setuptools.command import easy_install as easy_install_pkg +from setuptools.dist import Distribution + +SETUP_PY = """\ +from setuptools import setup + +setup(name='foo', py_modules=['hi']) +""" + +class TestDevelopTest(unittest.TestCase): + + def setUp(self): + self.dir = tempfile.mkdtemp() + self.old_cwd = os.getcwd() + os.chdir(self.dir) + f = open('setup.py', 'w') + f.write(SETUP_PY) + f.close() + f = open('hi.py', 'w') + f.write('1\n') + f.close() + if sys.version >= "2.6": + self.old_base = site.USER_BASE + site.USER_BASE = tempfile.mkdtemp() + self.old_site = site.USER_SITE + site.USER_SITE = tempfile.mkdtemp() + + def tearDown(self): + os.chdir(self.old_cwd) + shutil.rmtree(self.dir) + if sys.version >= "2.6": + shutil.rmtree(site.USER_BASE) + shutil.rmtree(site.USER_SITE) + site.USER_BASE = self.old_base + site.USER_SITE = self.old_site + + def test_bdist_egg(self): + dist = Distribution(dict( + script_name='setup.py', + script_args=['bdist_egg'], + name='foo', + py_modules=['hi'] + )) + os.makedirs(os.path.join('build', 'src')) + old_stdout = sys.stdout + sys.stdout = o = StringIO() + try: + dist.parse_command_line() + dist.run_commands() + finally: + sys.stdout = old_stdout + + # let's see if we got our egg link at the right place + [content] = os.listdir('dist') + self.assertTrue(re.match('foo-0.0.0-py[23].\d.egg$', content)) + +def test_suite(): + return unittest.makeSuite(TestDevelopTest) + diff --git a/lib/python3.4/site-packages/setuptools/tests/test_build_ext.py b/lib/python3.4/site-packages/setuptools/tests/test_build_ext.py new file mode 100644 index 0000000..a92e53a --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/test_build_ext.py @@ -0,0 +1,19 @@ +"""build_ext tests +""" 
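# --- Editorial sketch (illustrative only): driving IndexServer from above ---
# Follows the usage shown in IndexServer's own docstring in server.py; the
# 'mytestindex' suffix is illustrative, and this assumes the vendored
# setuptools.tests package is importable:
from setuptools.tests.server import IndexServer

def demo_index_server():
    s = IndexServer()
    s.start()
    try:
        # e.g. 'http://127.0.0.1:<port>/setuptools/tests/indexes/mytestindex'
        return s.base_url() + 'mytestindex'
    finally:
        s.stop()  # issues one last request so handle_request() returns

if __name__ == '__main__':
    print(demo_index_server())
# --- end sketch ---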
+import unittest +from distutils.command.build_ext import build_ext as distutils_build_ext +from setuptools.command.build_ext import build_ext +from setuptools.dist import Distribution + +class TestBuildExtTest(unittest.TestCase): + + def test_get_ext_filename(self): + # setuptools needs to give back the same + # result than distutils, even if the fullname + # is not in ext_map + dist = Distribution() + cmd = build_ext(dist) + cmd.ext_map['foo/bar'] = '' + res = cmd.get_ext_filename('foo') + wanted = distutils_build_ext.get_ext_filename(cmd, 'foo') + assert res == wanted diff --git a/lib/python3.4/site-packages/setuptools/tests/test_develop.py b/lib/python3.4/site-packages/setuptools/tests/test_develop.py new file mode 100644 index 0000000..66d182e --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/test_develop.py @@ -0,0 +1,122 @@ +"""develop tests +""" +import os +import shutil +import site +import sys +import tempfile +import unittest + +from distutils.errors import DistutilsError +from setuptools.command.develop import develop +from setuptools.dist import Distribution + +SETUP_PY = """\ +from setuptools import setup + +setup(name='foo', + packages=['foo'], + use_2to3=True, +) +""" + +INIT_PY = """print "foo" +""" + +class TestDevelopTest(unittest.TestCase): + + def setUp(self): + if sys.version < "2.6" or hasattr(sys, 'real_prefix'): + return + + # Directory structure + self.dir = tempfile.mkdtemp() + os.mkdir(os.path.join(self.dir, 'foo')) + # setup.py + setup = os.path.join(self.dir, 'setup.py') + f = open(setup, 'w') + f.write(SETUP_PY) + f.close() + self.old_cwd = os.getcwd() + # foo/__init__.py + init = os.path.join(self.dir, 'foo', '__init__.py') + f = open(init, 'w') + f.write(INIT_PY) + f.close() + + os.chdir(self.dir) + self.old_base = site.USER_BASE + site.USER_BASE = tempfile.mkdtemp() + self.old_site = site.USER_SITE + site.USER_SITE = tempfile.mkdtemp() + + def tearDown(self): + if sys.version < "2.6" or hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix): + return + + os.chdir(self.old_cwd) + shutil.rmtree(self.dir) + shutil.rmtree(site.USER_BASE) + shutil.rmtree(site.USER_SITE) + site.USER_BASE = self.old_base + site.USER_SITE = self.old_site + + def test_develop(self): + if sys.version < "2.6" or hasattr(sys, 'real_prefix'): + return + dist = Distribution( + dict(name='foo', + packages=['foo'], + use_2to3=True, + version='0.0', + )) + dist.script_name = 'setup.py' + cmd = develop(dist) + cmd.user = 1 + cmd.ensure_finalized() + cmd.install_dir = site.USER_SITE + cmd.user = 1 + old_stdout = sys.stdout + #sys.stdout = StringIO() + try: + cmd.run() + finally: + sys.stdout = old_stdout + + # let's see if we got our egg link at the right place + content = os.listdir(site.USER_SITE) + content.sort() + self.assertEqual(content, ['easy-install.pth', 'foo.egg-link']) + + # Check that we are using the right code. 
+ egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt') + try: + path = egg_link_file.read().split()[0].strip() + finally: + egg_link_file.close() + init_file = open(os.path.join(path, 'foo', '__init__.py'), 'rt') + try: + init = init_file.read().strip() + finally: + init_file.close() + if sys.version < "3": + self.assertEqual(init, 'print "foo"') + else: + self.assertEqual(init, 'print("foo")') + + def notest_develop_with_setup_requires(self): + + wanted = ("Could not find suitable distribution for " + "Requirement.parse('I-DONT-EXIST')") + old_dir = os.getcwd() + os.chdir(self.dir) + try: + try: + Distribution({'setup_requires': ['I_DONT_EXIST']}) + except DistutilsError: + e = sys.exc_info()[1] + error = str(e) + if error == wanted: + pass + finally: + os.chdir(old_dir) diff --git a/lib/python3.4/site-packages/setuptools/tests/test_dist_info.py b/lib/python3.4/site-packages/setuptools/tests/test_dist_info.py new file mode 100644 index 0000000..a8adb68 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/test_dist_info.py @@ -0,0 +1,83 @@ +"""Test .dist-info style distributions. +""" +import os +import shutil +import tempfile +import unittest +import textwrap + +try: + import ast +except: + pass + +import pkg_resources + +from setuptools.tests.py26compat import skipIf + +def DALS(s): + "dedent and left-strip" + return textwrap.dedent(s).lstrip() + +class TestDistInfo(unittest.TestCase): + + def test_distinfo(self): + dists = {} + for d in pkg_resources.find_distributions(self.tmpdir): + dists[d.project_name] = d + + assert len(dists) == 2, dists + + unversioned = dists['UnversionedDistribution'] + versioned = dists['VersionedDistribution'] + + assert versioned.version == '2.718' # from filename + assert unversioned.version == '0.3' # from METADATA + + @skipIf('ast' not in globals(), + "ast is used to test conditional dependencies (Python >= 2.6)") + def test_conditional_dependencies(self): + requires = [pkg_resources.Requirement.parse('splort==4'), + pkg_resources.Requirement.parse('quux>=1.1')] + + for d in pkg_resources.find_distributions(self.tmpdir): + self.assertEqual(d.requires(), requires[:1]) + self.assertEqual(d.requires(extras=('baz',)), requires) + self.assertEqual(d.extras, ['baz']) + + def setUp(self): + self.tmpdir = tempfile.mkdtemp() + versioned = os.path.join(self.tmpdir, + 'VersionedDistribution-2.718.dist-info') + os.mkdir(versioned) + metadata_file = open(os.path.join(versioned, 'METADATA'), 'w+') + try: + metadata_file.write(DALS( + """ + Metadata-Version: 1.2 + Name: VersionedDistribution + Requires-Dist: splort (4) + Provides-Extra: baz + Requires-Dist: quux (>=1.1); extra == 'baz' + """)) + finally: + metadata_file.close() + unversioned = os.path.join(self.tmpdir, + 'UnversionedDistribution.dist-info') + os.mkdir(unversioned) + metadata_file = open(os.path.join(unversioned, 'METADATA'), 'w+') + try: + metadata_file.write(DALS( + """ + Metadata-Version: 1.2 + Name: UnversionedDistribution + Version: 0.3 + Requires-Dist: splort (==4) + Provides-Extra: baz + Requires-Dist: quux (>=1.1); extra == 'baz' + """)) + finally: + metadata_file.close() + + def tearDown(self): + shutil.rmtree(self.tmpdir) diff --git a/lib/python3.4/site-packages/setuptools/tests/test_easy_install.py b/lib/python3.4/site-packages/setuptools/tests/test_easy_install.py new file mode 100644 index 0000000..a443095 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/test_easy_install.py @@ -0,0 +1,462 @@ +"""Easy install Tests +""" +import sys +import os 
+import shutil +import tempfile +import unittest +import site +import contextlib +import textwrap +import tarfile +import logging +import distutils.core + +from setuptools.compat import StringIO, BytesIO, urlparse +from setuptools.sandbox import run_setup, SandboxViolation +from setuptools.command.easy_install import ( + easy_install, fix_jython_executable, get_script_args, nt_quote_arg) +from setuptools.command.easy_install import PthDistributions +from setuptools.command import easy_install as easy_install_pkg +from setuptools.dist import Distribution +from pkg_resources import working_set, VersionConflict +from pkg_resources import Distribution as PRDistribution +import setuptools.tests.server +import pkg_resources +from .py26compat import skipIf + +class FakeDist(object): + def get_entry_map(self, group): + if group != 'console_scripts': + return {} + return {'name': 'ep'} + + def as_requirement(self): + return 'spec' + +WANTED = """\ +#!%s +# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name' +__requires__ = 'spec' +import sys +from pkg_resources import load_entry_point + +if __name__ == '__main__': + sys.exit( + load_entry_point('spec', 'console_scripts', 'name')() + ) +""" % nt_quote_arg(fix_jython_executable(sys.executable, "")) + +SETUP_PY = """\ +from setuptools import setup + +setup(name='foo') +""" + +class TestEasyInstallTest(unittest.TestCase): + + def test_install_site_py(self): + dist = Distribution() + cmd = easy_install(dist) + cmd.sitepy_installed = False + cmd.install_dir = tempfile.mkdtemp() + try: + cmd.install_site_py() + sitepy = os.path.join(cmd.install_dir, 'site.py') + self.assertTrue(os.path.exists(sitepy)) + finally: + shutil.rmtree(cmd.install_dir) + + def test_get_script_args(self): + dist = FakeDist() + + old_platform = sys.platform + try: + name, script = [i for i in next(get_script_args(dist))][0:2] + finally: + sys.platform = old_platform + + self.assertEqual(script, WANTED) + + def test_no_find_links(self): + # new option '--no-find-links', that blocks find-links added at + # the project level + dist = Distribution() + cmd = easy_install(dist) + cmd.check_pth_processing = lambda: True + cmd.no_find_links = True + cmd.find_links = ['link1', 'link2'] + cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') + cmd.args = ['ok'] + cmd.ensure_finalized() + self.assertEqual(cmd.package_index.scanned_urls, {}) + + # let's try without it (default behavior) + cmd = easy_install(dist) + cmd.check_pth_processing = lambda: True + cmd.find_links = ['link1', 'link2'] + cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') + cmd.args = ['ok'] + cmd.ensure_finalized() + keys = sorted(cmd.package_index.scanned_urls.keys()) + self.assertEqual(keys, ['link1', 'link2']) + + +class TestPTHFileWriter(unittest.TestCase): + def test_add_from_cwd_site_sets_dirty(self): + '''a pth file manager should set dirty + if a distribution is in site but also the cwd + ''' + pth = PthDistributions('does-not_exist', [os.getcwd()]) + self.assertTrue(not pth.dirty) + pth.add(PRDistribution(os.getcwd())) + self.assertTrue(pth.dirty) + + def test_add_from_site_is_ignored(self): + if os.name != 'nt': + location = '/test/location/does-not-have-to-exist' + else: + location = 'c:\\does_not_exist' + pth = PthDistributions('does-not_exist', [location, ]) + self.assertTrue(not pth.dirty) + pth.add(PRDistribution(location)) + self.assertTrue(not pth.dirty) + + +class TestUserInstallTest(unittest.TestCase): + + def setUp(self): + self.dir = tempfile.mkdtemp() + setup = os.path.join(self.dir, 
'setup.py') + f = open(setup, 'w') + f.write(SETUP_PY) + f.close() + self.old_cwd = os.getcwd() + os.chdir(self.dir) + + self.old_enable_site = site.ENABLE_USER_SITE + self.old_file = easy_install_pkg.__file__ + self.old_base = site.USER_BASE + site.USER_BASE = tempfile.mkdtemp() + self.old_site = site.USER_SITE + site.USER_SITE = tempfile.mkdtemp() + easy_install_pkg.__file__ = site.USER_SITE + + def tearDown(self): + os.chdir(self.old_cwd) + shutil.rmtree(self.dir) + + shutil.rmtree(site.USER_BASE) + shutil.rmtree(site.USER_SITE) + site.USER_BASE = self.old_base + site.USER_SITE = self.old_site + site.ENABLE_USER_SITE = self.old_enable_site + easy_install_pkg.__file__ = self.old_file + + def test_user_install_implied(self): + site.ENABLE_USER_SITE = True # disabled sometimes + #XXX: replace with something meaningfull + dist = Distribution() + dist.script_name = 'setup.py' + cmd = easy_install(dist) + cmd.args = ['py'] + cmd.ensure_finalized() + self.assertTrue(cmd.user, 'user should be implied') + + def test_multiproc_atexit(self): + try: + __import__('multiprocessing') + except ImportError: + # skip the test if multiprocessing is not available + return + + log = logging.getLogger('test_easy_install') + logging.basicConfig(level=logging.INFO, stream=sys.stderr) + log.info('this should not break') + + def test_user_install_not_implied_without_usersite_enabled(self): + site.ENABLE_USER_SITE = False # usually enabled + #XXX: replace with something meaningfull + dist = Distribution() + dist.script_name = 'setup.py' + cmd = easy_install(dist) + cmd.args = ['py'] + cmd.initialize_options() + self.assertFalse(cmd.user, 'NOT user should be implied') + + def test_local_index(self): + # make sure the local index is used + # when easy_install looks for installed + # packages + new_location = tempfile.mkdtemp() + target = tempfile.mkdtemp() + egg_file = os.path.join(new_location, 'foo-1.0.egg-info') + f = open(egg_file, 'w') + try: + f.write('Name: foo\n') + finally: + f.close() + + sys.path.append(target) + old_ppath = os.environ.get('PYTHONPATH') + os.environ['PYTHONPATH'] = os.path.pathsep.join(sys.path) + try: + dist = Distribution() + dist.script_name = 'setup.py' + cmd = easy_install(dist) + cmd.install_dir = target + cmd.args = ['foo'] + cmd.ensure_finalized() + cmd.local_index.scan([new_location]) + res = cmd.easy_install('foo') + actual = os.path.normcase(os.path.realpath(res.location)) + expected = os.path.normcase(os.path.realpath(new_location)) + self.assertEqual(actual, expected) + finally: + sys.path.remove(target) + for basedir in [new_location, target, ]: + if not os.path.exists(basedir) or not os.path.isdir(basedir): + continue + try: + shutil.rmtree(basedir) + except: + pass + if old_ppath is not None: + os.environ['PYTHONPATH'] = old_ppath + else: + del os.environ['PYTHONPATH'] + + def test_setup_requires(self): + """Regression test for Distribute issue #318 + + Ensure that a package with setup_requires can be installed when + setuptools is installed in the user site-packages without causing a + SandboxViolation. 
+ """ + + test_pkg = create_setup_requires_package(self.dir) + test_setup_py = os.path.join(test_pkg, 'setup.py') + + try: + with quiet_context(): + with reset_setup_stop_context(): + run_setup(test_setup_py, ['install']) + except SandboxViolation: + self.fail('Installation caused SandboxViolation') + except IndexError: + # Test fails in some cases due to bugs in Python + # See https://bitbucket.org/pypa/setuptools/issue/201 + pass + + +class TestSetupRequires(unittest.TestCase): + + def test_setup_requires_honors_fetch_params(self): + """ + When easy_install installs a source distribution which specifies + setup_requires, it should honor the fetch parameters (such as + allow-hosts, index-url, and find-links). + """ + # set up a server which will simulate an alternate package index. + p_index = setuptools.tests.server.MockServer() + p_index.start() + netloc = 1 + p_index_loc = urlparse(p_index.url)[netloc] + if p_index_loc.endswith(':0'): + # Some platforms (Jython) don't find a port to which to bind, + # so skip this test for them. + return + with quiet_context(): + # create an sdist that has a build-time dependency. + with TestSetupRequires.create_sdist() as dist_file: + with tempdir_context() as temp_install_dir: + with environment_context(PYTHONPATH=temp_install_dir): + ei_params = ['--index-url', p_index.url, + '--allow-hosts', p_index_loc, + '--exclude-scripts', '--install-dir', temp_install_dir, + dist_file] + with reset_setup_stop_context(): + with argv_context(['easy_install']): + # attempt to install the dist. It should fail because + # it doesn't exist. + self.assertRaises(SystemExit, + easy_install_pkg.main, ei_params) + # there should have been two or three requests to the server + # (three happens on Python 3.3a) + self.assertTrue(2 <= len(p_index.requests) <= 3) + self.assertEqual(p_index.requests[0].path, '/does-not-exist/') + + @staticmethod + @contextlib.contextmanager + def create_sdist(): + """ + Return an sdist with a setup_requires dependency (of something that + doesn't exist) + """ + with tempdir_context() as dir: + dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz') + make_trivial_sdist( + dist_path, + textwrap.dedent(""" + import setuptools + setuptools.setup( + name="setuptools-test-fetcher", + version="1.0", + setup_requires = ['does-not-exist'], + ) + """).lstrip()) + yield dist_path + + def test_setup_requires_overrides_version_conflict(self): + """ + Regression test for issue #323. + + Ensures that a distribution's setup_requires requirements can still be + installed and used locally even if a conflicting version of that + requirement is already on the path. 
+ """ + + pr_state = pkg_resources.__getstate__() + fake_dist = PRDistribution('does-not-matter', project_name='foobar', + version='0.0') + working_set.add(fake_dist) + + try: + with tempdir_context() as temp_dir: + test_pkg = create_setup_requires_package(temp_dir) + test_setup_py = os.path.join(test_pkg, 'setup.py') + with quiet_context() as (stdout, stderr): + with reset_setup_stop_context(): + try: + # Don't even need to install the package, just + # running the setup.py at all is sufficient + run_setup(test_setup_py, ['--name']) + except VersionConflict: + self.fail('Installing setup.py requirements ' + 'caused a VersionConflict') + + lines = stdout.readlines() + self.assertTrue(len(lines) > 0) + self.assertTrue(lines[-1].strip(), 'test_pkg') + finally: + pkg_resources.__setstate__(pr_state) + + +def create_setup_requires_package(path): + """Creates a source tree under path for a trivial test package that has a + single requirement in setup_requires--a tarball for that requirement is + also created and added to the dependency_links argument. + """ + + test_setup_attrs = { + 'name': 'test_pkg', 'version': '0.0', + 'setup_requires': ['foobar==0.1'], + 'dependency_links': [os.path.abspath(path)] + } + + test_pkg = os.path.join(path, 'test_pkg') + test_setup_py = os.path.join(test_pkg, 'setup.py') + os.mkdir(test_pkg) + + f = open(test_setup_py, 'w') + f.write(textwrap.dedent("""\ + import setuptools + setuptools.setup(**%r) + """ % test_setup_attrs)) + f.close() + + foobar_path = os.path.join(path, 'foobar-0.1.tar.gz') + make_trivial_sdist( + foobar_path, + textwrap.dedent("""\ + import setuptools + setuptools.setup( + name='foobar', + version='0.1' + ) + """)) + + return test_pkg + + +def make_trivial_sdist(dist_path, setup_py): + """Create a simple sdist tarball at dist_path, containing just a + setup.py, the contents of which are provided by the setup_py string. + """ + + setup_py_file = tarfile.TarInfo(name='setup.py') + try: + # Python 3 (StringIO gets converted to io module) + MemFile = BytesIO + except AttributeError: + MemFile = StringIO + setup_py_bytes = MemFile(setup_py.encode('utf-8')) + setup_py_file.size = len(setup_py_bytes.getvalue()) + dist = tarfile.open(dist_path, 'w:gz') + try: + dist.addfile(setup_py_file, fileobj=setup_py_bytes) + finally: + dist.close() + + +@contextlib.contextmanager +def tempdir_context(cd=lambda dir:None): + temp_dir = tempfile.mkdtemp() + orig_dir = os.getcwd() + try: + cd(temp_dir) + yield temp_dir + finally: + cd(orig_dir) + shutil.rmtree(temp_dir) + +@contextlib.contextmanager +def environment_context(**updates): + old_env = os.environ.copy() + os.environ.update(updates) + try: + yield + finally: + for key in updates: + del os.environ[key] + os.environ.update(old_env) + +@contextlib.contextmanager +def argv_context(repl): + old_argv = sys.argv[:] + sys.argv[:] = repl + yield + sys.argv[:] = old_argv + +@contextlib.contextmanager +def reset_setup_stop_context(): + """ + When the setuptools tests are run using setup.py test, and then + one wants to invoke another setup() command (such as easy_install) + within those tests, it's necessary to reset the global variable + in distutils.core so that the setup() command will run naturally. 
+ """ + setup_stop_after = distutils.core._setup_stop_after + distutils.core._setup_stop_after = None + yield + distutils.core._setup_stop_after = setup_stop_after + + +@contextlib.contextmanager +def quiet_context(): + """ + Redirect stdout/stderr to StringIO objects to prevent console output from + distutils commands. + """ + + old_stdout = sys.stdout + old_stderr = sys.stderr + new_stdout = sys.stdout = StringIO() + new_stderr = sys.stderr = StringIO() + try: + yield new_stdout, new_stderr + finally: + new_stdout.seek(0) + new_stderr.seek(0) + sys.stdout = old_stdout + sys.stderr = old_stderr diff --git a/lib/python3.4/site-packages/setuptools/tests/test_egg_info.py b/lib/python3.4/site-packages/setuptools/tests/test_egg_info.py new file mode 100644 index 0000000..7531e37 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/test_egg_info.py @@ -0,0 +1,210 @@ + +import os +import sys +import tempfile +import shutil +import unittest + +import pkg_resources +import warnings +from setuptools.command import egg_info +from setuptools import svn_utils +from setuptools.tests import environment, test_svn +from setuptools.tests.py26compat import skipIf + +ENTRIES_V10 = pkg_resources.resource_string(__name__, 'entries-v10') +"An entries file generated with svn 1.6.17 against the legacy Setuptools repo" + + +class TestEggInfo(unittest.TestCase): + + def setUp(self): + self.test_dir = tempfile.mkdtemp() + os.mkdir(os.path.join(self.test_dir, '.svn')) + + self.old_cwd = os.getcwd() + os.chdir(self.test_dir) + + def tearDown(self): + os.chdir(self.old_cwd) + shutil.rmtree(self.test_dir) + + def _write_entries(self, entries): + fn = os.path.join(self.test_dir, '.svn', 'entries') + entries_f = open(fn, 'wb') + entries_f.write(entries) + entries_f.close() + + @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") + def test_version_10_format(self): + """ + """ + #keeping this set for 1.6 is a good check on the get_svn_revision + #to ensure I return using svnversion what would had been returned + version_str = svn_utils.SvnInfo.get_svn_version() + version = [int(x) for x in version_str.split('.')[:2]] + if version != [1, 6]: + if hasattr(self, 'skipTest'): + self.skipTest('') + else: + sys.stderr.write('\n Skipping due to SVN Version\n') + return + + self._write_entries(ENTRIES_V10) + rev = egg_info.egg_info.get_svn_revision() + self.assertEqual(rev, '89000') + + def test_version_10_format_legacy_parser(self): + """ + """ + path_variable = None + for env in os.environ: + if env.lower() == 'path': + path_variable = env + + if path_variable: + old_path = os.environ[path_variable] + os.environ[path_variable] = '' + #catch_warnings not available until py26 + warning_filters = warnings.filters + warnings.filters = warning_filters[:] + try: + warnings.simplefilter("ignore", DeprecationWarning) + self._write_entries(ENTRIES_V10) + rev = egg_info.egg_info.get_svn_revision() + finally: + #restore the warning filters + warnings.filters = warning_filters + #restore the os path + if path_variable: + os.environ[path_variable] = old_path + + self.assertEqual(rev, '89000') + +DUMMY_SOURCE_TXT = """CHANGES.txt +CONTRIBUTORS.txt +HISTORY.txt +LICENSE +MANIFEST.in +README.txt +setup.py +dummy/__init__.py +dummy/test.txt +dummy.egg-info/PKG-INFO +dummy.egg-info/SOURCES.txt +dummy.egg-info/dependency_links.txt +dummy.egg-info/top_level.txt""" + + +class TestSvnDummy(environment.ZippedEnvironment): + + def setUp(self): + version = svn_utils.SvnInfo.get_svn_version() + if not version: # None or 
Empty + return None + + self.base_version = tuple([int(x) for x in version.split('.')][:2]) + + if not self.base_version: + raise ValueError('No SVN tools installed') + elif self.base_version < (1, 3): + raise ValueError('Insufficient SVN Version %s' % version) + elif self.base_version >= (1, 9): + #trying the latest version + self.base_version = (1, 8) + + self.dataname = "dummy%i%i" % self.base_version + self.datafile = os.path.join('setuptools', 'tests', + 'svn_data', self.dataname + ".zip") + super(TestSvnDummy, self).setUp() + + @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") + def test_sources(self): + code, data = environment.run_setup_py(["sdist"], + pypath=self.old_cwd, + data_stream=1) + if code: + raise AssertionError(data) + + sources = os.path.join('dummy.egg-info', 'SOURCES.txt') + infile = open(sources, 'r') + try: + read_contents = infile.read() + finally: + infile.close() + del infile + + self.assertEqual(DUMMY_SOURCE_TXT, read_contents) + + return data + + @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") + def test_svn_tags(self): + code, data = environment.run_setup_py(["egg_info", + "--tag-svn-revision"], + pypath=self.old_cwd, + data_stream=1) + if code: + raise AssertionError(data) + + pkginfo = os.path.join('dummy.egg-info', 'PKG-INFO') + infile = open(pkginfo, 'r') + try: + read_contents = infile.readlines() + finally: + infile.close() + del infile + + self.assertTrue("Version: 0.1.1-r1\n" in read_contents) + + @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") + def test_no_tags(self): + code, data = environment.run_setup_py(["egg_info"], + pypath=self.old_cwd, + data_stream=1) + if code: + raise AssertionError(data) + + pkginfo = os.path.join('dummy.egg-info', 'PKG-INFO') + infile = open(pkginfo, 'r') + try: + read_contents = infile.readlines() + finally: + infile.close() + del infile + + self.assertTrue("Version: 0.1.1\n" in read_contents) + + +class TestSvnDummyLegacy(environment.ZippedEnvironment): + + def setUp(self): + self.base_version = (1, 6) + self.dataname = "dummy%i%i" % self.base_version + self.datafile = os.path.join('setuptools', 'tests', + 'svn_data', self.dataname + ".zip") + super(TestSvnDummyLegacy, self).setUp() + + def test_sources(self): + code, data = environment.run_setup_py(["sdist"], + pypath=self.old_cwd, + path="", + data_stream=1) + if code: + raise AssertionError(data) + + sources = os.path.join('dummy.egg-info', 'SOURCES.txt') + infile = open(sources, 'r') + try: + read_contents = infile.read() + finally: + infile.close() + del infile + + self.assertEqual(DUMMY_SOURCE_TXT, read_contents) + + return data + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/lib/python3.4/site-packages/setuptools/tests/test_find_packages.py b/lib/python3.4/site-packages/setuptools/tests/test_find_packages.py new file mode 100644 index 0000000..fe39072 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/test_find_packages.py @@ -0,0 +1,170 @@ +"""Tests for setuptools.find_packages().""" +import os +import sys +import shutil +import tempfile +import unittest +import platform + +import setuptools +from setuptools import find_packages +from setuptools.tests.py26compat import skipIf + +find_420_packages = setuptools.PEP420PackageFinder.find + +# modeled after CPython's test.support.can_symlink +def can_symlink(): + TESTFN = tempfile.mktemp() + symlink_path = TESTFN + "can_symlink" + try: + os.symlink(TESTFN, symlink_path) + can = True + 
except (OSError, NotImplementedError, AttributeError): + can = False + else: + os.remove(symlink_path) + globals().update(can_symlink=lambda: can) + return can + +def has_symlink(): + bad_symlink = ( + # Windows symlink directory detection is broken on Python 3.2 + platform.system() == 'Windows' and sys.version_info[:2] == (3,2) + ) + return can_symlink() and not bad_symlink + +class TestFindPackages(unittest.TestCase): + + def setUp(self): + self.dist_dir = tempfile.mkdtemp() + self._make_pkg_structure() + + def tearDown(self): + shutil.rmtree(self.dist_dir) + + def _make_pkg_structure(self): + """Make basic package structure. + + dist/ + docs/ + conf.py + pkg/ + __pycache__/ + nspkg/ + mod.py + subpkg/ + assets/ + asset + __init__.py + setup.py + + """ + self.docs_dir = self._mkdir('docs', self.dist_dir) + self._touch('conf.py', self.docs_dir) + self.pkg_dir = self._mkdir('pkg', self.dist_dir) + self._mkdir('__pycache__', self.pkg_dir) + self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir) + self._touch('mod.py', self.ns_pkg_dir) + self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir) + self.asset_dir = self._mkdir('assets', self.sub_pkg_dir) + self._touch('asset', self.asset_dir) + self._touch('__init__.py', self.sub_pkg_dir) + self._touch('setup.py', self.dist_dir) + + def _mkdir(self, path, parent_dir=None): + if parent_dir: + path = os.path.join(parent_dir, path) + os.mkdir(path) + return path + + def _touch(self, path, dir_=None): + if dir_: + path = os.path.join(dir_, path) + fp = open(path, 'w') + fp.close() + return path + + def test_regular_package(self): + self._touch('__init__.py', self.pkg_dir) + packages = find_packages(self.dist_dir) + self.assertEqual(packages, ['pkg', 'pkg.subpkg']) + + def test_exclude(self): + self._touch('__init__.py', self.pkg_dir) + packages = find_packages(self.dist_dir, exclude=('pkg.*',)) + assert packages == ['pkg'] + + def test_include_excludes_other(self): + """ + If include is specified, other packages should be excluded. + """ + self._touch('__init__.py', self.pkg_dir) + alt_dir = self._mkdir('other_pkg', self.dist_dir) + self._touch('__init__.py', alt_dir) + packages = find_packages(self.dist_dir, include=['other_pkg']) + self.assertEqual(packages, ['other_pkg']) + + def test_dir_with_dot_is_skipped(self): + shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) + data_dir = self._mkdir('some.data', self.pkg_dir) + self._touch('__init__.py', data_dir) + self._touch('file.dat', data_dir) + packages = find_packages(self.dist_dir) + self.assertTrue('pkg.some.data' not in packages) + + def test_dir_with_packages_in_subdir_is_excluded(self): + """ + Ensure that a package in a non-package such as build/pkg/__init__.py + is excluded. + """ + build_dir = self._mkdir('build', self.dist_dir) + build_pkg_dir = self._mkdir('pkg', build_dir) + self._touch('__init__.py', build_pkg_dir) + packages = find_packages(self.dist_dir) + self.assertTrue('build.pkg' not in packages) + + @skipIf(not has_symlink(), 'Symlink support required') + def test_symlinked_packages_are_included(self): + """ + A symbolically-linked directory should be treated like any other + directory when matched as a package. + + Create a link from lpkg -> pkg. 
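+ (os.path.isdir() follows symlinks, so the finder sees lpkg as an
+ ordinary package directory.)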
+ """ + self._touch('__init__.py', self.pkg_dir) + linked_pkg = os.path.join(self.dist_dir, 'lpkg') + os.symlink('pkg', linked_pkg) + assert os.path.isdir(linked_pkg) + packages = find_packages(self.dist_dir) + self.assertTrue('lpkg' in packages) + + def _assert_packages(self, actual, expected): + self.assertEqual(set(actual), set(expected)) + + def test_pep420_ns_package(self): + packages = find_420_packages( + self.dist_dir, include=['pkg*'], exclude=['pkg.subpkg.assets']) + self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) + + def test_pep420_ns_package_no_includes(self): + packages = find_420_packages( + self.dist_dir, exclude=['pkg.subpkg.assets']) + self._assert_packages(packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg']) + + def test_pep420_ns_package_no_includes_or_excludes(self): + packages = find_420_packages(self.dist_dir) + expected = [ + 'docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets'] + self._assert_packages(packages, expected) + + def test_regular_package_with_nested_pep420_ns_packages(self): + self._touch('__init__.py', self.pkg_dir) + packages = find_420_packages( + self.dist_dir, exclude=['docs', 'pkg.subpkg.assets']) + self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) + + def test_pep420_ns_package_no_non_package_dirs(self): + shutil.rmtree(self.docs_dir) + shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) + packages = find_420_packages(self.dist_dir) + self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) diff --git a/lib/python3.4/site-packages/setuptools/tests/test_integration.py b/lib/python3.4/site-packages/setuptools/tests/test_integration.py new file mode 100644 index 0000000..8d6c1e5 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/test_integration.py @@ -0,0 +1,83 @@ +"""Run some integration tests. + +Try to install a few packages. +""" + +import glob +import os +import sys + +import pytest + +from setuptools.command.easy_install import easy_install +from setuptools.command import easy_install as easy_install_pkg +from setuptools.dist import Distribution + + +@pytest.fixture +def install_context(request, tmpdir, monkeypatch): + """Fixture to set up temporary installation directory. + """ + # Save old values so we can restore them. + new_cwd = tmpdir.mkdir('cwd') + user_base = tmpdir.mkdir('user_base') + user_site = tmpdir.mkdir('user_site') + install_dir = tmpdir.mkdir('install_dir') + + def fin(): + # undo the monkeypatch, particularly needed under + # windows because of kept handle on cwd + monkeypatch.undo() + new_cwd.remove() + user_base.remove() + user_site.remove() + install_dir.remove() + request.addfinalizer(fin) + + # Change the environment and site settings to control where the + # files are installed and ensure we do not overwrite anything. + monkeypatch.chdir(new_cwd) + monkeypatch.setattr(easy_install_pkg, '__file__', user_site.strpath) + monkeypatch.setattr('site.USER_BASE', user_base.strpath) + monkeypatch.setattr('site.USER_SITE', user_site.strpath) + monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath]) + monkeypatch.setenv('PYTHONPATH', os.path.pathsep.join(sys.path)) + + # Set up the command for performing the installation. 
+ dist = Distribution() + cmd = easy_install(dist) + cmd.install_dir = install_dir.strpath + return cmd + + +def _install_one(requirement, cmd, pkgname, modulename): + cmd.args = [requirement] + cmd.ensure_finalized() + cmd.run() + target = cmd.install_dir + dest_path = glob.glob(os.path.join(target, pkgname + '*.egg')) + assert dest_path + assert os.path.exists(os.path.join(dest_path[0], pkgname, modulename)) + + +def test_stevedore(install_context): + _install_one('stevedore', install_context, + 'stevedore', 'extension.py') + + +@pytest.mark.xfail +def test_virtualenvwrapper(install_context): + _install_one('virtualenvwrapper', install_context, + 'virtualenvwrapper', 'hook_loader.py') + + +@pytest.mark.xfail +def test_pbr(install_context): + _install_one('pbr', install_context, + 'pbr', 'core.py') + + +@pytest.mark.xfail +def test_python_novaclient(install_context): + _install_one('python-novaclient', install_context, + 'novaclient', 'base.py') diff --git a/lib/python3.4/site-packages/setuptools/tests/test_markerlib.py b/lib/python3.4/site-packages/setuptools/tests/test_markerlib.py new file mode 100644 index 0000000..dae71cb --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/test_markerlib.py @@ -0,0 +1,68 @@ +import os +import unittest +from setuptools.tests.py26compat import skipIf + +try: + import ast +except ImportError: + pass + +class TestMarkerlib(unittest.TestCase): + + @skipIf('ast' not in globals(), + "ast not available (Python < 2.6?)") + def test_markers(self): + from _markerlib import interpret, default_environment, compile + + os_name = os.name + + self.assertTrue(interpret("")) + + self.assertTrue(interpret("os.name != 'buuuu'")) + self.assertTrue(interpret("os_name != 'buuuu'")) + self.assertTrue(interpret("python_version > '1.0'")) + self.assertTrue(interpret("python_version < '5.0'")) + self.assertTrue(interpret("python_version <= '5.0'")) + self.assertTrue(interpret("python_version >= '1.0'")) + self.assertTrue(interpret("'%s' in os.name" % os_name)) + self.assertTrue(interpret("'%s' in os_name" % os_name)) + self.assertTrue(interpret("'buuuu' not in os.name")) + + self.assertFalse(interpret("os.name == 'buuuu'")) + self.assertFalse(interpret("os_name == 'buuuu'")) + self.assertFalse(interpret("python_version < '1.0'")) + self.assertFalse(interpret("python_version > '5.0'")) + self.assertFalse(interpret("python_version >= '5.0'")) + self.assertFalse(interpret("python_version <= '1.0'")) + self.assertFalse(interpret("'%s' not in os.name" % os_name)) + self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'")) + self.assertFalse(interpret("'buuuu' in os_name and python_version >= '5.0'")) + + environment = default_environment() + environment['extra'] = 'test' + self.assertTrue(interpret("extra == 'test'", environment)) + self.assertFalse(interpret("extra == 'doc'", environment)) + + def raises_nameError(): + try: + interpret("python.version == '42'") + except NameError: + pass + else: + raise Exception("Expected NameError") + + raises_nameError() + + def raises_syntaxError(): + try: + interpret("(x for x in (4,))") + except SyntaxError: + pass + else: + raise Exception("Expected SyntaxError") + + raises_syntaxError() + + statement = "python_version == '5'" + self.assertEqual(compile(statement).__doc__, statement) + diff --git a/lib/python3.4/site-packages/setuptools/tests/test_packageindex.py b/lib/python3.4/site-packages/setuptools/tests/test_packageindex.py new file mode 100644 index 0000000..664566a --- /dev/null +++ 
b/lib/python3.4/site-packages/setuptools/tests/test_packageindex.py @@ -0,0 +1,203 @@ +"""Package Index Tests +""" +import sys +import os +import unittest +import pkg_resources +from setuptools.compat import urllib2, httplib, HTTPError, unicode, pathname2url +import distutils.errors +import setuptools.package_index +from setuptools.tests.server import IndexServer + +class TestPackageIndex(unittest.TestCase): + + def test_bad_url_bad_port(self): + index = setuptools.package_index.PackageIndex() + url = 'http://127.0.0.1:0/nonesuch/test_package_index' + try: + v = index.open_url(url) + except Exception: + v = sys.exc_info()[1] + self.assertTrue(url in str(v)) + else: + self.assertTrue(isinstance(v, HTTPError)) + + def test_bad_url_typo(self): + # issue 16 + # easy_install inquant.contentmirror.plone breaks because of a typo + # in its home URL + index = setuptools.package_index.PackageIndex( + hosts=('www.example.com',) + ) + + url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk' + try: + v = index.open_url(url) + except Exception: + v = sys.exc_info()[1] + self.assertTrue(url in str(v)) + else: + self.assertTrue(isinstance(v, HTTPError)) + + def test_bad_url_bad_status_line(self): + index = setuptools.package_index.PackageIndex( + hosts=('www.example.com',) + ) + + def _urlopen(*args): + raise httplib.BadStatusLine('line') + + index.opener = _urlopen + url = 'http://example.com' + try: + v = index.open_url(url) + except Exception: + v = sys.exc_info()[1] + self.assertTrue('line' in str(v)) + else: + raise AssertionError('Should have raise here!') + + def test_bad_url_double_scheme(self): + """ + A bad URL with a double scheme should raise a DistutilsError. + """ + index = setuptools.package_index.PackageIndex( + hosts=('www.example.com',) + ) + + # issue 20 + url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk' + try: + index.open_url(url) + except distutils.errors.DistutilsError: + error = sys.exc_info()[1] + msg = unicode(error) + assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg + return + raise RuntimeError("Did not raise") + + def test_bad_url_screwy_href(self): + index = setuptools.package_index.PackageIndex( + hosts=('www.example.com',) + ) + + # issue #160 + if sys.version_info[0] == 2 and sys.version_info[1] == 7: + # this should not fail + url = 'http://example.com' + page = ('') + index.process_index(url, page) + + def test_url_ok(self): + index = setuptools.package_index.PackageIndex( + hosts=('www.example.com',) + ) + url = 'file:///tmp/test_package_index' + self.assertTrue(index.url_ok(url, True)) + + def test_links_priority(self): + """ + Download links from the pypi simple index should be used before + external download links. + https://bitbucket.org/tarek/distribute/issue/163 + + Usecase : + - someone uploads a package on pypi, a md5 is generated + - someone manually copies this link (with the md5 in the url) onto an + external page accessible from the package page. + - someone reuploads the package (with a different md5) + - while easy_installing, an MD5 error occurs because the external link + is used + -> Setuptools should use the link from pypi, not the external one. 
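+ (Candidate links are compared with their #md5= fragment stripped, which
+ is why only one link, the one served by the index, survives below.)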
+ """ + if sys.platform.startswith('java'): + # Skip this test on jython because binding to :0 fails + return + + # start an index server + server = IndexServer() + server.start() + index_url = server.base_url() + 'test_links_priority/simple/' + + # scan a test index + pi = setuptools.package_index.PackageIndex(index_url) + requirement = pkg_resources.Requirement.parse('foobar') + pi.find_packages(requirement) + server.stop() + + # the distribution has been found + self.assertTrue('foobar' in pi) + # we have only one link, because links are compared without md5 + self.assertTrue(len(pi['foobar'])==1) + # the link should be from the index + self.assertTrue('correct_md5' in pi['foobar'][0].location) + + def test_parse_bdist_wininst(self): + self.assertEqual(setuptools.package_index.parse_bdist_wininst( + 'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32')) + self.assertEqual(setuptools.package_index.parse_bdist_wininst( + 'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32')) + self.assertEqual(setuptools.package_index.parse_bdist_wininst( + 'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64')) + self.assertEqual(setuptools.package_index.parse_bdist_wininst( + 'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64')) + + def test__vcs_split_rev_from_url(self): + """ + Test the basic usage of _vcs_split_rev_from_url + """ + vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url + url, rev = vsrfu('https://example.com/bar@2995') + self.assertEqual(url, 'https://example.com/bar') + self.assertEqual(rev, '2995') + + def test_local_index(self): + """ + local_open should be able to read an index from the file system. + """ + f = open('index.html', 'w') + f.write('
<div>content</div>
') + f.close() + try: + url = 'file:' + pathname2url(os.getcwd()) + '/' + res = setuptools.package_index.local_open(url) + finally: + os.remove('index.html') + assert 'content' in res.read() + + +class TestContentCheckers(unittest.TestCase): + + def test_md5(self): + checker = setuptools.package_index.HashChecker.from_url( + 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') + checker.feed('You should probably not be using MD5'.encode('ascii')) + self.assertEqual(checker.hash.hexdigest(), + 'f12895fdffbd45007040d2e44df98478') + self.assertTrue(checker.is_valid()) + + def test_other_fragment(self): + "Content checks should succeed silently if no hash is present" + checker = setuptools.package_index.HashChecker.from_url( + 'http://foo/bar#something%20completely%20different') + checker.feed('anything'.encode('ascii')) + self.assertTrue(checker.is_valid()) + + def test_blank_md5(self): + "Content checks should succeed if a hash is empty" + checker = setuptools.package_index.HashChecker.from_url( + 'http://foo/bar#md5=') + checker.feed('anything'.encode('ascii')) + self.assertTrue(checker.is_valid()) + + def test_get_hash_name_md5(self): + checker = setuptools.package_index.HashChecker.from_url( + 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') + self.assertEqual(checker.hash_name, 'md5') + + def test_report(self): + checker = setuptools.package_index.HashChecker.from_url( + 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') + rep = checker.report(lambda x: x, 'My message about %s') + self.assertEqual(rep, 'My message about md5') diff --git a/lib/python3.4/site-packages/setuptools/tests/test_resources.py b/lib/python3.4/site-packages/setuptools/tests/test_resources.py new file mode 100644 index 0000000..3baa3ab --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/test_resources.py @@ -0,0 +1,612 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# NOTE: the shebang and encoding lines are for ScriptHeaderTests do not remove + +import os +import sys +import tempfile +import shutil +from unittest import TestCase + +import pkg_resources +from pkg_resources import (parse_requirements, VersionConflict, parse_version, + Distribution, EntryPoint, Requirement, safe_version, safe_name, + WorkingSet) + +from setuptools.command.easy_install import (get_script_header, is_sh, + nt_quote_arg) +from setuptools.compat import StringIO, iteritems, PY3 +from .py26compat import skipIf + +def safe_repr(obj, short=False): + """ copied from Python2.7""" + try: + result = repr(obj) + except Exception: + result = object.__repr__(obj) + if not short or len(result) < pkg_resources._MAX_LENGTH: + return result + return result[:pkg_resources._MAX_LENGTH] + ' [truncated]...' 
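The TestContentCheckers cases in test_packageindex.py above pin down the convention for carrying an expected digest in a download URL: the fragment is '#md5=<hexdigest>', and a missing, foreign, or blank fragment validates silently. A minimal standalone sketch of that rule, using only the standard library (fragment_matches is an illustrative name, not the setuptools API):

    import hashlib
    from urllib.parse import urlparse

    def fragment_matches(url, payload):
        # Anything other than an 'md5=<hexdigest>' fragment imposes no
        # constraint, so the check passes silently.
        fragment = urlparse(url).fragment
        if not fragment.startswith('md5='):
            return True
        expected = fragment[len('md5='):]
        if not expected:  # blank hash: nothing to verify
            return True
        return hashlib.md5(payload).hexdigest() == expected

    assert fragment_matches(
        'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478',
        'You should probably not be using MD5'.encode('ascii'))
    assert fragment_matches(
        'http://foo/bar#something%20completely%20different', b'anything')
    assert fragment_matches('http://foo/bar#md5=', b'anything')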
+ +class Metadata(pkg_resources.EmptyProvider): + """Mock object to return metadata as if from an on-disk distribution""" + + def __init__(self,*pairs): + self.metadata = dict(pairs) + + def has_metadata(self,name): + return name in self.metadata + + def get_metadata(self,name): + return self.metadata[name] + + def get_metadata_lines(self,name): + return pkg_resources.yield_lines(self.get_metadata(name)) + +dist_from_fn = pkg_resources.Distribution.from_filename + +class DistroTests(TestCase): + + def testCollection(self): + # empty path should produce no distributions + ad = pkg_resources.Environment([], platform=None, python=None) + self.assertEqual(list(ad), []) + self.assertEqual(ad['FooPkg'],[]) + ad.add(dist_from_fn("FooPkg-1.3_1.egg")) + ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg")) + ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg")) + + # Name is in there now + self.assertTrue(ad['FooPkg']) + # But only 1 package + self.assertEqual(list(ad), ['foopkg']) + + # Distributions sort by version + self.assertEqual( + [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2'] + ) + # Removing a distribution leaves sequence alone + ad.remove(ad['FooPkg'][1]) + self.assertEqual( + [dist.version for dist in ad['FooPkg']], ['1.4','1.2'] + ) + # And inserting adds them in order + ad.add(dist_from_fn("FooPkg-1.9.egg")) + self.assertEqual( + [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2'] + ) + + ws = WorkingSet([]) + foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg") + foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg") + req, = parse_requirements("FooPkg>=1.3") + + # Nominal case: no distros on path, should yield all applicable + self.assertEqual(ad.best_match(req,ws).version, '1.9') + # If a matching distro is already installed, should return only that + ws.add(foo14) + self.assertEqual(ad.best_match(req,ws).version, '1.4') + + # If the first matching distro is unsuitable, it's a version conflict + ws = WorkingSet([]) + ws.add(foo12) + ws.add(foo14) + self.assertRaises(VersionConflict, ad.best_match, req, ws) + + # If more than one match on the path, the first one takes precedence + ws = WorkingSet([]) + ws.add(foo14) + ws.add(foo12) + ws.add(foo14) + self.assertEqual(ad.best_match(req,ws).version, '1.4') + + def checkFooPkg(self,d): + self.assertEqual(d.project_name, "FooPkg") + self.assertEqual(d.key, "foopkg") + self.assertEqual(d.version, "1.3-1") + self.assertEqual(d.py_version, "2.4") + self.assertEqual(d.platform, "win32") + self.assertEqual(d.parsed_version, parse_version("1.3-1")) + + def testDistroBasics(self): + d = Distribution( + "/some/path", + project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32" + ) + self.checkFooPkg(d) + + d = Distribution("/some/path") + self.assertEqual(d.py_version, sys.version[:3]) + self.assertEqual(d.platform, None) + + def testDistroParse(self): + d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg") + self.checkFooPkg(d) + d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg-info") + self.checkFooPkg(d) + + def testDistroMetadata(self): + d = Distribution( + "/some/path", project_name="FooPkg", py_version="2.4", platform="win32", + metadata = Metadata( + ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n") + ) + ) + self.checkFooPkg(d) + + def distRequires(self, txt): + return Distribution("/foo", metadata=Metadata(('depends.txt', txt))) + + def checkRequires(self, dist, txt, extras=()): + self.assertEqual( + list(dist.requires(extras)), + list(parse_requirements(txt)) + ) + + def testDistroDependsSimple(self): + for v in 
"Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0": + self.checkRequires(self.distRequires(v), v) + + def testResolve(self): + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + # Resolving no requirements -> nothing to install + self.assertEqual(list(ws.resolve([],ad)), []) + # Request something not in the collection -> DistributionNotFound + self.assertRaises( + pkg_resources.DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad + ) + Foo = Distribution.from_filename( + "/foo_dir/Foo-1.2.egg", + metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")) + ) + ad.add(Foo) + ad.add(Distribution.from_filename("Foo-0.9.egg")) + + # Request thing(s) that are available -> list to activate + for i in range(3): + targets = list(ws.resolve(parse_requirements("Foo"), ad)) + self.assertEqual(targets, [Foo]) + list(map(ws.add,targets)) + self.assertRaises(VersionConflict, ws.resolve, + parse_requirements("Foo==0.9"), ad) + ws = WorkingSet([]) # reset + + # Request an extra that causes an unresolved dependency for "Baz" + self.assertRaises( + pkg_resources.DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad + ) + Baz = Distribution.from_filename( + "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo")) + ) + ad.add(Baz) + + # Activation list now includes resolved dependency + self.assertEqual( + list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz] + ) + # Requests for conflicting versions produce VersionConflict + self.assertRaises(VersionConflict, + ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad) + + def testDistroDependsOptions(self): + d = self.distRequires(""" + Twisted>=1.5 + [docgen] + ZConfig>=2.0 + docutils>=0.3 + [fastcgi] + fcgiapp>=0.1""") + self.checkRequires(d,"Twisted>=1.5") + self.checkRequires( + d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"] + ) + self.checkRequires( + d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"] + ) + self.checkRequires( + d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(), + ["docgen","fastcgi"] + ) + self.checkRequires( + d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(), + ["fastcgi", "docgen"] + ) + self.assertRaises(pkg_resources.UnknownExtra, d.requires, ["foo"]) + + +class EntryPointTests(TestCase): + + def assertfields(self, ep): + self.assertEqual(ep.name,"foo") + self.assertEqual(ep.module_name,"setuptools.tests.test_resources") + self.assertEqual(ep.attrs, ("EntryPointTests",)) + self.assertEqual(ep.extras, ("x",)) + self.assertTrue(ep.load() is EntryPointTests) + self.assertEqual( + str(ep), + "foo = setuptools.tests.test_resources:EntryPointTests [x]" + ) + + def setUp(self): + self.dist = Distribution.from_filename( + "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]'))) + + def testBasics(self): + ep = EntryPoint( + "foo", "setuptools.tests.test_resources", ["EntryPointTests"], + ["x"], self.dist + ) + self.assertfields(ep) + + def testParse(self): + s = "foo = setuptools.tests.test_resources:EntryPointTests [x]" + ep = EntryPoint.parse(s, self.dist) + self.assertfields(ep) + + ep = EntryPoint.parse("bar baz= spammity[PING]") + self.assertEqual(ep.name,"bar baz") + self.assertEqual(ep.module_name,"spammity") + self.assertEqual(ep.attrs, ()) + self.assertEqual(ep.extras, ("ping",)) + + ep = EntryPoint.parse(" fizzly = wocka:foo") + self.assertEqual(ep.name,"fizzly") + self.assertEqual(ep.module_name,"wocka") + self.assertEqual(ep.attrs, ("foo",)) + self.assertEqual(ep.extras, ()) + + def testRejects(self): + for ep in [ + "foo", 
"x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2", + ]: + try: EntryPoint.parse(ep) + except ValueError: pass + else: raise AssertionError("Should've been bad", ep) + + def checkSubMap(self, m): + self.assertEqual(len(m), len(self.submap_expect)) + for key, ep in iteritems(self.submap_expect): + self.assertEqual(repr(m.get(key)), repr(ep)) + + submap_expect = dict( + feature1=EntryPoint('feature1', 'somemodule', ['somefunction']), + feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']), + feature3=EntryPoint('feature3', 'this.module', extras=['something']) + ) + submap_str = """ + # define features for blah blah + feature1 = somemodule:somefunction + feature2 = another.module:SomeClass [extra1,extra2] + feature3 = this.module [something] + """ + + def testParseList(self): + self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) + self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar") + self.assertRaises(ValueError, EntryPoint.parse_group, "x", + ["foo=baz", "foo=bar"]) + + def testParseMap(self): + m = EntryPoint.parse_map({'xyz':self.submap_str}) + self.checkSubMap(m['xyz']) + self.assertEqual(list(m.keys()),['xyz']) + m = EntryPoint.parse_map("[xyz]\n"+self.submap_str) + self.checkSubMap(m['xyz']) + self.assertEqual(list(m.keys()),['xyz']) + self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"]) + self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str) + +class RequirementsTests(TestCase): + + def testBasics(self): + r = Requirement.parse("Twisted>=1.2") + self.assertEqual(str(r),"Twisted>=1.2") + self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')") + self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ())) + self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ())) + self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ())) + self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ())) + self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ())) + self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2")) + + def testOrdering(self): + r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ()) + r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ()) + self.assertEqual(r1,r2) + self.assertEqual(str(r1),str(r2)) + self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2") + + def testBasicContains(self): + r = Requirement("Twisted", [('>=','1.2')], ()) + foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") + twist11 = Distribution.from_filename("Twisted-1.1.egg") + twist12 = Distribution.from_filename("Twisted-1.2.egg") + self.assertTrue(parse_version('1.2') in r) + self.assertTrue(parse_version('1.1') not in r) + self.assertTrue('1.2' in r) + self.assertTrue('1.1' not in r) + self.assertTrue(foo_dist not in r) + self.assertTrue(twist11 not in r) + self.assertTrue(twist12 in r) + + def testAdvancedContains(self): + r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5") + for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'): + self.assertTrue(v in r, (v,r)) + for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'): + self.assertTrue(v not in r, (v,r)) + + def testOptionsAndHashing(self): + r1 = Requirement.parse("Twisted[foo,bar]>=1.2") + r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") + r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0") + self.assertEqual(r1,r2) + self.assertEqual(r1,r3) + self.assertEqual(r1.extras, ("foo","bar")) + self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized 
+ self.assertEqual(hash(r1), hash(r2)) + self.assertEqual( + hash(r1), hash(("twisted", ((">=",parse_version("1.2")),), + frozenset(["foo","bar"]))) + ) + + def testVersionEquality(self): + r1 = Requirement.parse("foo==0.3a2") + r2 = Requirement.parse("foo!=0.3a4") + d = Distribution.from_filename + + self.assertTrue(d("foo-0.3a4.egg") not in r1) + self.assertTrue(d("foo-0.3a1.egg") not in r1) + self.assertTrue(d("foo-0.3a4.egg") not in r2) + + self.assertTrue(d("foo-0.3a2.egg") in r1) + self.assertTrue(d("foo-0.3a2.egg") in r2) + self.assertTrue(d("foo-0.3a3.egg") in r2) + self.assertTrue(d("foo-0.3a5.egg") in r2) + + def testSetuptoolsProjectName(self): + """ + The setuptools project should implement the setuptools package. + """ + + self.assertEqual( + Requirement.parse('setuptools').project_name, 'setuptools') + # setuptools 0.7 and higher means setuptools. + self.assertEqual( + Requirement.parse('setuptools == 0.7').project_name, 'setuptools') + self.assertEqual( + Requirement.parse('setuptools == 0.7a1').project_name, 'setuptools') + self.assertEqual( + Requirement.parse('setuptools >= 0.7').project_name, 'setuptools') + + +class ParseTests(TestCase): + + def testEmptyParse(self): + self.assertEqual(list(parse_requirements('')), []) + + def testYielding(self): + for inp,out in [ + ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']), + (['x\n\n','y'], ['x','y']), + ]: + self.assertEqual(list(pkg_resources.yield_lines(inp)),out) + + def testSplitting(self): + sample = """ + x + [Y] + z + + a + [b ] + # foo + c + [ d] + [q] + v + """ + self.assertEqual(list(pkg_resources.split_sections(sample)), + [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])] + ) + self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo")) + + def testSafeName(self): + self.assertEqual(safe_name("adns-python"), "adns-python") + self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") + self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") + self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker") + self.assertNotEqual(safe_name("peak.web"), "peak-web") + + def testSafeVersion(self): + self.assertEqual(safe_version("1.2-1"), "1.2-1") + self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha") + self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521") + self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker") + self.assertEqual(safe_version("peak.web"), "peak.web") + + def testSimpleRequirements(self): + self.assertEqual( + list(parse_requirements('Twis-Ted>=1.2-1')), + [Requirement('Twis-Ted',[('>=','1.2-1')], ())] + ) + self.assertEqual( + list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')), + [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())] + ) + self.assertEqual( + Requirement.parse("FooBar==1.99a3"), + Requirement("FooBar", [('==','1.99a3')], ()) + ) + self.assertRaises(ValueError,Requirement.parse,">=2.3") + self.assertRaises(ValueError,Requirement.parse,"x\\") + self.assertRaises(ValueError,Requirement.parse,"x==2 q") + self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2") + self.assertRaises(ValueError,Requirement.parse,"#") + + def testVersionEquality(self): + def c(s1,s2): + p1, p2 = parse_version(s1),parse_version(s2) + self.assertEqual(p1,p2, (s1,s2,p1,p2)) + + c('1.2-rc1', '1.2rc1') + c('0.4', '0.4.0') + c('0.4.0.0', '0.4.0') + c('0.4.0-0', '0.4-0') + c('0pl1', '0.0pl1') + c('0pre1', '0.0c1') + c('0.0.0preview1', '0c1') + c('0.0c1', '0-rc1') + c('1.2a1', '1.2.a.1') + c('1.2...a', '1.2a') + + def testVersionOrdering(self): + def 
c(s1,s2): + p1, p2 = parse_version(s1),parse_version(s2) + self.assertTrue(p1 dummy-0\.1\.1", + "copying dummy", + "copying dummy\.egg-info", + "hard linking \S+ -> dummy-0\.1\.1", + "hard linking dummy", + "hard linking dummy\.egg-info", + "Writing dummy-0\.1\.1", + "creating dist", + "creating 'dist", + "Creating tar archive", + "running check", + "adding 'dummy-0\.1\.1", + "tar .+ dist/dummy-0\.1\.1\.tar dummy-0\.1\.1", + "gzip .+ dist/dummy-0\.1\.1\.tar", + "removing 'dummy-0\.1\.1' \\(and everything under it\\)", + ) + + print(" DIR: " + os.path.abspath('.')) + for line in datalines: + found = False + for pattern in possible: + if re.match(pattern, line): + print(" READ: " + line) + found = True + break + if not found: + raise AssertionError("Unexpexected: %s\n-in-\n%s" + % (line, data)) + + return data + + def test_sources(self): + self._run() + + +class TestSvn(environment.ZippedEnvironment): + + def setUp(self): + version = svn_utils.SvnInfo.get_svn_version() + if not version: # None or Empty + return + + self.base_version = tuple([int(x) for x in version.split('.')][:2]) + + if not self.base_version: + raise ValueError('No SVN tools installed') + elif self.base_version < (1, 3): + raise ValueError('Insufficient SVN Version %s' % version) + elif self.base_version >= (1, 9): + # trying the latest version + self.base_version = (1, 8) + + self.dataname = "svn%i%i_example" % self.base_version + self.datafile = os.path.join('setuptools', 'tests', + 'svn_data', self.dataname + ".zip") + super(TestSvn, self).setUp() + + @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") + def test_walksvn(self): + if self.base_version >= (1, 6): + folder2 = 'third party2' + folder3 = 'third party3' + else: + folder2 = 'third_party2' + folder3 = 'third_party3' + + # TODO is this right + expected = set([ + os.path.join('a file'), + os.path.join(folder2, 'Changes.txt'), + os.path.join(folder2, 'MD5SUMS'), + os.path.join(folder2, 'README.txt'), + os.path.join(folder3, 'Changes.txt'), + os.path.join(folder3, 'MD5SUMS'), + os.path.join(folder3, 'README.txt'), + os.path.join(folder3, 'TODO.txt'), + os.path.join(folder3, 'fin'), + os.path.join('third_party', 'README.txt'), + os.path.join('folder', folder2, 'Changes.txt'), + os.path.join('folder', folder2, 'MD5SUMS'), + os.path.join('folder', folder2, 'WatashiNiYomimasu.txt'), + os.path.join('folder', folder3, 'Changes.txt'), + os.path.join('folder', folder3, 'fin'), + os.path.join('folder', folder3, 'MD5SUMS'), + os.path.join('folder', folder3, 'oops'), + os.path.join('folder', folder3, 'WatashiNiYomimasu.txt'), + os.path.join('folder', folder3, 'ZuMachen.txt'), + os.path.join('folder', 'third_party', 'WatashiNiYomimasu.txt'), + os.path.join('folder', 'lalala.txt'), + os.path.join('folder', 'quest.txt'), + # The example will have a deleted file + # (or should) but shouldn't return it + ]) + self.assertEqual(set(x for x in walk_revctrl()), expected) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/lib/python3.4/site-packages/setuptools/tests/test_svn.py b/lib/python3.4/site-packages/setuptools/tests/test_svn.py new file mode 100644 index 0000000..3340036 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/test_svn.py @@ -0,0 +1,245 @@ +# -*- coding: utf-8 -*- +"""svn tests""" + +import io +import os +import subprocess +import sys +import unittest +from setuptools.tests import environment +from setuptools.compat import unicode, unichr + +from setuptools import svn_utils +from 
setuptools.tests.py26compat import skipIf + + +def _do_svn_check(): + try: + subprocess.check_call(["svn", "--version"], + shell=(sys.platform == 'win32')) + return True + except (OSError, subprocess.CalledProcessError): + return False +_svn_check = _do_svn_check() + + +class TestSvnVersion(unittest.TestCase): + + def test_no_svn_found(self): + path_variable = None + for env in os.environ: + if env.lower() == 'path': + path_variable = env + + if path_variable is None: + try: + self.skipTest('Cannot figure out how to modify path') + except AttributeError: # PY26 doesn't have this + return + + old_path = os.environ[path_variable] + os.environ[path_variable] = '' + try: + version = svn_utils.SvnInfo.get_svn_version() + self.assertEqual(version, '') + finally: + os.environ[path_variable] = old_path + + @skipIf(not _svn_check, "No SVN to text, in the first place") + def test_svn_should_exist(self): + version = svn_utils.SvnInfo.get_svn_version() + self.assertNotEqual(version, '') + +def _read_utf8_file(path): + fileobj = None + try: + fileobj = io.open(path, 'r', encoding='utf-8') + data = fileobj.read() + return data + finally: + if fileobj: + fileobj.close() + + +class ParserInfoXML(unittest.TestCase): + + def parse_tester(self, svn_name, ext_spaces): + path = os.path.join('setuptools', 'tests', + 'svn_data', svn_name + '_info.xml') + #Remember these are pre-generated to test XML parsing + # so these paths might not valid on your system + example_base = "%s_example" % svn_name + + data = _read_utf8_file(path) + + expected = set([ + ("\\".join((example_base, 'a file')), 'file'), + ("\\".join((example_base, 'folder')), 'dir'), + ("\\".join((example_base, 'folder', 'lalala.txt')), 'file'), + ("\\".join((example_base, 'folder', 'quest.txt')), 'file'), + ]) + self.assertEqual(set(x for x in svn_utils.parse_dir_entries(data)), + expected) + + def test_svn13(self): + self.parse_tester('svn13', False) + + def test_svn14(self): + self.parse_tester('svn14', False) + + def test_svn15(self): + self.parse_tester('svn15', False) + + def test_svn16(self): + self.parse_tester('svn16', True) + + def test_svn17(self): + self.parse_tester('svn17', True) + + def test_svn18(self): + self.parse_tester('svn18', True) + +class ParserExternalXML(unittest.TestCase): + + def parse_tester(self, svn_name, ext_spaces): + path = os.path.join('setuptools', 'tests', + 'svn_data', svn_name + '_ext_list.xml') + example_base = svn_name + '_example' + data = _read_utf8_file(path) + + if ext_spaces: + folder2 = 'third party2' + folder3 = 'third party3' + else: + folder2 = 'third_party2' + folder3 = 'third_party3' + + expected = set([ + os.sep.join((example_base, folder2)), + os.sep.join((example_base, folder3)), + # folder is third_party大介 + os.sep.join((example_base, + unicode('third_party') + + unichr(0x5927) + unichr(0x4ecb))), + os.sep.join((example_base, 'folder', folder2)), + os.sep.join((example_base, 'folder', folder3)), + os.sep.join((example_base, 'folder', + unicode('third_party') + + unichr(0x5927) + unichr(0x4ecb))), + ]) + + expected = set(os.path.normpath(x) for x in expected) + dir_base = os.sep.join(('C:', 'development', 'svn_example')) + self.assertEqual(set(x for x + in svn_utils.parse_externals_xml(data, dir_base)), expected) + + def test_svn15(self): + self.parse_tester('svn15', False) + + def test_svn16(self): + self.parse_tester('svn16', True) + + def test_svn17(self): + self.parse_tester('svn17', True) + + def test_svn18(self): + self.parse_tester('svn18', True) + + +class ParseExternal(unittest.TestCase): 
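+ """Check svn_utils.parse_external_prop against captured svn:externals
+ property dumps (the *_ext_list.txt files), one per svn version."""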
+ + def parse_tester(self, svn_name, ext_spaces): + path = os.path.join('setuptools', 'tests', + 'svn_data', svn_name + '_ext_list.txt') + data = _read_utf8_file(path) + + if ext_spaces: + expected = set(['third party2', 'third party3', + 'third party3b', 'third_party']) + else: + expected = set(['third_party2', 'third_party3', 'third_party']) + + self.assertEqual(set(x for x in svn_utils.parse_external_prop(data)), + expected) + + def test_svn13(self): + self.parse_tester('svn13', False) + + def test_svn14(self): + self.parse_tester('svn14', False) + + def test_svn15(self): + self.parse_tester('svn15', False) + + def test_svn16(self): + self.parse_tester('svn16', True) + + def test_svn17(self): + self.parse_tester('svn17', True) + + def test_svn18(self): + self.parse_tester('svn18', True) + + +class TestSvn(environment.ZippedEnvironment): + + def setUp(self): + version = svn_utils.SvnInfo.get_svn_version() + if not version: # empty or null + self.dataname = None + self.datafile = None + return + + self.base_version = tuple([int(x) for x in version.split('.')[:2]]) + + if self.base_version < (1,3): + raise ValueError('Insufficient SVN Version %s' % version) + elif self.base_version >= (1,9): + #trying the latest version + self.base_version = (1,8) + + self.dataname = "svn%i%i_example" % self.base_version + self.datafile = os.path.join('setuptools', 'tests', + 'svn_data', self.dataname + ".zip") + super(TestSvn, self).setUp() + + @skipIf(not _svn_check, "No SVN to text, in the first place") + def test_revision(self): + rev = svn_utils.SvnInfo.load('.').get_revision() + self.assertEqual(rev, 6) + + @skipIf(not _svn_check, "No SVN to text, in the first place") + def test_entries(self): + expected = set([ + (os.path.join('a file'), 'file'), + (os.path.join('folder'), 'dir'), + (os.path.join('folder', 'lalala.txt'), 'file'), + (os.path.join('folder', 'quest.txt'), 'file'), + #The example will have a deleted file (or should) + #but shouldn't return it + ]) + info = svn_utils.SvnInfo.load('.') + self.assertEqual(set(x for x in info.entries), expected) + + @skipIf(not _svn_check, "No SVN to text, in the first place") + def test_externals(self): + if self.base_version >= (1,6): + folder2 = 'third party2' + folder3 = 'third party3' + else: + folder2 = 'third_party2' + folder3 = 'third_party3' + + expected = set([ + os.path.join(folder2), + os.path.join(folder3), + os.path.join('third_party'), + os.path.join('folder', folder2), + os.path.join('folder', folder3), + os.path.join('folder', 'third_party'), + ]) + info = svn_utils.SvnInfo.load('.') + self.assertEqual(set([x for x in info.externals]), expected) + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/lib/python3.4/site-packages/setuptools/tests/test_test.py b/lib/python3.4/site-packages/setuptools/tests/test_test.py new file mode 100644 index 0000000..df92085 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/test_test.py @@ -0,0 +1,126 @@ +# -*- coding: UTF-8 -*- + +"""develop tests +""" +import os +import shutil +import site +import sys +import tempfile +import unittest + +from distutils.errors import DistutilsError +from setuptools.compat import StringIO, PY2 +from setuptools.command.test import test +from setuptools.command import easy_install as easy_install_pkg +from setuptools.dist import Distribution + +SETUP_PY = """\ +from setuptools import setup + +setup(name='foo', + packages=['name', 'name.space', 'name.space.tests'], + namespace_packages=['name'], + 
test_suite='name.space.tests.test_suite', +) +""" + +NS_INIT = """# -*- coding: Latin-1 -*- +# Söme Arbiträry Ünicode to test Issüé 310 +try: + __import__('pkg_resources').declare_namespace(__name__) +except ImportError: + from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) +""" +# Make sure this is Latin-1 binary, before writing: +if PY2: + NS_INIT = NS_INIT.decode('UTF-8') +NS_INIT = NS_INIT.encode('Latin-1') + +TEST_PY = """import unittest + +class TestTest(unittest.TestCase): + def test_test(self): + print "Foo" # Should fail under Python 3 unless 2to3 is used + +test_suite = unittest.makeSuite(TestTest) +""" + +class TestTestTest(unittest.TestCase): + + def setUp(self): + if sys.version < "2.6" or hasattr(sys, 'real_prefix'): + return + + # Directory structure + self.dir = tempfile.mkdtemp() + os.mkdir(os.path.join(self.dir, 'name')) + os.mkdir(os.path.join(self.dir, 'name', 'space')) + os.mkdir(os.path.join(self.dir, 'name', 'space', 'tests')) + # setup.py + setup = os.path.join(self.dir, 'setup.py') + f = open(setup, 'wt') + f.write(SETUP_PY) + f.close() + self.old_cwd = os.getcwd() + # name/__init__.py + init = os.path.join(self.dir, 'name', '__init__.py') + f = open(init, 'wb') + f.write(NS_INIT) + f.close() + # name/space/__init__.py + init = os.path.join(self.dir, 'name', 'space', '__init__.py') + f = open(init, 'wt') + f.write('#empty\n') + f.close() + # name/space/tests/__init__.py + init = os.path.join(self.dir, 'name', 'space', 'tests', '__init__.py') + f = open(init, 'wt') + f.write(TEST_PY) + f.close() + + os.chdir(self.dir) + self.old_base = site.USER_BASE + site.USER_BASE = tempfile.mkdtemp() + self.old_site = site.USER_SITE + site.USER_SITE = tempfile.mkdtemp() + + def tearDown(self): + if sys.version < "2.6" or hasattr(sys, 'real_prefix'): + return + + os.chdir(self.old_cwd) + shutil.rmtree(self.dir) + shutil.rmtree(site.USER_BASE) + shutil.rmtree(site.USER_SITE) + site.USER_BASE = self.old_base + site.USER_SITE = self.old_site + + def test_test(self): + if sys.version < "2.6" or hasattr(sys, 'real_prefix'): + return + + dist = Distribution(dict( + name='foo', + packages=['name', 'name.space', 'name.space.tests'], + namespace_packages=['name'], + test_suite='name.space.tests.test_suite', + use_2to3=True, + )) + dist.script_name = 'setup.py' + cmd = test(dist) + cmd.user = 1 + cmd.ensure_finalized() + cmd.install_dir = site.USER_SITE + cmd.user = 1 + old_stdout = sys.stdout + sys.stdout = StringIO() + try: + try: # try/except/finally doesn't work in Python 2.4, so we need nested try-statements. + cmd.run() + except SystemExit: # The test runner calls sys.exit, stop that making an error. 
+ pass + finally: + sys.stdout = old_stdout + diff --git a/lib/python3.4/site-packages/setuptools/tests/test_upload_docs.py b/lib/python3.4/site-packages/setuptools/tests/test_upload_docs.py new file mode 100644 index 0000000..769f16c --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/tests/test_upload_docs.py @@ -0,0 +1,72 @@ +"""build_ext tests +""" +import sys, os, shutil, tempfile, unittest, site, zipfile +from setuptools.command.upload_docs import upload_docs +from setuptools.dist import Distribution + +SETUP_PY = """\ +from setuptools import setup + +setup(name='foo') +""" + +class TestUploadDocsTest(unittest.TestCase): + def setUp(self): + self.dir = tempfile.mkdtemp() + setup = os.path.join(self.dir, 'setup.py') + f = open(setup, 'w') + f.write(SETUP_PY) + f.close() + self.old_cwd = os.getcwd() + os.chdir(self.dir) + + self.upload_dir = os.path.join(self.dir, 'build') + os.mkdir(self.upload_dir) + + # A test document. + f = open(os.path.join(self.upload_dir, 'index.html'), 'w') + f.write("Hello world.") + f.close() + + # An empty folder. + os.mkdir(os.path.join(self.upload_dir, 'empty')) + + if sys.version >= "2.6": + self.old_base = site.USER_BASE + site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp() + self.old_site = site.USER_SITE + site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp() + + def tearDown(self): + os.chdir(self.old_cwd) + shutil.rmtree(self.dir) + if sys.version >= "2.6": + shutil.rmtree(site.USER_BASE) + shutil.rmtree(site.USER_SITE) + site.USER_BASE = self.old_base + site.USER_SITE = self.old_site + + def test_create_zipfile(self): + # Test to make sure zipfile creation handles common cases. + # This explicitly includes a folder containing an empty folder. + + dist = Distribution() + + cmd = upload_docs(dist) + cmd.upload_dir = self.upload_dir + cmd.target_dir = self.upload_dir + tmp_dir = tempfile.mkdtemp() + tmp_file = os.path.join(tmp_dir, 'foo.zip') + try: + zip_file = cmd.create_zipfile(tmp_file) + + assert zipfile.is_zipfile(tmp_file) + + zip_file = zipfile.ZipFile(tmp_file) # woh... 
+ + assert zip_file.namelist() == ['index.html'] + + zip_file.close() + finally: + shutil.rmtree(tmp_dir) + diff --git a/lib/python3.4/site-packages/setuptools/unicode_utils.py b/lib/python3.4/site-packages/setuptools/unicode_utils.py new file mode 100644 index 0000000..d2de941 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/unicode_utils.py @@ -0,0 +1,41 @@ +import unicodedata +import sys +from setuptools.compat import unicode as decoded_string + + +# HFS Plus uses decomposed UTF-8 +def decompose(path): + if isinstance(path, decoded_string): + return unicodedata.normalize('NFD', path) + try: + path = path.decode('utf-8') + path = unicodedata.normalize('NFD', path) + path = path.encode('utf-8') + except UnicodeError: + pass # Not UTF-8 + return path + + +def filesys_decode(path): + """ + Ensure that the given path is decoded, + NONE when no expected encoding works + """ + + fs_enc = sys.getfilesystemencoding() + if isinstance(path, decoded_string): + return path + + for enc in (fs_enc, "utf-8"): + try: + return path.decode(enc) + except UnicodeDecodeError: + continue + + +def try_encode(string, enc): + "turn unicode encoding into a functional routine" + try: + return string.encode(enc) + except UnicodeEncodeError: + return None diff --git a/lib/python3.4/site-packages/setuptools/version.py b/lib/python3.4/site-packages/setuptools/version.py new file mode 100644 index 0000000..45e4cae --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/version.py @@ -0,0 +1 @@ +__version__ = '5.5.1' diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/six-1.10.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..2e2607d --- /dev/null +++ b/lib/python3.4/site-packages/six-1.10.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,18 @@ +Six is a Python 2 and 3 compatibility library. It provides utility functions +for smoothing over the differences between the Python versions with the goal of +writing Python code that is compatible on both Python versions. See the +documentation for more information on what is provided. + +Six supports every Python version since 2.6. It is contained in only one Python +file, so it can be easily copied into your project. (The copyright and license +notice must be retained.) + +Online documentation is at https://pythonhosted.org/six/. + +Bugs can be reported to https://bitbucket.org/gutworth/six. The code can also +be found there. + +For questions about six or porting in general, email the python-porting mailing +list: https://mail.python.org/mailman/listinfo/python-porting + + diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/METADATA b/lib/python3.4/site-packages/six-1.10.0.dist-info/METADATA new file mode 100644 index 0000000..4fc3d07 --- /dev/null +++ b/lib/python3.4/site-packages/six-1.10.0.dist-info/METADATA @@ -0,0 +1,34 @@ +Metadata-Version: 2.0 +Name: six +Version: 1.10.0 +Summary: Python 2 and 3 compatibility utilities +Home-page: http://pypi.python.org/pypi/six/ +Author: Benjamin Peterson +Author-email: benjamin@python.org +License: MIT +Platform: UNKNOWN +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities + +Six is a Python 2 and 3 compatibility library. 
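As a concrete taste of the shims the library provides (using only names six itself defines later in this diff), a tiny sketch of cross-version text normalization:

    import six

    def to_text(value, encoding='utf-8'):
        # six.binary_type is str on Python 2 and bytes on Python 3;
        # six.text_type is unicode resp. str, so this runs unchanged on both.
        if isinstance(value, six.binary_type):
            return value.decode(encoding)
        return six.text_type(value)

    print(to_text(b'caf\xc3\xa9'))  # 'café' under either interpreter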
It provides utility functions +for smoothing over the differences between the Python versions with the goal of +writing Python code that is compatible on both Python versions. See the +documentation for more information on what is provided. + +Six supports every Python version since 2.6. It is contained in only one Python +file, so it can be easily copied into your project. (The copyright and license +notice must be retained.) + +Online documentation is at https://pythonhosted.org/six/. + +Bugs can be reported to https://bitbucket.org/gutworth/six. The code can also +be found there. + +For questions about six or porting in general, email the python-porting mailing +list: https://mail.python.org/mailman/listinfo/python-porting + + diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/RECORD b/lib/python3.4/site-packages/six-1.10.0.dist-info/RECORD new file mode 100644 index 0000000..6350c4e --- /dev/null +++ b/lib/python3.4/site-packages/six-1.10.0.dist-info/RECORD @@ -0,0 +1,8 @@ +six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 +six-1.10.0.dist-info/DESCRIPTION.rst,sha256=QWBtSTT2zzabwJv1NQbTfClSX13m-Qc6tqU4TRL1RLs,774 +six-1.10.0.dist-info/METADATA,sha256=5HceJsUnHof2IRamlCKO2MwNjve1eSP4rLzVQDfwpCQ,1283 +six-1.10.0.dist-info/RECORD,, +six-1.10.0.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 +six-1.10.0.dist-info/metadata.json,sha256=jtOeeTBubYDChl_5Ql5ZPlKoHgg6rdqRIjOz1e5Ek2U,658 +six-1.10.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4 +__pycache__/six.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/WHEEL b/lib/python3.4/site-packages/six-1.10.0.dist-info/WHEEL new file mode 100644 index 0000000..0de529b --- /dev/null +++ b/lib/python3.4/site-packages/six-1.10.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/metadata.json b/lib/python3.4/site-packages/six-1.10.0.dist-info/metadata.json new file mode 100644 index 0000000..21f9f6c --- /dev/null +++ b/lib/python3.4/site-packages/six-1.10.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "Python 2 and 3 compatibility utilities", "classifiers": ["Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Libraries", "Topic :: Utilities"], "extensions": {"python.details": {"project_urls": {"Home": "http://pypi.python.org/pypi/six/"}, "contacts": [{"email": "benjamin@python.org", "name": "Benjamin Peterson", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "license": "MIT", "metadata_version": "2.0", "name": "six", "version": "1.10.0"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/top_level.txt b/lib/python3.4/site-packages/six-1.10.0.dist-info/top_level.txt new file mode 100644 index 0000000..ffe2fce --- /dev/null +++ b/lib/python3.4/site-packages/six-1.10.0.dist-info/top_level.txt @@ -0,0 +1 @@ +six diff --git a/lib/python3.4/site-packages/six.py b/lib/python3.4/site-packages/six.py new file mode 100644 index 0000000..190c023 --- /dev/null +++ b/lib/python3.4/site-packages/six.py @@ -0,0 +1,868 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2015 Benjamin Peterson +# +# Permission is hereby 
granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.10.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. 
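The self-deleting descriptor here is the heart of six's lazy loading: resolve once, cache the result, then remove the hook so later lookups cost nothing. The trick in isolation (`lazy_attr` and `Config` are illustrative, not six API):

    class lazy_attr(object):
        # Non-data descriptor: compute on first access, then shadow
        # ourselves on the owner class so subsequent reads are plain
        # attribute lookups, the same pattern _LazyDescr uses above.
        def __init__(self, name, factory):
            self.name = name
            self.factory = factory

        def __get__(self, obj, tp):
            value = self.factory()
            setattr(tp, self.name, value)  # replaces this descriptor
            return value

    class Config(object):
        settings = lazy_attr('settings', lambda: {"loaded": True})

    c = Config()
    print(c.settings)  # factory runs exactly once, here
    print(c.settings)  # now an ordinary class attribute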
+ delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. 
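The `_SixMetaPathImporter` above is a compact PEP 302 finder/loader; the protocol is easier to see in a toy importer that serves modules out of a dict (everything below is illustrative, not six code):

    import sys
    import types

    class DictImporter(object):
        # find_module answers "can you load this?"; load_module builds
        # (or, on reload, reuses) the module object and registers it.
        def __init__(self, modules):
            self.modules = modules  # fullname -> attribute dict

        def find_module(self, fullname, path=None):
            return self if fullname in self.modules else None

        def load_module(self, fullname):
            if fullname in sys.modules:   # reload case, as above
                return sys.modules[fullname]
            mod = types.ModuleType(fullname)
            mod.__dict__.update(self.modules[fullname])
            mod.__loader__ = self
            sys.modules[fullname] = mod
            return mod

    sys.meta_path.append(DictImporter({'demo_virtual': {'answer': 42}}))
    import demo_virtual
    print(demo_virtual.answer)  # 42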
+ + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + 
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." + attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + 
MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", 
"urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = 
operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + 
raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. + if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. 
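The two metaclass helpers above, `with_metaclass` and `add_metaclass`, are among the most used pieces of six; a short usage sketch showing both spellings (the `Meta` class here is a toy, not part of six):

    import six

    class Meta(type):
        # Toy metaclass that records the classes it creates.
        created = []
        def __new__(mcs, name, bases, d):
            cls = super(Meta, mcs).__new__(mcs, name, bases, d)
            Meta.created.append(name)
            return cls

    # Portable alternatives to Python 2's `__metaclass__` attribute and
    # Python 3's `class C(metaclass=Meta)` syntax, which exclude each other.
    class ViaBase(six.with_metaclass(Meta, object)):
        pass

    @six.add_metaclass(Meta)
    class ViaDecorator(object):
        pass

    print(Meta.created)  # ['ViaBase', 'ViaDecorator']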
+# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/lib/python3.4/site-packages/sqlalchemy/__init__.py b/lib/python3.4/site-packages/sqlalchemy/__init__.py new file mode 100644 index 0000000..3008120 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/__init__.py @@ -0,0 +1,138 @@ +# sqlalchemy/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +from .sql import ( + alias, + and_, + asc, + between, + bindparam, + case, + cast, + collate, + column, + delete, + desc, + distinct, + except_, + except_all, + exists, + extract, + false, + func, + funcfilter, + insert, + intersect, + intersect_all, + join, + literal, + literal_column, + modifier, + not_, + null, + or_, + outerjoin, + outparam, + over, + select, + subquery, + table, + text, + true, + tuple_, + type_coerce, + union, + union_all, + update, + ) + +from .types import ( + BIGINT, + BINARY, + BLOB, + BOOLEAN, + BigInteger, + Binary, + Boolean, + CHAR, + CLOB, + DATE, + DATETIME, + DECIMAL, + Date, + DateTime, + Enum, + FLOAT, + Float, + INT, + INTEGER, + Integer, + Interval, + LargeBinary, + NCHAR, + NVARCHAR, + NUMERIC, + Numeric, + PickleType, + REAL, + SMALLINT, + SmallInteger, + String, + TEXT, + TIME, + TIMESTAMP, + Text, + Time, + TypeDecorator, + Unicode, + UnicodeText, + VARBINARY, + VARCHAR, + ) + + +from .schema import ( + CheckConstraint, + Column, + ColumnDefault, + Constraint, + DefaultClause, + FetchedValue, + ForeignKey, + ForeignKeyConstraint, + Index, + MetaData, + PassiveDefault, + PrimaryKeyConstraint, + Sequence, + Table, + ThreadLocalMetaData, + UniqueConstraint, + DDL, +) + + +from .inspection import inspect +from .engine import create_engine, engine_from_config + +__version__ = '1.0.12' + + +def __go(lcls): + global __all__ + + from . import events + from . 
import util as _sa_util + + import inspect as _inspect + + __all__ = sorted(name for name, obj in lcls.items() + if not (name.startswith('_') or _inspect.ismodule(obj))) + + _sa_util.dependencies.resolve_all("sqlalchemy") +__go(locals()) diff --git a/lib/python3.4/site-packages/sqlalchemy/connectors/__init__.py b/lib/python3.4/site-packages/sqlalchemy/connectors/__init__.py new file mode 100644 index 0000000..d72c390 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/connectors/__init__.py @@ -0,0 +1,10 @@ +# connectors/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +class Connector(object): + pass diff --git a/lib/python3.4/site-packages/sqlalchemy/connectors/mxodbc.py b/lib/python3.4/site-packages/sqlalchemy/connectors/mxodbc.py new file mode 100644 index 0000000..9fc0ce6 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/connectors/mxodbc.py @@ -0,0 +1,150 @@ +# connectors/mxodbc.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +Provide an SQLALchemy connector for the eGenix mxODBC commercial +Python adapter for ODBC. This is not a free product, but eGenix +provides SQLAlchemy with a license for use in continuous integration +testing. + +This has been tested for use with mxODBC 3.1.2 on SQL Server 2005 +and 2008, using the SQL Server Native driver. However, it is +possible for this to be used on other database platforms. + +For more info on mxODBC, see http://www.egenix.com/ + +""" + +import sys +import re +import warnings + +from . import Connector + + +class MxODBCConnector(Connector): + driver = 'mxodbc' + + supports_sane_multi_rowcount = False + supports_unicode_statements = True + supports_unicode_binds = True + + supports_native_decimal = True + + @classmethod + def dbapi(cls): + # this classmethod will normally be replaced by an instance + # attribute of the same name, so this is normally only called once. + cls._load_mx_exceptions() + platform = sys.platform + if platform == 'win32': + from mx.ODBC import Windows as module + # this can be the string "linux2", and possibly others + elif 'linux' in platform: + from mx.ODBC import unixODBC as module + elif platform == 'darwin': + from mx.ODBC import iODBC as module + else: + raise ImportError("Unrecognized platform for mxODBC import") + return module + + @classmethod + def _load_mx_exceptions(cls): + """ Import mxODBC exception classes into the module namespace, + as if they had been imported normally. This is done here + to avoid requiring all SQLAlchemy users to install mxODBC. + """ + global InterfaceError, ProgrammingError + from mx.ODBC import InterfaceError + from mx.ODBC import ProgrammingError + + def on_connect(self): + def connect(conn): + conn.stringformat = self.dbapi.MIXED_STRINGFORMAT + conn.datetimeformat = self.dbapi.PYDATETIME_DATETIMEFORMAT + conn.decimalformat = self.dbapi.DECIMAL_DECIMALFORMAT + conn.errorhandler = self._error_handler() + return connect + + def _error_handler(self): + """ Return a handler that adjusts mxODBC's raised Warnings to + emit Python standard warnings. 
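(The errorhandler contract is a plain callable; a standalone sketch with a stand-in warning class, since the commercial mx.ODBC package may not be installed:)

    import warnings

    class DriverWarning(Exception):
        # Stand-in for mx.ODBC.Error.Warning.
        pass

    def make_error_handler(warning_base=DriverWarning):
        def error_handler(connection, cursor, errorclass, errorvalue):
            # Downgrade driver "warnings" to Python warnings; re-raise
            # everything else, as the mxODBC handler here does.
            if issubclass(errorclass, warning_base):
                warnings.warn(str(errorvalue), stacklevel=2)
            else:
                raise errorclass(errorvalue)
        return error_handler

    handler = make_error_handler()
    handler(None, None, DriverWarning, 'deprecated syntax')  # emits a warning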
+ """ + from mx.ODBC.Error import Warning as MxOdbcWarning + + def error_handler(connection, cursor, errorclass, errorvalue): + if issubclass(errorclass, MxOdbcWarning): + errorclass.__bases__ = (Warning,) + warnings.warn(message=str(errorvalue), + category=errorclass, + stacklevel=2) + else: + raise errorclass(errorvalue) + return error_handler + + def create_connect_args(self, url): + """ Return a tuple of *args,**kwargs for creating a connection. + + The mxODBC 3.x connection constructor looks like this: + + connect(dsn, user='', password='', + clear_auto_commit=1, errorhandler=None) + + This method translates the values in the provided uri + into args and kwargs needed to instantiate an mxODBC Connection. + + The arg 'errorhandler' is not used by SQLAlchemy and will + not be populated. + + """ + opts = url.translate_connect_args(username='user') + opts.update(url.query) + args = opts.pop('host') + opts.pop('port', None) + opts.pop('database', None) + return (args,), opts + + def is_disconnect(self, e, connection, cursor): + # TODO: eGenix recommends checking connection.closed here + # Does that detect dropped connections ? + if isinstance(e, self.dbapi.ProgrammingError): + return "connection already closed" in str(e) + elif isinstance(e, self.dbapi.Error): + return '[08S01]' in str(e) + else: + return False + + def _get_server_version_info(self, connection): + # eGenix suggests using conn.dbms_version instead + # of what we're doing here + dbapi_con = connection.connection + version = [] + r = re.compile('[.\-]') + # 18 == pyodbc.SQL_DBMS_VER + for n in r.split(dbapi_con.getinfo(18)[1]): + try: + version.append(int(n)) + except ValueError: + version.append(n) + return tuple(version) + + def _get_direct(self, context): + if context: + native_odbc_execute = context.execution_options.\ + get('native_odbc_execute', 'auto') + # default to direct=True in all cases, is more generally + # compatible especially with SQL Server + return False if native_odbc_execute is True else True + else: + return True + + def do_executemany(self, cursor, statement, parameters, context=None): + cursor.executemany( + statement, parameters, direct=self._get_direct(context)) + + def do_execute(self, cursor, statement, parameters, context=None): + cursor.execute(statement, parameters, direct=self._get_direct(context)) diff --git a/lib/python3.4/site-packages/sqlalchemy/connectors/pyodbc.py b/lib/python3.4/site-packages/sqlalchemy/connectors/pyodbc.py new file mode 100644 index 0000000..68bbcc4 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/connectors/pyodbc.py @@ -0,0 +1,183 @@ +# connectors/pyodbc.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from . import Connector +from .. 
import util + + +import sys +import re + + +class PyODBCConnector(Connector): + driver = 'pyodbc' + + supports_sane_multi_rowcount = False + + if util.py2k: + # PyODBC unicode is broken on UCS-4 builds + supports_unicode = sys.maxunicode == 65535 + supports_unicode_statements = supports_unicode + + supports_native_decimal = True + default_paramstyle = 'named' + + # for non-DSN connections, this *may* be used to + # hold the desired driver name + pyodbc_driver_name = None + + # will be set to True after initialize() + # if the freetds.so is detected + freetds = False + + # will be set to the string version of + # the FreeTDS driver if freetds is detected + freetds_driver_version = None + + # will be set to True after initialize() + # if the libessqlsrv.so is detected + easysoft = False + + def __init__(self, supports_unicode_binds=None, **kw): + super(PyODBCConnector, self).__init__(**kw) + self._user_supports_unicode_binds = supports_unicode_binds + + @classmethod + def dbapi(cls): + return __import__('pyodbc') + + def create_connect_args(self, url): + opts = url.translate_connect_args(username='user') + opts.update(url.query) + + keys = opts + query = url.query + + connect_args = {} + for param in ('ansi', 'unicode_results', 'autocommit'): + if param in keys: + connect_args[param] = util.asbool(keys.pop(param)) + + if 'odbc_connect' in keys: + connectors = [util.unquote_plus(keys.pop('odbc_connect'))] + else: + dsn_connection = 'dsn' in keys or \ + ('host' in keys and 'database' not in keys) + if dsn_connection: + connectors = ['dsn=%s' % (keys.pop('host', '') or + keys.pop('dsn', ''))] + else: + port = '' + if 'port' in keys and 'port' not in query: + port = ',%d' % int(keys.pop('port')) + + connectors = [] + driver = keys.pop('driver', self.pyodbc_driver_name) + if driver is None: + util.warn( + "No driver name specified; " + "this is expected by PyODBC when using " + "DSN-less connections") + else: + connectors.append("DRIVER={%s}" % driver) + + connectors.extend( + [ + 'Server=%s%s' % (keys.pop('host', ''), port), + 'Database=%s' % keys.pop('database', '') + ]) + + user = keys.pop("user", None) + if user: + connectors.append("UID=%s" % user) + connectors.append("PWD=%s" % keys.pop('password', '')) + else: + connectors.append("Trusted_Connection=Yes") + + # if set to 'Yes', the ODBC layer will try to automagically + # convert textual data from your database encoding to your + # client encoding. This should obviously be set to 'No' if + # you query a cp1253 encoded database from a latin1 client... + if 'odbc_autotranslate' in keys: + connectors.append("AutoTranslate=%s" % + keys.pop("odbc_autotranslate")) + + connectors.extend(['%s=%s' % (k, v) for k, v in keys.items()]) + return [[";".join(connectors)], connect_args] + + def is_disconnect(self, e, connection, cursor): + if isinstance(e, self.dbapi.ProgrammingError): + return "The cursor's connection has been closed." in str(e) or \ + 'Attempt to use a closed connection.' in str(e) + elif isinstance(e, self.dbapi.Error): + return '[08S01]' in str(e) + else: + return False + + def initialize(self, connection): + # determine FreeTDS first. can't issue SQL easily + # without getting unicode_statements/binds set up. 
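(For orientation, the DSN-less branch of create_connect_args above assembles a single semicolon-separated string; a self-contained sketch with made-up values:)

    def odbc_connect_string(host, database, user=None, password='',
                            driver='SQL Server', port=None):
        # Mirrors the shape built by create_connect_args above:
        # DRIVER={...};Server=host,port;Database=db;UID=u;PWD=p
        parts = ['DRIVER={%s}' % driver,
                 'Server=%s%s' % (host, ',%d' % port if port else ''),
                 'Database=%s' % database]
        if user:
            parts.append('UID=%s' % user)
            parts.append('PWD=%s' % password)
        else:
            parts.append('Trusted_Connection=Yes')
        return ';'.join(parts)

    print(odbc_connect_string('dbhost', 'mydb', user='sa',
                              password='secret', port=1433))
    # DRIVER={SQL Server};Server=dbhost,1433;Database=mydb;UID=sa;PWD=secret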
+ + pyodbc = self.dbapi + + dbapi_con = connection.connection + + _sql_driver_name = dbapi_con.getinfo(pyodbc.SQL_DRIVER_NAME) + self.freetds = bool(re.match(r".*libtdsodbc.*\.so", _sql_driver_name + )) + self.easysoft = bool(re.match(r".*libessqlsrv.*\.so", _sql_driver_name + )) + + if self.freetds: + self.freetds_driver_version = dbapi_con.getinfo( + pyodbc.SQL_DRIVER_VER) + + self.supports_unicode_statements = ( + not util.py2k or + (not self.freetds and not self.easysoft) + ) + + if self._user_supports_unicode_binds is not None: + self.supports_unicode_binds = self._user_supports_unicode_binds + elif util.py2k: + self.supports_unicode_binds = ( + not self.freetds or self.freetds_driver_version >= '0.91' + ) and not self.easysoft + else: + self.supports_unicode_binds = True + + # run other initialization which asks for user name, etc. + super(PyODBCConnector, self).initialize(connection) + + + def _dbapi_version(self): + if not self.dbapi: + return () + return self._parse_dbapi_version(self.dbapi.version) + + def _parse_dbapi_version(self, vers): + m = re.match( + r'(?:py.*-)?([\d\.]+)(?:-(\w+))?', + vers + ) + if not m: + return () + vers = tuple([int(x) for x in m.group(1).split(".")]) + if m.group(2): + vers += (m.group(2),) + return vers + + def _get_server_version_info(self, connection): + dbapi_con = connection.connection + version = [] + r = re.compile('[.\-]') + for n in r.split(dbapi_con.getinfo(self.dbapi.SQL_DBMS_VER)): + try: + version.append(int(n)) + except ValueError: + version.append(n) + return tuple(version) diff --git a/lib/python3.4/site-packages/sqlalchemy/connectors/zxJDBC.py b/lib/python3.4/site-packages/sqlalchemy/connectors/zxJDBC.py new file mode 100644 index 0000000..e7b2dc9 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/connectors/zxJDBC.py @@ -0,0 +1,60 @@ +# connectors/zxJDBC.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import sys +from . 
import Connector + + +class ZxJDBCConnector(Connector): + driver = 'zxjdbc' + + supports_sane_rowcount = False + supports_sane_multi_rowcount = False + + supports_unicode_binds = True + supports_unicode_statements = sys.version > '2.5.0+' + description_encoding = None + default_paramstyle = 'qmark' + + jdbc_db_name = None + jdbc_driver_name = None + + @classmethod + def dbapi(cls): + from com.ziclix.python.sql import zxJDBC + return zxJDBC + + def _driver_kwargs(self): + """Return kw arg dict to be sent to connect().""" + return {} + + def _create_jdbc_url(self, url): + """Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`""" + return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host, + url.port is not None + and ':%s' % url.port or '', + url.database) + + def create_connect_args(self, url): + opts = self._driver_kwargs() + opts.update(url.query) + return [ + [self._create_jdbc_url(url), + url.username, url.password, + self.jdbc_driver_name], + opts] + + def is_disconnect(self, e, connection, cursor): + if not isinstance(e, self.dbapi.ProgrammingError): + return False + e = str(e) + return 'connection is closed' in e or 'cursor is closed' in e + + def _get_server_version_info(self, connection): + # use connection.connection.dbversion, and parse appropriately + # to get a tuple + raise NotImplementedError() diff --git a/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-34m.so b/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-34m.so new file mode 100755 index 0000000..6a7b1ab Binary files /dev/null and b/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-34m.so b/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-34m.so new file mode 100755 index 0000000..b7fe7f3 Binary files /dev/null and b/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-34m.so b/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-34m.so new file mode 100755 index 0000000..2d13444 Binary files /dev/null and b/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/databases/__init__.py b/lib/python3.4/site-packages/sqlalchemy/databases/__init__.py new file mode 100644 index 0000000..0bfc937 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/databases/__init__.py @@ -0,0 +1,30 @@ +# databases/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Include imports from the sqlalchemy.dialects package for backwards +compatibility with pre 0.6 versions. 
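(The URL scheme used by `_create_jdbc_url` above, in isolation; `jdbc_url` is an illustrative helper, not SQLAlchemy API:)

    def jdbc_url(db_name, host, database, port=None):
        # Same shape as _create_jdbc_url above:
        # jdbc:<db_name>://<host>[:<port>]/<database>
        return 'jdbc:%s://%s%s/%s' % (
            db_name, host,
            ':%s' % port if port is not None else '',
            database)

    print(jdbc_url('mysql', 'localhost', 'test', 3306))
    # jdbc:mysql://localhost:3306/test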
+ +""" +from ..dialects.sqlite import base as sqlite +from ..dialects.postgresql import base as postgresql +postgres = postgresql +from ..dialects.mysql import base as mysql +from ..dialects.oracle import base as oracle +from ..dialects.firebird import base as firebird +from ..dialects.mssql import base as mssql +from ..dialects.sybase import base as sybase + + +__all__ = ( + 'firebird', + 'mssql', + 'mysql', + 'postgresql', + 'sqlite', + 'oracle', + 'sybase', +) diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/__init__.py new file mode 100644 index 0000000..5653f5b --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/__init__.py @@ -0,0 +1,45 @@ +# dialects/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +__all__ = ( + 'firebird', + 'mssql', + 'mysql', + 'oracle', + 'postgresql', + 'sqlite', + 'sybase', +) + +from .. import util + + +def _auto_fn(name): + """default dialect importer. + + plugs into the :class:`.PluginLoader` + as a first-hit system. + + """ + if "." in name: + dialect, driver = name.split(".") + else: + dialect = name + driver = "base" + try: + module = __import__('sqlalchemy.dialects.%s' % (dialect, )).dialects + except ImportError: + return None + + module = getattr(module, dialect) + if hasattr(module, driver): + module = getattr(module, driver) + return lambda: module.dialect + else: + return None + +registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn) diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/__init__.py new file mode 100644 index 0000000..f27bdc0 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/__init__.py @@ -0,0 +1,21 @@ +# firebird/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from sqlalchemy.dialects.firebird import base, kinterbasdb, fdb + +base.dialect = fdb.dialect + +from sqlalchemy.dialects.firebird.base import \ + SMALLINT, BIGINT, FLOAT, FLOAT, DATE, TIME, \ + TEXT, NUMERIC, FLOAT, TIMESTAMP, VARCHAR, CHAR, BLOB,\ + dialect + +__all__ = ( + 'SMALLINT', 'BIGINT', 'FLOAT', 'FLOAT', 'DATE', 'TIME', + 'TEXT', 'NUMERIC', 'FLOAT', 'TIMESTAMP', 'VARCHAR', 'CHAR', 'BLOB', + 'dialect' +) diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/base.py new file mode 100644 index 0000000..4dbf382 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/base.py @@ -0,0 +1,738 @@ +# firebird/base.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" + +.. dialect:: firebird + :name: Firebird + +Firebird Dialects +----------------- + +Firebird offers two distinct dialects_ (not to be confused with a +SQLAlchemy ``Dialect``): + +dialect 1 + This is the old syntax and behaviour, inherited from Interbase pre-6.0. + +dialect 3 + This is the newer and supported syntax, introduced in Interbase 6.0. 
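+
+As a quick, purely illustrative sketch (the connection URL here is
+hypothetical), an engine is pointed at a Firebird database in the usual
+way, and the version detection described next takes place on first
+connect::
+
+    from sqlalchemy import create_engine
+
+    engine = create_engine("firebird://user:pass@localhost/path/to/db.fdb")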
+ +The SQLAlchemy Firebird dialect detects these versions and +adjusts its representation of SQL accordingly. However, +support for dialect 1 is not well tested and probably has +incompatibilities. + +Locking Behavior +---------------- + +Firebird locks tables aggressively. For this reason, a DROP TABLE may +hang until other transactions are released. SQLAlchemy does its best +to release transactions as quickly as possible. The most common cause +of hanging transactions is a non-fully consumed result set, i.e.:: + + result = engine.execute("select * from table") + row = result.fetchone() + return + +Where above, the ``ResultProxy`` has not been fully consumed. The +connection will be returned to the pool and the transactional state +rolled back once the Python garbage collector reclaims the objects +which hold onto the connection, which often occurs asynchronously. +The above use case can be alleviated by calling ``first()`` on the +``ResultProxy`` which will fetch the first row and immediately close +all remaining cursor/connection resources. + +RETURNING support +----------------- + +Firebird 2.0 supports returning a result set from inserts, and 2.1 +extends that to deletes and updates. This is generically exposed by +the SQLAlchemy ``returning()`` method, such as:: + + # INSERT..RETURNING + result = table.insert().returning(table.c.col1, table.c.col2).\\ + values(name='foo') + print result.fetchall() + + # UPDATE..RETURNING + raises = empl.update().returning(empl.c.id, empl.c.salary).\\ + where(empl.c.sales>100).\\ + values(dict(salary=empl.c.salary * 1.1)) + print raises.fetchall() + + +.. _dialects: http://mc-computing.com/Databases/Firebird/SQL_Dialect.html + +""" + +import datetime + +from sqlalchemy import schema as sa_schema +from sqlalchemy import exc, types as sqltypes, sql, util +from sqlalchemy.sql import expression +from sqlalchemy.engine import base, default, reflection +from sqlalchemy.sql import compiler + +from sqlalchemy.types import (BIGINT, BLOB, DATE, FLOAT, INTEGER, NUMERIC, + SMALLINT, TEXT, TIME, TIMESTAMP, Integer) + + +RESERVED_WORDS = set([ + "active", "add", "admin", "after", "all", "alter", "and", "any", "as", + "asc", "ascending", "at", "auto", "avg", "before", "begin", "between", + "bigint", "bit_length", "blob", "both", "by", "case", "cast", "char", + "character", "character_length", "char_length", "check", "close", + "collate", "column", "commit", "committed", "computed", "conditional", + "connect", "constraint", "containing", "count", "create", "cross", + "cstring", "current", "current_connection", "current_date", + "current_role", "current_time", "current_timestamp", + "current_transaction", "current_user", "cursor", "database", "date", + "day", "dec", "decimal", "declare", "default", "delete", "desc", + "descending", "disconnect", "distinct", "do", "domain", "double", + "drop", "else", "end", "entry_point", "escape", "exception", + "execute", "exists", "exit", "external", "extract", "fetch", "file", + "filter", "float", "for", "foreign", "from", "full", "function", + "gdscode", "generator", "gen_id", "global", "grant", "group", + "having", "hour", "if", "in", "inactive", "index", "inner", + "input_type", "insensitive", "insert", "int", "integer", "into", "is", + "isolation", "join", "key", "leading", "left", "length", "level", + "like", "long", "lower", "manual", "max", "maximum_segment", "merge", + "min", "minute", "module_name", "month", "names", "national", + "natural", "nchar", "no", "not", "null", "numeric", "octet_length", + "of", "on", "only", 
"open", "option", "or", "order", "outer", + "output_type", "overflow", "page", "pages", "page_size", "parameter", + "password", "plan", "position", "post_event", "precision", "primary", + "privileges", "procedure", "protected", "rdb$db_key", "read", "real", + "record_version", "recreate", "recursive", "references", "release", + "reserv", "reserving", "retain", "returning_values", "returns", + "revoke", "right", "rollback", "rows", "row_count", "savepoint", + "schema", "second", "segment", "select", "sensitive", "set", "shadow", + "shared", "singular", "size", "smallint", "snapshot", "some", "sort", + "sqlcode", "stability", "start", "starting", "starts", "statistics", + "sub_type", "sum", "suspend", "table", "then", "time", "timestamp", + "to", "trailing", "transaction", "trigger", "trim", "uncommitted", + "union", "unique", "update", "upper", "user", "using", "value", + "values", "varchar", "variable", "varying", "view", "wait", "when", + "where", "while", "with", "work", "write", "year", +]) + + +class _StringType(sqltypes.String): + """Base for Firebird string types.""" + + def __init__(self, charset=None, **kw): + self.charset = charset + super(_StringType, self).__init__(**kw) + + +class VARCHAR(_StringType, sqltypes.VARCHAR): + """Firebird VARCHAR type""" + __visit_name__ = 'VARCHAR' + + def __init__(self, length=None, **kwargs): + super(VARCHAR, self).__init__(length=length, **kwargs) + + +class CHAR(_StringType, sqltypes.CHAR): + """Firebird CHAR type""" + __visit_name__ = 'CHAR' + + def __init__(self, length=None, **kwargs): + super(CHAR, self).__init__(length=length, **kwargs) + + +class _FBDateTime(sqltypes.DateTime): + def bind_processor(self, dialect): + def process(value): + if type(value) == datetime.date: + return datetime.datetime(value.year, value.month, value.day) + else: + return value + return process + +colspecs = { + sqltypes.DateTime: _FBDateTime +} + +ischema_names = { + 'SHORT': SMALLINT, + 'LONG': INTEGER, + 'QUAD': FLOAT, + 'FLOAT': FLOAT, + 'DATE': DATE, + 'TIME': TIME, + 'TEXT': TEXT, + 'INT64': BIGINT, + 'DOUBLE': FLOAT, + 'TIMESTAMP': TIMESTAMP, + 'VARYING': VARCHAR, + 'CSTRING': CHAR, + 'BLOB': BLOB, +} + + +# TODO: date conversion types (should be implemented as _FBDateTime, +# _FBDate, etc. as bind/result functionality is required) + +class FBTypeCompiler(compiler.GenericTypeCompiler): + def visit_boolean(self, type_, **kw): + return self.visit_SMALLINT(type_, **kw) + + def visit_datetime(self, type_, **kw): + return self.visit_TIMESTAMP(type_, **kw) + + def visit_TEXT(self, type_, **kw): + return "BLOB SUB_TYPE 1" + + def visit_BLOB(self, type_, **kw): + return "BLOB SUB_TYPE 0" + + def _extend_string(self, type_, basic): + charset = getattr(type_, 'charset', None) + if charset is None: + return basic + else: + return '%s CHARACTER SET %s' % (basic, charset) + + def visit_CHAR(self, type_, **kw): + basic = super(FBTypeCompiler, self).visit_CHAR(type_, **kw) + return self._extend_string(type_, basic) + + def visit_VARCHAR(self, type_, **kw): + if not type_.length: + raise exc.CompileError( + "VARCHAR requires a length on dialect %s" % + self.dialect.name) + basic = super(FBTypeCompiler, self).visit_VARCHAR(type_, **kw) + return self._extend_string(type_, basic) + + +class FBCompiler(sql.compiler.SQLCompiler): + """Firebird specific idiosyncrasies""" + + ansi_bind_rules = True + + # def visit_contains_op_binary(self, binary, operator, **kw): + # cant use CONTAINING b.c. it's case insensitive. 
+ + # def visit_notcontains_op_binary(self, binary, operator, **kw): + # cant use NOT CONTAINING b.c. it's case insensitive. + + def visit_now_func(self, fn, **kw): + return "CURRENT_TIMESTAMP" + + def visit_startswith_op_binary(self, binary, operator, **kw): + return '%s STARTING WITH %s' % ( + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw)) + + def visit_notstartswith_op_binary(self, binary, operator, **kw): + return '%s NOT STARTING WITH %s' % ( + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw)) + + def visit_mod_binary(self, binary, operator, **kw): + return "mod(%s, %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw)) + + def visit_alias(self, alias, asfrom=False, **kwargs): + if self.dialect._version_two: + return super(FBCompiler, self).\ + visit_alias(alias, asfrom=asfrom, **kwargs) + else: + # Override to not use the AS keyword which FB 1.5 does not like + if asfrom: + alias_name = isinstance(alias.name, + expression._truncated_label) and \ + self._truncated_identifier("alias", + alias.name) or alias.name + + return self.process( + alias.original, asfrom=asfrom, **kwargs) + \ + " " + \ + self.preparer.format_alias(alias, alias_name) + else: + return self.process(alias.original, **kwargs) + + def visit_substring_func(self, func, **kw): + s = self.process(func.clauses.clauses[0]) + start = self.process(func.clauses.clauses[1]) + if len(func.clauses.clauses) > 2: + length = self.process(func.clauses.clauses[2]) + return "SUBSTRING(%s FROM %s FOR %s)" % (s, start, length) + else: + return "SUBSTRING(%s FROM %s)" % (s, start) + + def visit_length_func(self, function, **kw): + if self.dialect._version_two: + return "char_length" + self.function_argspec(function) + else: + return "strlen" + self.function_argspec(function) + + visit_char_length_func = visit_length_func + + def function_argspec(self, func, **kw): + # TODO: this probably will need to be + # narrowed to a fixed list, some no-arg functions + # may require parens - see similar example in the oracle + # dialect + if func.clauses is not None and len(func.clauses): + return self.process(func.clause_expr, **kw) + else: + return "" + + def default_from(self): + return " FROM rdb$database" + + def visit_sequence(self, seq): + return "gen_id(%s, 1)" % self.preparer.format_sequence(seq) + + def get_select_precolumns(self, select, **kw): + """Called when building a ``SELECT`` statement, position is just + before column list Firebird puts the limit and offset right + after the ``SELECT``... 
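+        For example, a limit of 10 with an offset of 5 renders as
+        ``SELECT FIRST 10 SKIP 5 ...``.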
+ """ + + result = "" + if select._limit_clause is not None: + result += "FIRST %s " % self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + result += "SKIP %s " % self.process(select._offset_clause, **kw) + if select._distinct: + result += "DISTINCT " + return result + + def limit_clause(self, select, **kw): + """Already taken care of in the `get_select_precolumns` method.""" + + return "" + + def returning_clause(self, stmt, returning_cols): + columns = [ + self._label_select_column(None, c, True, False, {}) + for c in expression._select_iterables(returning_cols) + ] + + return 'RETURNING ' + ', '.join(columns) + + +class FBDDLCompiler(sql.compiler.DDLCompiler): + """Firebird syntactic idiosyncrasies""" + + def visit_create_sequence(self, create): + """Generate a ``CREATE GENERATOR`` statement for the sequence.""" + + # no syntax for these + # http://www.firebirdsql.org/manual/generatorguide-sqlsyntax.html + if create.element.start is not None: + raise NotImplemented( + "Firebird SEQUENCE doesn't support START WITH") + if create.element.increment is not None: + raise NotImplemented( + "Firebird SEQUENCE doesn't support INCREMENT BY") + + if self.dialect._version_two: + return "CREATE SEQUENCE %s" % \ + self.preparer.format_sequence(create.element) + else: + return "CREATE GENERATOR %s" % \ + self.preparer.format_sequence(create.element) + + def visit_drop_sequence(self, drop): + """Generate a ``DROP GENERATOR`` statement for the sequence.""" + + if self.dialect._version_two: + return "DROP SEQUENCE %s" % \ + self.preparer.format_sequence(drop.element) + else: + return "DROP GENERATOR %s" % \ + self.preparer.format_sequence(drop.element) + + +class FBIdentifierPreparer(sql.compiler.IdentifierPreparer): + """Install Firebird specific reserved words.""" + + reserved_words = RESERVED_WORDS + illegal_initial_characters = compiler.ILLEGAL_INITIAL_CHARACTERS.union( + ['_']) + + def __init__(self, dialect): + super(FBIdentifierPreparer, self).__init__(dialect, omit_schema=True) + + +class FBExecutionContext(default.DefaultExecutionContext): + def fire_sequence(self, seq, type_): + """Get the next value from the sequence using ``gen_id()``.""" + + return self._execute_scalar( + "SELECT gen_id(%s, 1) FROM rdb$database" % + self.dialect.identifier_preparer.format_sequence(seq), + type_ + ) + + +class FBDialect(default.DefaultDialect): + """Firebird dialect""" + + name = 'firebird' + + max_identifier_length = 31 + + supports_sequences = True + sequences_optional = False + supports_default_values = True + postfetch_lastrowid = False + + supports_native_boolean = False + + requires_name_normalize = True + supports_empty_insert = False + + statement_compiler = FBCompiler + ddl_compiler = FBDDLCompiler + preparer = FBIdentifierPreparer + type_compiler = FBTypeCompiler + execution_ctx_cls = FBExecutionContext + + colspecs = colspecs + ischema_names = ischema_names + + construct_arguments = [] + + # defaults to dialect ver. 
3, + # will be autodetected off upon + # first connect + _version_two = True + + def initialize(self, connection): + super(FBDialect, self).initialize(connection) + self._version_two = ('firebird' in self.server_version_info and + self.server_version_info >= (2, ) + ) or \ + ('interbase' in self.server_version_info and + self.server_version_info >= (6, ) + ) + + if not self._version_two: + # TODO: whatever other pre < 2.0 stuff goes here + self.ischema_names = ischema_names.copy() + self.ischema_names['TIMESTAMP'] = sqltypes.DATE + self.colspecs = { + sqltypes.DateTime: sqltypes.DATE + } + + self.implicit_returning = self._version_two and \ + self.__dict__.get('implicit_returning', True) + + def normalize_name(self, name): + # Remove trailing spaces: FB uses a CHAR() type, + # that is padded with spaces + name = name and name.rstrip() + if name is None: + return None + elif name.upper() == name and \ + not self.identifier_preparer._requires_quotes(name.lower()): + return name.lower() + else: + return name + + def denormalize_name(self, name): + if name is None: + return None + elif name.lower() == name and \ + not self.identifier_preparer._requires_quotes(name.lower()): + return name.upper() + else: + return name + + def has_table(self, connection, table_name, schema=None): + """Return ``True`` if the given table exists, ignoring + the `schema`.""" + + tblqry = """ + SELECT 1 AS has_table FROM rdb$database + WHERE EXISTS (SELECT rdb$relation_name + FROM rdb$relations + WHERE rdb$relation_name=?) + """ + c = connection.execute(tblqry, [self.denormalize_name(table_name)]) + return c.first() is not None + + def has_sequence(self, connection, sequence_name, schema=None): + """Return ``True`` if the given sequence (generator) exists.""" + + genqry = """ + SELECT 1 AS has_sequence FROM rdb$database + WHERE EXISTS (SELECT rdb$generator_name + FROM rdb$generators + WHERE rdb$generator_name=?) + """ + c = connection.execute(genqry, [self.denormalize_name(sequence_name)]) + return c.first() is not None + + @reflection.cache + def get_table_names(self, connection, schema=None, **kw): + # there are two queries commonly mentioned for this. + # this one, using view_blr, is at the Firebird FAQ among other places: + # http://www.firebirdfaq.org/faq174/ + s = """ + select rdb$relation_name + from rdb$relations + where rdb$view_blr is null + and (rdb$system_flag is null or rdb$system_flag = 0); + """ + + # the other query is this one. It's not clear if there's really + # any difference between these two. This link: + # http://www.alberton.info/firebird_sql_meta_info.html#.Ur3vXfZGni8 + # states them as interchangeable. Some discussion at [ticket:2898] + # SELECT DISTINCT rdb$relation_name + # FROM rdb$relation_fields + # WHERE rdb$system_flag=0 AND rdb$view_context IS NULL + + return [self.normalize_name(row[0]) for row in connection.execute(s)] + + @reflection.cache + def get_view_names(self, connection, schema=None, **kw): + # see http://www.firebirdfaq.org/faq174/ + s = """ + select rdb$relation_name + from rdb$relations + where rdb$view_blr is not null + and (rdb$system_flag is null or rdb$system_flag = 0); + """ + return [self.normalize_name(row[0]) for row in connection.execute(s)] + + @reflection.cache + def get_view_definition(self, connection, view_name, schema=None, **kw): + qry = """ + SELECT rdb$view_source AS view_source + FROM rdb$relations + WHERE rdb$relation_name=? 
+ """ + rp = connection.execute(qry, [self.denormalize_name(view_name)]) + row = rp.first() + if row: + return row['view_source'] + else: + return None + + @reflection.cache + def get_pk_constraint(self, connection, table_name, schema=None, **kw): + # Query to extract the PK/FK constrained fields of the given table + keyqry = """ + SELECT se.rdb$field_name AS fname + FROM rdb$relation_constraints rc + JOIN rdb$index_segments se ON rc.rdb$index_name=se.rdb$index_name + WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=? + """ + tablename = self.denormalize_name(table_name) + # get primary key fields + c = connection.execute(keyqry, ["PRIMARY KEY", tablename]) + pkfields = [self.normalize_name(r['fname']) for r in c.fetchall()] + return {'constrained_columns': pkfields, 'name': None} + + @reflection.cache + def get_column_sequence(self, connection, + table_name, column_name, + schema=None, **kw): + tablename = self.denormalize_name(table_name) + colname = self.denormalize_name(column_name) + # Heuristic-query to determine the generator associated to a PK field + genqry = """ + SELECT trigdep.rdb$depended_on_name AS fgenerator + FROM rdb$dependencies tabdep + JOIN rdb$dependencies trigdep + ON tabdep.rdb$dependent_name=trigdep.rdb$dependent_name + AND trigdep.rdb$depended_on_type=14 + AND trigdep.rdb$dependent_type=2 + JOIN rdb$triggers trig ON + trig.rdb$trigger_name=tabdep.rdb$dependent_name + WHERE tabdep.rdb$depended_on_name=? + AND tabdep.rdb$depended_on_type=0 + AND trig.rdb$trigger_type=1 + AND tabdep.rdb$field_name=? + AND (SELECT count(*) + FROM rdb$dependencies trigdep2 + WHERE trigdep2.rdb$dependent_name = trigdep.rdb$dependent_name) = 2 + """ + genr = connection.execute(genqry, [tablename, colname]).first() + if genr is not None: + return dict(name=self.normalize_name(genr['fgenerator'])) + + @reflection.cache + def get_columns(self, connection, table_name, schema=None, **kw): + # Query to extract the details of all the fields of the given table + tblqry = """ + SELECT r.rdb$field_name AS fname, + r.rdb$null_flag AS null_flag, + t.rdb$type_name AS ftype, + f.rdb$field_sub_type AS stype, + f.rdb$field_length/ + COALESCE(cs.rdb$bytes_per_character,1) AS flen, + f.rdb$field_precision AS fprec, + f.rdb$field_scale AS fscale, + COALESCE(r.rdb$default_source, + f.rdb$default_source) AS fdefault + FROM rdb$relation_fields r + JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name + JOIN rdb$types t + ON t.rdb$type=f.rdb$field_type AND + t.rdb$field_name='RDB$FIELD_TYPE' + LEFT JOIN rdb$character_sets cs ON + f.rdb$character_set_id=cs.rdb$character_set_id + WHERE f.rdb$system_flag=0 AND r.rdb$relation_name=? 
+ ORDER BY r.rdb$field_position + """ + # get the PK, used to determine the eventual associated sequence + pk_constraint = self.get_pk_constraint(connection, table_name) + pkey_cols = pk_constraint['constrained_columns'] + + tablename = self.denormalize_name(table_name) + # get all of the fields for this table + c = connection.execute(tblqry, [tablename]) + cols = [] + while True: + row = c.fetchone() + if row is None: + break + name = self.normalize_name(row['fname']) + orig_colname = row['fname'] + + # get the data type + colspec = row['ftype'].rstrip() + coltype = self.ischema_names.get(colspec) + if coltype is None: + util.warn("Did not recognize type '%s' of column '%s'" % + (colspec, name)) + coltype = sqltypes.NULLTYPE + elif issubclass(coltype, Integer) and row['fprec'] != 0: + coltype = NUMERIC( + precision=row['fprec'], + scale=row['fscale'] * -1) + elif colspec in ('VARYING', 'CSTRING'): + coltype = coltype(row['flen']) + elif colspec == 'TEXT': + coltype = TEXT(row['flen']) + elif colspec == 'BLOB': + if row['stype'] == 1: + coltype = TEXT() + else: + coltype = BLOB() + else: + coltype = coltype() + + # does it have a default value? + defvalue = None + if row['fdefault'] is not None: + # the value comes down as "DEFAULT 'value'": there may be + # more than one whitespace around the "DEFAULT" keyword + # and it may also be lower case + # (see also http://tracker.firebirdsql.org/browse/CORE-356) + defexpr = row['fdefault'].lstrip() + assert defexpr[:8].rstrip().upper() == \ + 'DEFAULT', "Unrecognized default value: %s" % \ + defexpr + defvalue = defexpr[8:].strip() + if defvalue == 'NULL': + # Redundant + defvalue = None + col_d = { + 'name': name, + 'type': coltype, + 'nullable': not bool(row['null_flag']), + 'default': defvalue, + 'autoincrement': defvalue is None + } + + if orig_colname.lower() == orig_colname: + col_d['quote'] = True + + # if the PK is a single field, try to see if its linked to + # a sequence thru a trigger + if len(pkey_cols) == 1 and name == pkey_cols[0]: + seq_d = self.get_column_sequence(connection, tablename, name) + if seq_d is not None: + col_d['sequence'] = seq_d + + cols.append(col_d) + return cols + + @reflection.cache + def get_foreign_keys(self, connection, table_name, schema=None, **kw): + # Query to extract the details of each UK/FK of the given table + fkqry = """ + SELECT rc.rdb$constraint_name AS cname, + cse.rdb$field_name AS fname, + ix2.rdb$relation_name AS targetrname, + se.rdb$field_name AS targetfname + FROM rdb$relation_constraints rc + JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name + JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key + JOIN rdb$index_segments cse ON + cse.rdb$index_name=ix1.rdb$index_name + JOIN rdb$index_segments se + ON se.rdb$index_name=ix2.rdb$index_name + AND se.rdb$field_position=cse.rdb$field_position + WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=? 
+ ORDER BY se.rdb$index_name, se.rdb$field_position + """ + tablename = self.denormalize_name(table_name) + + c = connection.execute(fkqry, ["FOREIGN KEY", tablename]) + fks = util.defaultdict(lambda: { + 'name': None, + 'constrained_columns': [], + 'referred_schema': None, + 'referred_table': None, + 'referred_columns': [] + }) + + for row in c: + cname = self.normalize_name(row['cname']) + fk = fks[cname] + if not fk['name']: + fk['name'] = cname + fk['referred_table'] = self.normalize_name(row['targetrname']) + fk['constrained_columns'].append( + self.normalize_name(row['fname'])) + fk['referred_columns'].append( + self.normalize_name(row['targetfname'])) + return list(fks.values()) + + @reflection.cache + def get_indexes(self, connection, table_name, schema=None, **kw): + qry = """ + SELECT ix.rdb$index_name AS index_name, + ix.rdb$unique_flag AS unique_flag, + ic.rdb$field_name AS field_name + FROM rdb$indices ix + JOIN rdb$index_segments ic + ON ix.rdb$index_name=ic.rdb$index_name + LEFT OUTER JOIN rdb$relation_constraints + ON rdb$relation_constraints.rdb$index_name = + ic.rdb$index_name + WHERE ix.rdb$relation_name=? AND ix.rdb$foreign_key IS NULL + AND rdb$relation_constraints.rdb$constraint_type IS NULL + ORDER BY index_name, ic.rdb$field_position + """ + c = connection.execute(qry, [self.denormalize_name(table_name)]) + + indexes = util.defaultdict(dict) + for row in c: + indexrec = indexes[row['index_name']] + if 'name' not in indexrec: + indexrec['name'] = self.normalize_name(row['index_name']) + indexrec['column_names'] = [] + indexrec['unique'] = bool(row['unique_flag']) + + indexrec['column_names'].append( + self.normalize_name(row['field_name'])) + + return list(indexes.values()) diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/fdb.py b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/fdb.py new file mode 100644 index 0000000..aff8cff --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/fdb.py @@ -0,0 +1,118 @@ +# firebird/fdb.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: firebird+fdb + :name: fdb + :dbapi: pyodbc + :connectstring: firebird+fdb://user:password@host:port/path/to/db\ +[?key=value&key=value...] + :url: http://pypi.python.org/pypi/fdb/ + + fdb is a kinterbasdb compatible DBAPI for Firebird. + + .. versionadded:: 0.8 - Support for the fdb Firebird driver. + + .. versionchanged:: 0.9 - The fdb dialect is now the default dialect + under the ``firebird://`` URL space, as ``fdb`` is now the official + Python driver for Firebird. + +Arguments +---------- + +The ``fdb`` dialect is based on the +:mod:`sqlalchemy.dialects.firebird.kinterbasdb` dialect, however does not +accept every argument that Kinterbasdb does. + +* ``enable_rowcount`` - True by default, setting this to False disables + the usage of "cursor.rowcount" with the + Kinterbasdb dialect, which SQLAlchemy ordinarily calls upon automatically + after any UPDATE or DELETE statement. When disabled, SQLAlchemy's + ResultProxy will return -1 for result.rowcount. The rationale here is + that Kinterbasdb requires a second round trip to the database when + .rowcount is called - since SQLA's resultproxy automatically closes + the cursor after a non-result-returning statement, rowcount must be + called, if at all, before the result object is returned. 
Additionally, + cursor.rowcount may not return correct results with older versions + of Firebird, and setting this flag to False will also cause the + SQLAlchemy ORM to ignore its usage. The behavior can also be controlled on a + per-execution basis using the ``enable_rowcount`` option with + :meth:`.Connection.execution_options`:: + + conn = engine.connect().execution_options(enable_rowcount=True) + r = conn.execute(stmt) + print r.rowcount + +* ``retaining`` - False by default. Setting this to True will pass the + ``retaining=True`` keyword argument to the ``.commit()`` and ``.rollback()`` + methods of the DBAPI connection, which can improve performance in some + situations, but apparently with significant caveats. + Please read the fdb and/or kinterbasdb DBAPI documentation in order to + understand the implications of this flag. + + .. versionadded:: 0.8.2 - ``retaining`` keyword argument specifying + transaction retaining behavior - in 0.8 it defaults to ``True`` + for backwards compatibility. + + .. versionchanged:: 0.9.0 - the ``retaining`` flag defaults to ``False``. + In 0.8 it defaulted to ``True``. + + .. seealso:: + + http://pythonhosted.org/fdb/usage-guide.html#retaining-transactions + - information on the "retaining" flag. + +""" + +from .kinterbasdb import FBDialect_kinterbasdb +from ... import util + + +class FBDialect_fdb(FBDialect_kinterbasdb): + + def __init__(self, enable_rowcount=True, + retaining=False, **kwargs): + super(FBDialect_fdb, self).__init__( + enable_rowcount=enable_rowcount, + retaining=retaining, **kwargs) + + @classmethod + def dbapi(cls): + return __import__('fdb') + + def create_connect_args(self, url): + opts = url.translate_connect_args(username='user') + if opts.get('port'): + opts['host'] = "%s/%s" % (opts['host'], opts['port']) + del opts['port'] + opts.update(url.query) + + util.coerce_kw_type(opts, 'type_conv', int) + + return ([], opts) + + def _get_server_version_info(self, connection): + """Get the version of the Firebird server used by a connection. + + Returns a tuple of (`major`, `minor`, `build`), three integers + representing the version of the attached server. + """ + + # This is the simpler approach (the other uses the services api), + # that for backward compatibility reasons returns a string like + # LI-V6.3.3.12981 Firebird 2.0 + # where the first version is a fake one resembling the old + # Interbase signature. + + isc_info_firebird_version = 103 + fbconn = connection.connection + + version = fbconn.db_info(isc_info_firebird_version) + + return self._parse_version_info(version) + +dialect = FBDialect_fdb diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/kinterbasdb.py new file mode 100644 index 0000000..3df9f73 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/kinterbasdb.py @@ -0,0 +1,184 @@ +# firebird/kinterbasdb.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: firebird+kinterbasdb + :name: kinterbasdb + :dbapi: kinterbasdb + :connectstring: firebird+kinterbasdb://user:password@host:port/path/to/db\ +[?key=value&key=value...] 
+ :url: http://firebirdsql.org/index.php?op=devel&sub=python + +Arguments +---------- + +The Kinterbasdb backend accepts the ``enable_rowcount`` and ``retaining`` +arguments accepted by the :mod:`sqlalchemy.dialects.firebird.fdb` dialect. +In addition, it also accepts the following: + +* ``type_conv`` - select the kind of mapping done on the types: by default + SQLAlchemy uses 200 with Unicode, datetime and decimal support. See + the linked documents below for further information. + +* ``concurrency_level`` - set the backend policy with regards to threading + issues: by default SQLAlchemy uses policy 1. See the linked documents + below for further information. + +.. seealso:: + + http://sourceforge.net/projects/kinterbasdb + + http://kinterbasdb.sourceforge.net/dist_docs/usage.html#adv_param_conv_dynamic_type_translation + + http://kinterbasdb.sourceforge.net/dist_docs/usage.html#special_issue_concurrency + +""" + +from .base import FBDialect, FBExecutionContext +from ... import util, types as sqltypes +from re import match +import decimal + + +class _kinterbasdb_numeric(object): + def bind_processor(self, dialect): + def process(value): + if isinstance(value, decimal.Decimal): + return str(value) + else: + return value + return process + + +class _FBNumeric_kinterbasdb(_kinterbasdb_numeric, sqltypes.Numeric): + pass + + +class _FBFloat_kinterbasdb(_kinterbasdb_numeric, sqltypes.Float): + pass + + +class FBExecutionContext_kinterbasdb(FBExecutionContext): + @property + def rowcount(self): + if self.execution_options.get('enable_rowcount', + self.dialect.enable_rowcount): + return self.cursor.rowcount + else: + return -1 + + +class FBDialect_kinterbasdb(FBDialect): + driver = 'kinterbasdb' + supports_sane_rowcount = False + supports_sane_multi_rowcount = False + execution_ctx_cls = FBExecutionContext_kinterbasdb + + supports_native_decimal = True + + colspecs = util.update_copy( + FBDialect.colspecs, + { + sqltypes.Numeric: _FBNumeric_kinterbasdb, + sqltypes.Float: _FBFloat_kinterbasdb, + } + + ) + + def __init__(self, type_conv=200, concurrency_level=1, + enable_rowcount=True, + retaining=False, **kwargs): + super(FBDialect_kinterbasdb, self).__init__(**kwargs) + self.enable_rowcount = enable_rowcount + self.type_conv = type_conv + self.concurrency_level = concurrency_level + self.retaining = retaining + if enable_rowcount: + self.supports_sane_rowcount = True + + @classmethod + def dbapi(cls): + return __import__('kinterbasdb') + + def do_execute(self, cursor, statement, parameters, context=None): + # kinterbase does not accept a None, but wants an empty list + # when there are no arguments. 
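+        # e.g. DDL or other parameter-less statements arrive here with
+        # parameters=None; "parameters or []" below normalizes None (and
+        # any empty sequence) to the empty list kinterbasdb expects.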
+ cursor.execute(statement, parameters or []) + + def do_rollback(self, dbapi_connection): + dbapi_connection.rollback(self.retaining) + + def do_commit(self, dbapi_connection): + dbapi_connection.commit(self.retaining) + + def create_connect_args(self, url): + opts = url.translate_connect_args(username='user') + if opts.get('port'): + opts['host'] = "%s/%s" % (opts['host'], opts['port']) + del opts['port'] + opts.update(url.query) + + util.coerce_kw_type(opts, 'type_conv', int) + + type_conv = opts.pop('type_conv', self.type_conv) + concurrency_level = opts.pop('concurrency_level', + self.concurrency_level) + + if self.dbapi is not None: + initialized = getattr(self.dbapi, 'initialized', None) + if initialized is None: + # CVS rev 1.96 changed the name of the attribute: + # http://kinterbasdb.cvs.sourceforge.net/viewvc/kinterbasdb/ + # Kinterbasdb-3.0/__init__.py?r1=1.95&r2=1.96 + initialized = getattr(self.dbapi, '_initialized', False) + if not initialized: + self.dbapi.init(type_conv=type_conv, + concurrency_level=concurrency_level) + return ([], opts) + + def _get_server_version_info(self, connection): + """Get the version of the Firebird server used by a connection. + + Returns a tuple of (`major`, `minor`, `build`), three integers + representing the version of the attached server. + """ + + # This is the simpler approach (the other uses the services api), + # that for backward compatibility reasons returns a string like + # LI-V6.3.3.12981 Firebird 2.0 + # where the first version is a fake one resembling the old + # Interbase signature. + + fbconn = connection.connection + version = fbconn.server_version + + return self._parse_version_info(version) + + def _parse_version_info(self, version): + m = match( + '\w+-V(\d+)\.(\d+)\.(\d+)\.(\d+)( \w+ (\d+)\.(\d+))?', version) + if not m: + raise AssertionError( + "Could not determine version from string '%s'" % version) + + if m.group(5) != None: + return tuple([int(x) for x in m.group(6, 7, 4)] + ['firebird']) + else: + return tuple([int(x) for x in m.group(1, 2, 3)] + ['interbase']) + + def is_disconnect(self, e, connection, cursor): + if isinstance(e, (self.dbapi.OperationalError, + self.dbapi.ProgrammingError)): + msg = str(e) + return ('Unable to complete network request to host' in msg or + 'Invalid connection state' in msg or + 'Invalid cursor state' in msg or + 'connection shutdown' in msg) + else: + return False + +dialect = FBDialect_kinterbasdb diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/__init__.py new file mode 100644 index 0000000..8c9e858 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/__init__.py @@ -0,0 +1,27 @@ +# mssql/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from sqlalchemy.dialects.mssql import base, pyodbc, adodbapi, \ + pymssql, zxjdbc, mxodbc + +base.dialect = pyodbc.dialect + +from sqlalchemy.dialects.mssql.base import \ + INTEGER, BIGINT, SMALLINT, TINYINT, VARCHAR, NVARCHAR, CHAR, \ + NCHAR, TEXT, NTEXT, DECIMAL, NUMERIC, FLOAT, DATETIME,\ + DATETIME2, DATETIMEOFFSET, DATE, TIME, SMALLDATETIME, \ + BINARY, VARBINARY, BIT, REAL, IMAGE, TIMESTAMP,\ + MONEY, SMALLMONEY, UNIQUEIDENTIFIER, SQL_VARIANT, dialect + + +__all__ = ( + 'INTEGER', 'BIGINT', 'SMALLINT', 'TINYINT', 'VARCHAR', 'NVARCHAR', 'CHAR', + 'NCHAR', 'TEXT', 'NTEXT', 
'DECIMAL', 'NUMERIC', 'FLOAT', 'DATETIME', + 'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME', + 'BINARY', 'VARBINARY', 'BIT', 'REAL', 'IMAGE', 'TIMESTAMP', + 'MONEY', 'SMALLMONEY', 'UNIQUEIDENTIFIER', 'SQL_VARIANT', 'dialect' +) diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/adodbapi.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/adodbapi.py new file mode 100644 index 0000000..60fa25d --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/adodbapi.py @@ -0,0 +1,80 @@ +# mssql/adodbapi.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: mssql+adodbapi + :name: adodbapi + :dbapi: adodbapi + :connectstring: mssql+adodbapi://:@ + :url: http://adodbapi.sourceforge.net/ + +.. note:: + + The adodbapi dialect is not implemented SQLAlchemy versions 0.6 and + above at this time. + +""" +import datetime +from sqlalchemy import types as sqltypes, util +from sqlalchemy.dialects.mssql.base import MSDateTime, MSDialect +import sys + + +class MSDateTime_adodbapi(MSDateTime): + def result_processor(self, dialect, coltype): + def process(value): + # adodbapi will return datetimes with empty time + # values as datetime.date() objects. + # Promote them back to full datetime.datetime() + if type(value) is datetime.date: + return datetime.datetime(value.year, value.month, value.day) + return value + return process + + +class MSDialect_adodbapi(MSDialect): + supports_sane_rowcount = True + supports_sane_multi_rowcount = True + supports_unicode = sys.maxunicode == 65535 + supports_unicode_statements = True + driver = 'adodbapi' + + @classmethod + def import_dbapi(cls): + import adodbapi as module + return module + + colspecs = util.update_copy( + MSDialect.colspecs, + { + sqltypes.DateTime: MSDateTime_adodbapi + } + ) + + def create_connect_args(self, url): + keys = url.query + + connectors = ["Provider=SQLOLEDB"] + if 'port' in keys: + connectors.append("Data Source=%s, %s" % + (keys.get("host"), keys.get("port"))) + else: + connectors.append("Data Source=%s" % keys.get("host")) + connectors.append("Initial Catalog=%s" % keys.get("database")) + user = keys.get("user") + if user: + connectors.append("User Id=%s" % user) + connectors.append("Password=%s" % keys.get("password", "")) + else: + connectors.append("Integrated Security=SSPI") + return [[";".join(connectors)], {}] + + def is_disconnect(self, e, connection, cursor): + return isinstance(e, self.dbapi.adodbapi.DatabaseError) and \ + "'connection failure'" in str(e) + +dialect = MSDialect_adodbapi diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/base.py new file mode 100644 index 0000000..36a8a93 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/base.py @@ -0,0 +1,1929 @@ +# mssql/base.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: mssql + :name: Microsoft SQL Server + + +Auto Increment Behavior +----------------------- + +SQL Server provides so-called "auto incrementing" behavior using the +``IDENTITY`` construct, which can be placed on an integer primary key. 
+SQLAlchemy considers ``IDENTITY`` within its default "autoincrement" behavior, +described at :paramref:`.Column.autoincrement`; this means +that by default, the first integer primary key column in a :class:`.Table` +will be considered to be the identity column and will generate DDL as such:: + + from sqlalchemy import Table, MetaData, Column, Integer + + m = MetaData() + t = Table('t', m, + Column('id', Integer, primary_key=True), + Column('x', Integer)) + m.create_all(engine) + +The above example will generate DDL as: + +.. sourcecode:: sql + + CREATE TABLE t ( + id INTEGER NOT NULL IDENTITY(1,1), + x INTEGER NULL, + PRIMARY KEY (id) + ) + +For the case where this default generation of ``IDENTITY`` is not desired, +specify ``autoincrement=False`` on all integer primary key columns:: + + m = MetaData() + t = Table('t', m, + Column('id', Integer, primary_key=True, autoincrement=False), + Column('x', Integer)) + m.create_all(engine) + +.. note:: + + An INSERT statement which refers to an explicit value for such + a column is prohibited by SQL Server, however SQLAlchemy will detect this + and modify the ``IDENTITY_INSERT`` flag accordingly at statement execution + time. As this is not a high performing process, care should be taken to + set the ``autoincrement`` flag appropriately for columns that will not + actually require IDENTITY behavior. + +Controlling "Start" and "Increment" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Specific control over the parameters of the ``IDENTITY`` value is supported +using the :class:`.schema.Sequence` object. While this object normally +represents an explicit "sequence" for supporting backends, on SQL Server it is +re-purposed to specify behavior regarding the identity column, including +support of the "start" and "increment" values:: + + from sqlalchemy import Table, Integer, Sequence, Column + + Table('test', metadata, + Column('id', Integer, + Sequence('blah', start=100, increment=10), + primary_key=True), + Column('name', String(20)) + ).create(some_engine) + +would yield: + +.. sourcecode:: sql + + CREATE TABLE test ( + id INTEGER NOT NULL IDENTITY(100,10) PRIMARY KEY, + name VARCHAR(20) NULL, + ) + +Note that the ``start`` and ``increment`` values for sequences are +optional and will default to 1,1. + +INSERT behavior +^^^^^^^^^^^^^^^^ + +Handling of the ``IDENTITY`` column at INSERT time involves two key +techniques. The most common is being able to fetch the "last inserted value" +for a given ``IDENTITY`` column, a process which SQLAlchemy performs +implicitly in many cases, most importantly within the ORM. + +The process for fetching this value has several variants: + +* In the vast majority of cases, RETURNING is used in conjunction with INSERT + statements on SQL Server in order to get newly generated primary key values: + + .. sourcecode:: sql + + INSERT INTO t (x) OUTPUT inserted.id VALUES (?) + +* When RETURNING is not available or has been disabled via + ``implicit_returning=False``, either the ``scope_identity()`` function or + the ``@@identity`` variable is used; behavior varies by backend: + + * when using PyODBC, the phrase ``; select scope_identity()`` will be + appended to the end of the INSERT statement; a second result set will be + fetched in order to receive the value. Given a table as:: + + t = Table('t', m, Column('id', Integer, primary_key=True), + Column('x', Integer), + implicit_returning=False) + + an INSERT will look like: + + .. 
sourcecode:: sql
+
+        INSERT INTO t (x) VALUES (?); select scope_identity()
+
+* Other dialects such as pymssql will call upon
+  ``SELECT scope_identity() AS lastrowid`` subsequent to an INSERT
+  statement. If the flag ``use_scope_identity=False`` is passed to
+  :func:`.create_engine`, the statement ``SELECT @@identity AS lastrowid``
+  is used instead.
+
+A table that contains an ``IDENTITY`` column will prohibit an INSERT statement
+that refers to the identity column explicitly. The SQLAlchemy dialect will
+detect when an INSERT construct, created using a core :func:`.insert`
+construct (not a plain string SQL), refers to the identity column, and
+in this case will emit ``SET IDENTITY_INSERT ON`` prior to the insert
+statement proceeding, and ``SET IDENTITY_INSERT OFF`` subsequent to the
+execution. Given this example::
+
+    m = MetaData()
+    t = Table('t', m, Column('id', Integer, primary_key=True),
+              Column('x', Integer))
+    m.create_all(engine)
+
+    engine.execute(t.insert(), {'id': 1, 'x':1}, {'id':2, 'x':2})
+
+The above column will be created with IDENTITY, however the INSERT statement
+we emit is specifying explicit values. In the echo output we can see
+how SQLAlchemy handles this:
+
+.. sourcecode:: sql
+
+    CREATE TABLE t (
+        id INTEGER NOT NULL IDENTITY(1,1),
+        x INTEGER NULL,
+        PRIMARY KEY (id)
+    )
+
+    COMMIT
+    SET IDENTITY_INSERT t ON
+    INSERT INTO t (id, x) VALUES (?, ?)
+    ((1, 1), (2, 2))
+    SET IDENTITY_INSERT t OFF
+    COMMIT
+
+This is an auxiliary use case suitable for testing and bulk insert scenarios.
+
+.. _legacy_schema_rendering:
+
+Rendering of SQL statements that include schema qualifiers
+----------------------------------------------------------
+
+When using :class:`.Table` metadata that includes a "schema" qualifier,
+such as::
+
+    account_table = Table(
+        'account', metadata,
+        Column('id', Integer, primary_key=True),
+        Column('info', String(100)),
+        schema="customer_schema"
+    )
+
+The SQL Server dialect has a long-standing behavior that it will attempt
+to turn a schema-qualified table name into an alias, such as::
+
+    >>> eng = create_engine("mssql+pymssql://mydsn")
+    >>> print(account_table.select().compile(eng))
+    SELECT account_1.id, account_1.info
+    FROM customer_schema.account AS account_1
+
+This behavior is legacy, does not function correctly for many forms
+of SQL statements, and will be disabled by default in the 1.1 series
+of SQLAlchemy. As of 1.0.5, the above statement will produce the following
+warning::
+
+    SAWarning: legacy_schema_aliasing flag is defaulted to True;
+    some schema-qualified queries may not function correctly.
+    Consider setting this flag to False for modern SQL Server versions;
+    this flag will default to False in version 1.1
+
+This warning encourages the :class:`.Engine` to be created as follows::
+
+    >>> eng = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=False)
+
+Where the above SELECT statement will produce::
+
+    >>> print(account_table.select().compile(eng))
+    SELECT customer_schema.account.id, customer_schema.account.info
+    FROM customer_schema.account
+
+The warning will not emit if the ``legacy_schema_aliasing`` flag is set
+to either True or False.
+
+.. versionadded:: 1.0.5 - Added the ``legacy_schema_aliasing`` flag to disable
+   the SQL Server dialect's legacy behavior with schema-qualified table
+   names. This flag will default to False in version 1.1.
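+
+Conversely, an application that needs more time with the legacy behavior
+can set the flag explicitly to ``True``; per the above, an explicit
+setting of either value suppresses the warning (sketch only, reusing the
+illustrative DSN from above)::
+
+    >>> eng = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=True)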
+ +Collation Support +----------------- + +Character collations are supported by the base string types, +specified by the string argument "collation":: + + from sqlalchemy import VARCHAR + Column('login', VARCHAR(32, collation='Latin1_General_CI_AS')) + +When such a column is associated with a :class:`.Table`, the +CREATE TABLE statement for this column will yield:: + + login VARCHAR(32) COLLATE Latin1_General_CI_AS NULL + +.. versionadded:: 0.8 Character collations are now part of the base string + types. + +LIMIT/OFFSET Support +-------------------- + +MSSQL has no support for the LIMIT or OFFSET keywords. LIMIT is +supported directly through the ``TOP`` Transact SQL keyword:: + + select.limit + +will yield:: + + SELECT TOP n + +If using SQL Server 2005 or above, LIMIT with OFFSET +support is available through the ``ROW_NUMBER OVER`` construct. +For versions below 2005, LIMIT with OFFSET usage will fail. + +Nullability +----------- +MSSQL has support for three levels of column nullability. The default +nullability allows nulls and is explicit in the CREATE TABLE +construct:: + + name VARCHAR(20) NULL + +If ``nullable=None`` is specified then no specification is made. In +other words the database's configured default is used. This will +render:: + + name VARCHAR(20) + +If ``nullable`` is ``True`` or ``False`` then the column will be +``NULL` or ``NOT NULL`` respectively. + +Date / Time Handling +-------------------- +DATE and TIME are supported. Bind parameters are converted +to datetime.datetime() objects as required by most MSSQL drivers, +and results are processed from strings if needed. +The DATE and TIME types are not available for MSSQL 2005 and +previous - if a server version below 2008 is detected, DDL +for these types will be issued as DATETIME. + +.. _mssql_large_type_deprecation: + +Large Text/Binary Type Deprecation +---------------------------------- + +Per `SQL Server 2012/2014 Documentation `_, +the ``NTEXT``, ``TEXT`` and ``IMAGE`` datatypes are to be removed from SQL Server +in a future release. SQLAlchemy normally relates these types to the +:class:`.UnicodeText`, :class:`.Text` and :class:`.LargeBinary` datatypes. + +In order to accommodate this change, a new flag ``deprecate_large_types`` +is added to the dialect, which will be automatically set based on detection +of the server version in use, if not otherwise set by the user. The +behavior of this flag is as follows: + +* When this flag is ``True``, the :class:`.UnicodeText`, :class:`.Text` and + :class:`.LargeBinary` datatypes, when used to render DDL, will render the + types ``NVARCHAR(max)``, ``VARCHAR(max)``, and ``VARBINARY(max)``, + respectively. This is a new behavior as of the addition of this flag. + +* When this flag is ``False``, the :class:`.UnicodeText`, :class:`.Text` and + :class:`.LargeBinary` datatypes, when used to render DDL, will render the + types ``NTEXT``, ``TEXT``, and ``IMAGE``, + respectively. This is the long-standing behavior of these types. + +* The flag begins with the value ``None``, before a database connection is + established. If the dialect is used to render DDL without the flag being + set, it is interpreted the same as ``False``. + +* On first connection, the dialect detects if SQL Server version 2012 or greater + is in use; if the flag is still at ``None``, it sets it to ``True`` or + ``False`` based on whether 2012 or greater is detected. 
+ +* The flag can be set to either ``True`` or ``False`` when the dialect + is created, typically via :func:`.create_engine`:: + + eng = create_engine("mssql+pymssql://user:pass@host/db", + deprecate_large_types=True) + +* Complete control over whether the "old" or "new" types are rendered is + available in all SQLAlchemy versions by using the UPPERCASE type objects + instead: :class:`.NVARCHAR`, :class:`.VARCHAR`, :class:`.types.VARBINARY`, + :class:`.TEXT`, :class:`.mssql.NTEXT`, :class:`.mssql.IMAGE` will always remain + fixed and always output exactly that type. + +.. versionadded:: 1.0.0 + +.. _mssql_indexes: + +Clustered Index Support +----------------------- + +The MSSQL dialect supports clustered indexes (and primary keys) via the +``mssql_clustered`` option. This option is available to :class:`.Index`, +:class:`.UniqueConstraint`. and :class:`.PrimaryKeyConstraint`. + +To generate a clustered index:: + + Index("my_index", table.c.x, mssql_clustered=True) + +which renders the index as ``CREATE CLUSTERED INDEX my_index ON table (x)``. + +.. versionadded:: 0.8 + +To generate a clustered primary key use:: + + Table('my_table', metadata, + Column('x', ...), + Column('y', ...), + PrimaryKeyConstraint("x", "y", mssql_clustered=True)) + +which will render the table, for example, as:: + + CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL, + PRIMARY KEY CLUSTERED (x, y)) + +Similarly, we can generate a clustered unique constraint using:: + + Table('my_table', metadata, + Column('x', ...), + Column('y', ...), + PrimaryKeyConstraint("x"), + UniqueConstraint("y", mssql_clustered=True), + ) + + .. versionadded:: 0.9.2 + +MSSQL-Specific Index Options +----------------------------- + +In addition to clustering, the MSSQL dialect supports other special options +for :class:`.Index`. + +INCLUDE +^^^^^^^ + +The ``mssql_include`` option renders INCLUDE(colname) for the given string +names:: + + Index("my_index", table.c.x, mssql_include=['y']) + +would render the index as ``CREATE INDEX my_index ON table (x) INCLUDE (y)`` + +.. versionadded:: 0.8 + +Index ordering +^^^^^^^^^^^^^^ + +Index ordering is available via functional expressions, such as:: + + Index("my_index", table.c.x.desc()) + +would render the index as ``CREATE INDEX my_index ON table (x DESC)`` + +.. versionadded:: 0.8 + +.. seealso:: + + :ref:`schema_indexes_functional` + +Compatibility Levels +-------------------- +MSSQL supports the notion of setting compatibility levels at the +database level. This allows, for instance, to run a database that +is compatible with SQL2000 while running on a SQL2005 database +server. ``server_version_info`` will always return the database +server version information (in this case SQL2005) and not the +compatibility level information. Because of this, if running under +a backwards compatibility mode SQAlchemy may attempt to use T-SQL +statements that are unable to be parsed by the database server. + +Triggers +-------- + +SQLAlchemy by default uses OUTPUT INSERTED to get at newly +generated primary key values via IDENTITY columns or other +server side defaults. MS-SQL does not +allow the usage of OUTPUT INSERTED on tables that have triggers. +To disable the usage of OUTPUT INSERTED on a per-table basis, +specify ``implicit_returning=False`` for each :class:`.Table` +which has triggers:: + + Table('mytable', metadata, + Column('id', Integer, primary_key=True), + # ..., + implicit_returning=False + ) + +Declarative form:: + + class MyClass(Base): + # ... 
+ __table_args__ = {'implicit_returning':False} + + +This option can also be specified engine-wide using the +``implicit_returning=False`` argument on :func:`.create_engine`. + +Enabling Snapshot Isolation +--------------------------- + +Not necessarily specific to SQLAlchemy, SQL Server has a default transaction +isolation mode that locks entire tables, and causes even mildly concurrent +applications to have long held locks and frequent deadlocks. +Enabling snapshot isolation for the database as a whole is recommended +for modern levels of concurrency support. This is accomplished via the +following ALTER DATABASE commands executed at the SQL prompt:: + + ALTER DATABASE MyDatabase SET ALLOW_SNAPSHOT_ISOLATION ON + + ALTER DATABASE MyDatabase SET READ_COMMITTED_SNAPSHOT ON + +Background on SQL Server snapshot isolation is available at +http://msdn.microsoft.com/en-us/library/ms175095.aspx. + +Known Issues +------------ + +* No support for more than one ``IDENTITY`` column per table +* reflection of indexes does not work with versions older than + SQL Server 2005 + +""" +import datetime +import operator +import re + +from ... import sql, schema as sa_schema, exc, util +from ...sql import compiler, expression, util as sql_util +from ... import engine +from ...engine import reflection, default +from ... import types as sqltypes +from ...types import INTEGER, BIGINT, SMALLINT, DECIMAL, NUMERIC, \ + FLOAT, TIMESTAMP, DATETIME, DATE, BINARY,\ + TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR + + +from ...util import update_wrapper +from . import information_schema as ischema + +# http://sqlserverbuilds.blogspot.com/ +MS_2012_VERSION = (11,) +MS_2008_VERSION = (10,) +MS_2005_VERSION = (9,) +MS_2000_VERSION = (8,) + +RESERVED_WORDS = set( + ['add', 'all', 'alter', 'and', 'any', 'as', 'asc', 'authorization', + 'backup', 'begin', 'between', 'break', 'browse', 'bulk', 'by', 'cascade', + 'case', 'check', 'checkpoint', 'close', 'clustered', 'coalesce', + 'collate', 'column', 'commit', 'compute', 'constraint', 'contains', + 'containstable', 'continue', 'convert', 'create', 'cross', 'current', + 'current_date', 'current_time', 'current_timestamp', 'current_user', + 'cursor', 'database', 'dbcc', 'deallocate', 'declare', 'default', + 'delete', 'deny', 'desc', 'disk', 'distinct', 'distributed', 'double', + 'drop', 'dump', 'else', 'end', 'errlvl', 'escape', 'except', 'exec', + 'execute', 'exists', 'exit', 'external', 'fetch', 'file', 'fillfactor', + 'for', 'foreign', 'freetext', 'freetexttable', 'from', 'full', + 'function', 'goto', 'grant', 'group', 'having', 'holdlock', 'identity', + 'identity_insert', 'identitycol', 'if', 'in', 'index', 'inner', 'insert', + 'intersect', 'into', 'is', 'join', 'key', 'kill', 'left', 'like', + 'lineno', 'load', 'merge', 'national', 'nocheck', 'nonclustered', 'not', + 'null', 'nullif', 'of', 'off', 'offsets', 'on', 'open', 'opendatasource', + 'openquery', 'openrowset', 'openxml', 'option', 'or', 'order', 'outer', + 'over', 'percent', 'pivot', 'plan', 'precision', 'primary', 'print', + 'proc', 'procedure', 'public', 'raiserror', 'read', 'readtext', + 'reconfigure', 'references', 'replication', 'restore', 'restrict', + 'return', 'revert', 'revoke', 'right', 'rollback', 'rowcount', + 'rowguidcol', 'rule', 'save', 'schema', 'securityaudit', 'select', + 'session_user', 'set', 'setuser', 'shutdown', 'some', 'statistics', + 'system_user', 'table', 'tablesample', 'textsize', 'then', 'to', 'top', + 'tran', 'transaction', 'trigger', 'truncate', 'tsequal', 'union', + 'unique', 'unpivot', 'update', 
'updatetext', 'use', 'user', 'values', + 'varying', 'view', 'waitfor', 'when', 'where', 'while', 'with', + 'writetext', + ]) + + +class REAL(sqltypes.REAL): + __visit_name__ = 'REAL' + + def __init__(self, **kw): + # REAL is a synonym for FLOAT(24) on SQL server + kw['precision'] = 24 + super(REAL, self).__init__(**kw) + + +class TINYINT(sqltypes.Integer): + __visit_name__ = 'TINYINT' + + +# MSSQL DATE/TIME types have varied behavior, sometimes returning +# strings. MSDate/TIME check for everything, and always +# filter bind parameters into datetime objects (required by pyodbc, +# not sure about other dialects). + +class _MSDate(sqltypes.Date): + + def bind_processor(self, dialect): + def process(value): + if type(value) == datetime.date: + return datetime.datetime(value.year, value.month, value.day) + else: + return value + return process + + _reg = re.compile(r"(\d+)-(\d+)-(\d+)") + + def result_processor(self, dialect, coltype): + def process(value): + if isinstance(value, datetime.datetime): + return value.date() + elif isinstance(value, util.string_types): + m = self._reg.match(value) + if not m: + raise ValueError( + "could not parse %r as a date value" % (value, )) + return datetime.date(*[ + int(x or 0) + for x in m.groups() + ]) + else: + return value + return process + + +class TIME(sqltypes.TIME): + + def __init__(self, precision=None, **kwargs): + self.precision = precision + super(TIME, self).__init__() + + __zero_date = datetime.date(1900, 1, 1) + + def bind_processor(self, dialect): + def process(value): + if isinstance(value, datetime.datetime): + value = datetime.datetime.combine( + self.__zero_date, value.time()) + elif isinstance(value, datetime.time): + value = datetime.datetime.combine(self.__zero_date, value) + return value + return process + + _reg = re.compile(r"(\d+):(\d+):(\d+)(?:\.(\d{0,6}))?") + + def result_processor(self, dialect, coltype): + def process(value): + if isinstance(value, datetime.datetime): + return value.time() + elif isinstance(value, util.string_types): + m = self._reg.match(value) + if not m: + raise ValueError( + "could not parse %r as a time value" % (value, )) + return datetime.time(*[ + int(x or 0) + for x in m.groups()]) + else: + return value + return process +_MSTime = TIME + + +class _DateTimeBase(object): + + def bind_processor(self, dialect): + def process(value): + if type(value) == datetime.date: + return datetime.datetime(value.year, value.month, value.day) + else: + return value + return process + + +class _MSDateTime(_DateTimeBase, sqltypes.DateTime): + pass + + +class SMALLDATETIME(_DateTimeBase, sqltypes.DateTime): + __visit_name__ = 'SMALLDATETIME' + + +class DATETIME2(_DateTimeBase, sqltypes.DateTime): + __visit_name__ = 'DATETIME2' + + def __init__(self, precision=None, **kw): + super(DATETIME2, self).__init__(**kw) + self.precision = precision + + +# TODO: is this not an Interval ? +class DATETIMEOFFSET(sqltypes.TypeEngine): + __visit_name__ = 'DATETIMEOFFSET' + + def __init__(self, precision=None, **kwargs): + self.precision = precision + + +class _StringType(object): + + """Base for MSSQL string types.""" + + def __init__(self, collation=None): + super(_StringType, self).__init__(collation=collation) + + +class NTEXT(sqltypes.UnicodeText): + + """MSSQL NTEXT type, for variable-length unicode text up to 2^30 + characters.""" + + __visit_name__ = 'NTEXT' + + +class VARBINARY(sqltypes.VARBINARY, sqltypes.LargeBinary): + """The MSSQL VARBINARY type. 
+ + This type extends both :class:`.types.VARBINARY` and + :class:`.types.LargeBinary`. In "deprecate_large_types" mode, + the :class:`.types.LargeBinary` type will produce ``VARBINARY(max)`` + on SQL Server. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :ref:`mssql_large_type_deprecation` + + + + """ + __visit_name__ = 'VARBINARY' + + +class IMAGE(sqltypes.LargeBinary): + __visit_name__ = 'IMAGE' + + +class BIT(sqltypes.TypeEngine): + __visit_name__ = 'BIT' + + +class MONEY(sqltypes.TypeEngine): + __visit_name__ = 'MONEY' + + +class SMALLMONEY(sqltypes.TypeEngine): + __visit_name__ = 'SMALLMONEY' + + +class UNIQUEIDENTIFIER(sqltypes.TypeEngine): + __visit_name__ = "UNIQUEIDENTIFIER" + + +class SQL_VARIANT(sqltypes.TypeEngine): + __visit_name__ = 'SQL_VARIANT' + +# old names. +MSDateTime = _MSDateTime +MSDate = _MSDate +MSReal = REAL +MSTinyInteger = TINYINT +MSTime = TIME +MSSmallDateTime = SMALLDATETIME +MSDateTime2 = DATETIME2 +MSDateTimeOffset = DATETIMEOFFSET +MSText = TEXT +MSNText = NTEXT +MSString = VARCHAR +MSNVarchar = NVARCHAR +MSChar = CHAR +MSNChar = NCHAR +MSBinary = BINARY +MSVarBinary = VARBINARY +MSImage = IMAGE +MSBit = BIT +MSMoney = MONEY +MSSmallMoney = SMALLMONEY +MSUniqueIdentifier = UNIQUEIDENTIFIER +MSVariant = SQL_VARIANT + +ischema_names = { + 'int': INTEGER, + 'bigint': BIGINT, + 'smallint': SMALLINT, + 'tinyint': TINYINT, + 'varchar': VARCHAR, + 'nvarchar': NVARCHAR, + 'char': CHAR, + 'nchar': NCHAR, + 'text': TEXT, + 'ntext': NTEXT, + 'decimal': DECIMAL, + 'numeric': NUMERIC, + 'float': FLOAT, + 'datetime': DATETIME, + 'datetime2': DATETIME2, + 'datetimeoffset': DATETIMEOFFSET, + 'date': DATE, + 'time': TIME, + 'smalldatetime': SMALLDATETIME, + 'binary': BINARY, + 'varbinary': VARBINARY, + 'bit': BIT, + 'real': REAL, + 'image': IMAGE, + 'timestamp': TIMESTAMP, + 'money': MONEY, + 'smallmoney': SMALLMONEY, + 'uniqueidentifier': UNIQUEIDENTIFIER, + 'sql_variant': SQL_VARIANT, +} + + +class MSTypeCompiler(compiler.GenericTypeCompiler): + def _extend(self, spec, type_, length=None): + """Extend a string-type declaration with standard SQL + COLLATE annotations. 
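+
+        For example (illustrative values), a ``VARCHAR`` with ``length=100``
+        and ``collation='Latin1_General_CI_AS'`` comes out as::
+
+            VARCHAR(100) COLLATE Latin1_General_CI_AS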
+ + """ + + if getattr(type_, 'collation', None): + collation = 'COLLATE %s' % type_.collation + else: + collation = None + + if not length: + length = type_.length + + if length: + spec = spec + "(%s)" % length + + return ' '.join([c for c in (spec, collation) + if c is not None]) + + def visit_FLOAT(self, type_, **kw): + precision = getattr(type_, 'precision', None) + if precision is None: + return "FLOAT" + else: + return "FLOAT(%(precision)s)" % {'precision': precision} + + def visit_TINYINT(self, type_, **kw): + return "TINYINT" + + def visit_DATETIMEOFFSET(self, type_, **kw): + if type_.precision is not None: + return "DATETIMEOFFSET(%s)" % type_.precision + else: + return "DATETIMEOFFSET" + + def visit_TIME(self, type_, **kw): + precision = getattr(type_, 'precision', None) + if precision is not None: + return "TIME(%s)" % precision + else: + return "TIME" + + def visit_DATETIME2(self, type_, **kw): + precision = getattr(type_, 'precision', None) + if precision is not None: + return "DATETIME2(%s)" % precision + else: + return "DATETIME2" + + def visit_SMALLDATETIME(self, type_, **kw): + return "SMALLDATETIME" + + def visit_unicode(self, type_, **kw): + return self.visit_NVARCHAR(type_, **kw) + + def visit_text(self, type_, **kw): + if self.dialect.deprecate_large_types: + return self.visit_VARCHAR(type_, **kw) + else: + return self.visit_TEXT(type_, **kw) + + def visit_unicode_text(self, type_, **kw): + if self.dialect.deprecate_large_types: + return self.visit_NVARCHAR(type_, **kw) + else: + return self.visit_NTEXT(type_, **kw) + + def visit_NTEXT(self, type_, **kw): + return self._extend("NTEXT", type_) + + def visit_TEXT(self, type_, **kw): + return self._extend("TEXT", type_) + + def visit_VARCHAR(self, type_, **kw): + return self._extend("VARCHAR", type_, length=type_.length or 'max') + + def visit_CHAR(self, type_, **kw): + return self._extend("CHAR", type_) + + def visit_NCHAR(self, type_, **kw): + return self._extend("NCHAR", type_) + + def visit_NVARCHAR(self, type_, **kw): + return self._extend("NVARCHAR", type_, length=type_.length or 'max') + + def visit_date(self, type_, **kw): + if self.dialect.server_version_info < MS_2008_VERSION: + return self.visit_DATETIME(type_, **kw) + else: + return self.visit_DATE(type_, **kw) + + def visit_time(self, type_, **kw): + if self.dialect.server_version_info < MS_2008_VERSION: + return self.visit_DATETIME(type_, **kw) + else: + return self.visit_TIME(type_, **kw) + + def visit_large_binary(self, type_, **kw): + if self.dialect.deprecate_large_types: + return self.visit_VARBINARY(type_, **kw) + else: + return self.visit_IMAGE(type_, **kw) + + def visit_IMAGE(self, type_, **kw): + return "IMAGE" + + def visit_VARBINARY(self, type_, **kw): + return self._extend( + "VARBINARY", + type_, + length=type_.length or 'max') + + def visit_boolean(self, type_, **kw): + return self.visit_BIT(type_) + + def visit_BIT(self, type_, **kw): + return "BIT" + + def visit_MONEY(self, type_, **kw): + return "MONEY" + + def visit_SMALLMONEY(self, type_, **kw): + return 'SMALLMONEY' + + def visit_UNIQUEIDENTIFIER(self, type_, **kw): + return "UNIQUEIDENTIFIER" + + def visit_SQL_VARIANT(self, type_, **kw): + return 'SQL_VARIANT' + + +class MSExecutionContext(default.DefaultExecutionContext): + _enable_identity_insert = False + _select_lastrowid = False + _result_proxy = None + _lastrowid = None + + def _opt_encode(self, statement): + if not self.dialect.supports_unicode_statements: + return self.dialect._encoder(statement)[0] + else: + return statement + + 
def pre_exec(self): + """Activate IDENTITY_INSERT if needed.""" + + if self.isinsert: + tbl = self.compiled.statement.table + seq_column = tbl._autoincrement_column + insert_has_sequence = seq_column is not None + + if insert_has_sequence: + self._enable_identity_insert = \ + seq_column.key in self.compiled_parameters[0] or \ + ( + self.compiled.statement.parameters and ( + ( + self.compiled.statement._has_multi_parameters + and + seq_column.key in + self.compiled.statement.parameters[0] + ) or ( + not + self.compiled.statement._has_multi_parameters + and + seq_column.key in + self.compiled.statement.parameters + ) + ) + ) + else: + self._enable_identity_insert = False + + self._select_lastrowid = insert_has_sequence and \ + not self.compiled.returning and \ + not self._enable_identity_insert and \ + not self.executemany + + if self._enable_identity_insert: + self.root_connection._cursor_execute( + self.cursor, + self._opt_encode( + "SET IDENTITY_INSERT %s ON" % + self.dialect.identifier_preparer.format_table(tbl)), + (), + self) + + def post_exec(self): + """Disable IDENTITY_INSERT if enabled.""" + + conn = self.root_connection + if self._select_lastrowid: + if self.dialect.use_scope_identity: + conn._cursor_execute( + self.cursor, + "SELECT scope_identity() AS lastrowid", (), self) + else: + conn._cursor_execute(self.cursor, + "SELECT @@identity AS lastrowid", + (), + self) + # fetchall() ensures the cursor is consumed without closing it + row = self.cursor.fetchall()[0] + self._lastrowid = int(row[0]) + + if (self.isinsert or self.isupdate or self.isdelete) and \ + self.compiled.returning: + self._result_proxy = engine.FullyBufferedResultProxy(self) + + if self._enable_identity_insert: + conn._cursor_execute( + self.cursor, + self._opt_encode( + "SET IDENTITY_INSERT %s OFF" % + self.dialect.identifier_preparer. format_table( + self.compiled.statement.table)), + (), + self) + + def get_lastrowid(self): + return self._lastrowid + + def handle_dbapi_exception(self, e): + if self._enable_identity_insert: + try: + self.cursor.execute( + self._opt_encode( + "SET IDENTITY_INSERT %s OFF" % + self.dialect.identifier_preparer. 
format_table( + self.compiled.statement.table))) + except Exception: + pass + + def get_result_proxy(self): + if self._result_proxy: + return self._result_proxy + else: + return engine.ResultProxy(self) + + +class MSSQLCompiler(compiler.SQLCompiler): + returning_precedes_values = True + + extract_map = util.update_copy( + compiler.SQLCompiler.extract_map, + { + 'doy': 'dayofyear', + 'dow': 'weekday', + 'milliseconds': 'millisecond', + 'microseconds': 'microsecond' + }) + + def __init__(self, *args, **kwargs): + self.tablealiases = {} + super(MSSQLCompiler, self).__init__(*args, **kwargs) + + def _with_legacy_schema_aliasing(fn): + def decorate(self, *arg, **kw): + if self.dialect.legacy_schema_aliasing: + return fn(self, *arg, **kw) + else: + super_ = getattr(super(MSSQLCompiler, self), fn.__name__) + return super_(*arg, **kw) + return decorate + + def visit_now_func(self, fn, **kw): + return "CURRENT_TIMESTAMP" + + def visit_current_date_func(self, fn, **kw): + return "GETDATE()" + + def visit_length_func(self, fn, **kw): + return "LEN%s" % self.function_argspec(fn, **kw) + + def visit_char_length_func(self, fn, **kw): + return "LEN%s" % self.function_argspec(fn, **kw) + + def visit_concat_op_binary(self, binary, operator, **kw): + return "%s + %s" % \ + (self.process(binary.left, **kw), + self.process(binary.right, **kw)) + + def visit_true(self, expr, **kw): + return '1' + + def visit_false(self, expr, **kw): + return '0' + + def visit_match_op_binary(self, binary, operator, **kw): + return "CONTAINS (%s, %s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw)) + + def get_select_precolumns(self, select, **kw): + """MS-SQL puts TOP, its version of LIMIT, here.""" + + s = "" + if select._distinct: + s += "DISTINCT " + + if select._simple_int_limit and not select._offset: + # ODBC drivers and possibly others + # don't support bind params in the SELECT clause on SQL Server, + # so we have to render a literal here. + s += "TOP %d " % select._limit + + if s: + return s + else: + return compiler.SQLCompiler.get_select_precolumns( + self, select, **kw) + + def get_from_hint_text(self, table, text): + return text + + def get_crud_hint_text(self, table, text): + return text + + def limit_clause(self, select, **kw): + # Limit in mssql is after the select keyword + return "" + + def visit_select(self, select, **kwargs): + """Look for ``LIMIT`` and ``OFFSET`` in a select statement; if + present, try to wrap the statement in a subquery with a + ``row_number()`` criterion. + + """ + if ( + ( + not select._simple_int_limit and + select._limit_clause is not None + ) or ( + select._offset_clause is not None and + not select._simple_int_offset or select._offset + ) + ) and not getattr(select, '_mssql_visit', None): + + # to use ROW_NUMBER(), an ORDER BY is required.
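+            # Illustrative shape of the rewritten query produced below
+            # (column and alias names assumed):
+            #   SELECT anon_1.col1, anon_1.col2 FROM (
+            #       SELECT t.col1, t.col2,
+            #              ROW_NUMBER() OVER (ORDER BY t.col1) AS mssql_rn
+            #       FROM t) AS anon_1
+            #   WHERE anon_1.mssql_rn > :offset
+            #     AND anon_1.mssql_rn <= :limit + :offset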
+ if not select._order_by_clause.clauses: + raise exc.CompileError('MSSQL requires an order_by when ' + 'using an OFFSET or a non-simple ' + 'LIMIT clause') + + _order_by_clauses = select._order_by_clause.clauses + limit_clause = select._limit_clause + offset_clause = select._offset_clause + kwargs['select_wraps_for'] = select + select = select._generate() + select._mssql_visit = True + select = select.column( + sql.func.ROW_NUMBER().over(order_by=_order_by_clauses) + .label("mssql_rn")).order_by(None).alias() + + mssql_rn = sql.column('mssql_rn') + limitselect = sql.select([c for c in select.c if + c.key != 'mssql_rn']) + if offset_clause is not None: + limitselect.append_whereclause(mssql_rn > offset_clause) + if limit_clause is not None: + limitselect.append_whereclause( + mssql_rn <= (limit_clause + offset_clause)) + else: + limitselect.append_whereclause( + mssql_rn <= (limit_clause)) + return self.process(limitselect, **kwargs) + else: + return compiler.SQLCompiler.visit_select(self, select, **kwargs) + + @_with_legacy_schema_aliasing + def visit_table(self, table, mssql_aliased=False, iscrud=False, **kwargs): + if mssql_aliased is table or iscrud: + return super(MSSQLCompiler, self).visit_table(table, **kwargs) + + # alias schema-qualified tables + alias = self._schema_aliased_table(table) + if alias is not None: + return self.process(alias, mssql_aliased=table, **kwargs) + else: + return super(MSSQLCompiler, self).visit_table(table, **kwargs) + + @_with_legacy_schema_aliasing + def visit_alias(self, alias, **kw): + # translate for schema-qualified table aliases + kw['mssql_aliased'] = alias.original + return super(MSSQLCompiler, self).visit_alias(alias, **kw) + + @_with_legacy_schema_aliasing + def visit_column(self, column, add_to_result_map=None, **kw): + if column.table is not None and \ + (not self.isupdate and not self.isdelete) or \ + self.is_subquery(): + # translate for schema-qualified table aliases + t = self._schema_aliased_table(column.table) + if t is not None: + converted = expression._corresponding_column_or_error( + t, column) + if add_to_result_map is not None: + add_to_result_map( + column.name, + column.name, + (column, column.name, column.key), + column.type + ) + + return super(MSSQLCompiler, self).\ + visit_column(converted, **kw) + + return super(MSSQLCompiler, self).visit_column( + column, add_to_result_map=add_to_result_map, **kw) + + def _schema_aliased_table(self, table): + if getattr(table, 'schema', None) is not None: + if self.dialect._warn_schema_aliasing and \ + table.schema.lower() != 'information_schema': + util.warn( + "legacy_schema_aliasing flag is defaulted to True; " + "some schema-qualified queries may not function " + "correctly. 
Consider setting this flag to False for " + "modern SQL Server versions; this flag will default to " + "False in version 1.1") + + if table not in self.tablealiases: + self.tablealiases[table] = table.alias() + return self.tablealiases[table] + else: + return None + + def visit_extract(self, extract, **kw): + field = self.extract_map.get(extract.field, extract.field) + return 'DATEPART(%s, %s)' % \ + (field, self.process(extract.expr, **kw)) + + def visit_savepoint(self, savepoint_stmt): + return "SAVE TRANSACTION %s" % \ + self.preparer.format_savepoint(savepoint_stmt) + + def visit_rollback_to_savepoint(self, savepoint_stmt): + return ("ROLLBACK TRANSACTION %s" + % self.preparer.format_savepoint(savepoint_stmt)) + + def visit_binary(self, binary, **kwargs): + """Move bind parameters to the right-hand side of an operator, where + possible. + + """ + if ( + isinstance(binary.left, expression.BindParameter) + and binary.operator == operator.eq + and not isinstance(binary.right, expression.BindParameter) + ): + return self.process( + expression.BinaryExpression(binary.right, + binary.left, + binary.operator), + **kwargs) + return super(MSSQLCompiler, self).visit_binary(binary, **kwargs) + + def returning_clause(self, stmt, returning_cols): + + if self.isinsert or self.isupdate: + target = stmt.table.alias("inserted") + else: + target = stmt.table.alias("deleted") + + adapter = sql_util.ClauseAdapter(target) + + columns = [ + self._label_select_column(None, adapter.traverse(c), + True, False, {}) + for c in expression._select_iterables(returning_cols) + ] + + return 'OUTPUT ' + ', '.join(columns) + + def get_cte_preamble(self, recursive): + # SQL Server finds it too inconvenient to accept + # an entirely optional, SQL standard specified, + # "RECURSIVE" word with their "WITH", + # so here we go + return "WITH" + + def label_select_column(self, select, column, asfrom): + if isinstance(column, expression.Function): + return column.label(None) + else: + return super(MSSQLCompiler, self).\ + label_select_column(select, column, asfrom) + + def for_update_clause(self, select): + # "FOR UPDATE" is only allowed on "DECLARE CURSOR" which + # SQLAlchemy doesn't use + return '' + + def order_by_clause(self, select, **kw): + order_by = self.process(select._order_by_clause, **kw) + + # MSSQL only allows ORDER BY in subqueries if there is a LIMIT + if order_by and (not self.is_subquery() or select._limit): + return " ORDER BY " + order_by + else: + return "" + + def update_from_clause(self, update_stmt, + from_table, extra_froms, + from_hints, + **kw): + """Render the UPDATE..FROM clause specific to MSSQL. + + In MSSQL, if the UPDATE statement involves an alias of the table to + be updated, then the table itself must be added to the FROM list as + well. Otherwise, it is optional. Here, we add it regardless. + + """ + return "FROM " + ', '.join( + t._compiler_dispatch(self, asfrom=True, + fromhints=from_hints, **kw) + for t in [from_table] + extra_froms) + + +class MSSQLStrictCompiler(MSSQLCompiler): + + """A subclass of MSSQLCompiler which disables the usage of bind + parameters where not allowed natively by MS-SQL. + + A dialect may use this compiler on a platform where native + binds are used. 
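+
+    For example, under these rules an expression such as
+    ``mytable.c.id.in_([1, 2])`` is rendered with inline literals,
+    roughly ``id IN (1, 2)``, rather than with bound parameters.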
+ + """ + ansi_bind_rules = True + + def visit_in_op_binary(self, binary, operator, **kw): + kw['literal_binds'] = True + return "%s IN %s" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw) + ) + + def visit_notin_op_binary(self, binary, operator, **kw): + kw['literal_binds'] = True + return "%s NOT IN %s" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw) + ) + + def render_literal_value(self, value, type_): + """ + For date and datetime values, convert to a string + format acceptable to MSSQL. That seems to be the + so-called ODBC canonical date format which looks + like this: + + yyyy-mm-dd hh:mi:ss.mmm(24h) + + For other data types, call the base class implementation. + """ + # datetime and date are both subclasses of datetime.date + if issubclass(type(value), datetime.date): + # SQL Server wants single quotes around the date string. + return "'" + str(value) + "'" + else: + return super(MSSQLStrictCompiler, self).\ + render_literal_value(value, type_) + + +class MSDDLCompiler(compiler.DDLCompiler): + + def get_column_specification(self, column, **kwargs): + colspec = ( + self.preparer.format_column(column) + " " + + self.dialect.type_compiler.process( + column.type, type_expression=column) + ) + + if column.nullable is not None: + if not column.nullable or column.primary_key or \ + isinstance(column.default, sa_schema.Sequence): + colspec += " NOT NULL" + else: + colspec += " NULL" + + if column.table is None: + raise exc.CompileError( + "mssql requires Table-bound columns " + "in order to generate DDL") + + # install an IDENTITY Sequence if we either a sequence or an implicit + # IDENTITY column + if isinstance(column.default, sa_schema.Sequence): + if column.default.start == 0: + start = 0 + else: + start = column.default.start or 1 + + colspec += " IDENTITY(%s,%s)" % (start, + column.default.increment or 1) + elif column is column.table._autoincrement_column: + colspec += " IDENTITY(1,1)" + else: + default = self.get_column_default_string(column) + if default is not None: + colspec += " DEFAULT " + default + + return colspec + + def visit_create_index(self, create, include_schema=False): + index = create.element + self._verify_index_table(index) + preparer = self.preparer + text = "CREATE " + if index.unique: + text += "UNIQUE " + + # handle clustering option + if index.dialect_options['mssql']['clustered']: + text += "CLUSTERED " + + text += "INDEX %s ON %s (%s)" \ + % ( + self._prepared_index_name(index, + include_schema=include_schema), + preparer.format_table(index.table), + ', '.join( + self.sql_compiler.process(expr, + include_table=False, + literal_binds=True) for + expr in index.expressions) + ) + + # handle other included columns + if index.dialect_options['mssql']['include']: + inclusions = [index.table.c[col] + if isinstance(col, util.string_types) else col + for col in + index.dialect_options['mssql']['include'] + ] + + text += " INCLUDE (%s)" \ + % ', '.join([preparer.quote(c.name) + for c in inclusions]) + + return text + + def visit_drop_index(self, drop): + return "\nDROP INDEX %s ON %s" % ( + self._prepared_index_name(drop.element, include_schema=False), + self.preparer.format_table(drop.element.table) + ) + + def visit_primary_key_constraint(self, constraint): + if len(constraint) == 0: + return '' + text = "" + if constraint.name is not None: + text += "CONSTRAINT %s " % \ + self.preparer.format_constraint(constraint) + text += "PRIMARY KEY " + + if constraint.dialect_options['mssql']['clustered']: + text += 
"CLUSTERED " + + text += "(%s)" % ', '.join(self.preparer.quote(c.name) + for c in constraint) + text += self.define_constraint_deferrability(constraint) + return text + + def visit_unique_constraint(self, constraint): + if len(constraint) == 0: + return '' + text = "" + if constraint.name is not None: + text += "CONSTRAINT %s " % \ + self.preparer.format_constraint(constraint) + text += "UNIQUE " + + if constraint.dialect_options['mssql']['clustered']: + text += "CLUSTERED " + + text += "(%s)" % ', '.join(self.preparer.quote(c.name) + for c in constraint) + text += self.define_constraint_deferrability(constraint) + return text + + +class MSIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words = RESERVED_WORDS + + def __init__(self, dialect): + super(MSIdentifierPreparer, self).__init__(dialect, initial_quote='[', + final_quote=']') + + def _escape_identifier(self, value): + return value + + def quote_schema(self, schema, force=None): + """Prepare a quoted table and schema name.""" + result = '.'.join([self.quote(x, force) for x in schema.split('.')]) + return result + + +def _db_plus_owner_listing(fn): + def wrap(dialect, connection, schema=None, **kw): + dbname, owner = _owner_plus_db(dialect, schema) + return _switch_db(dbname, connection, fn, dialect, connection, + dbname, owner, schema, **kw) + return update_wrapper(wrap, fn) + + +def _db_plus_owner(fn): + def wrap(dialect, connection, tablename, schema=None, **kw): + dbname, owner = _owner_plus_db(dialect, schema) + return _switch_db(dbname, connection, fn, dialect, connection, + tablename, dbname, owner, schema, **kw) + return update_wrapper(wrap, fn) + + +def _switch_db(dbname, connection, fn, *arg, **kw): + if dbname: + current_db = connection.scalar("select db_name()") + connection.execute("use %s" % dbname) + try: + return fn(*arg, **kw) + finally: + if dbname: + connection.execute("use %s" % current_db) + + +def _owner_plus_db(dialect, schema): + if not schema: + return None, dialect.default_schema_name + elif "." 
in schema: + return schema.split(".", 1) + else: + return None, schema + + +class MSDialect(default.DefaultDialect): + name = 'mssql' + supports_default_values = True + supports_empty_insert = False + execution_ctx_cls = MSExecutionContext + use_scope_identity = True + max_identifier_length = 128 + schema_name = "dbo" + + colspecs = { + sqltypes.DateTime: _MSDateTime, + sqltypes.Date: _MSDate, + sqltypes.Time: TIME, + } + + engine_config_types = default.DefaultDialect.engine_config_types.union([ + ('legacy_schema_aliasing', util.asbool), + ]) + + ischema_names = ischema_names + + supports_native_boolean = False + supports_unicode_binds = True + postfetch_lastrowid = True + + server_version_info = () + + statement_compiler = MSSQLCompiler + ddl_compiler = MSDDLCompiler + type_compiler = MSTypeCompiler + preparer = MSIdentifierPreparer + + construct_arguments = [ + (sa_schema.PrimaryKeyConstraint, { + "clustered": False + }), + (sa_schema.UniqueConstraint, { + "clustered": False + }), + (sa_schema.Index, { + "clustered": False, + "include": None + }) + ] + + def __init__(self, + query_timeout=None, + use_scope_identity=True, + max_identifier_length=None, + schema_name="dbo", + deprecate_large_types=None, + legacy_schema_aliasing=None, **opts): + self.query_timeout = int(query_timeout or 0) + self.schema_name = schema_name + + self.use_scope_identity = use_scope_identity + self.max_identifier_length = int(max_identifier_length or 0) or \ + self.max_identifier_length + self.deprecate_large_types = deprecate_large_types + + if legacy_schema_aliasing is None: + self.legacy_schema_aliasing = True + self._warn_schema_aliasing = True + else: + self.legacy_schema_aliasing = legacy_schema_aliasing + self._warn_schema_aliasing = False + + super(MSDialect, self).__init__(**opts) + + def do_savepoint(self, connection, name): + # give the DBAPI a push + connection.execute("IF @@TRANCOUNT = 0 BEGIN TRANSACTION") + super(MSDialect, self).do_savepoint(connection, name) + + def do_release_savepoint(self, connection, name): + # SQL Server does not support RELEASE SAVEPOINT + pass + + def initialize(self, connection): + super(MSDialect, self).initialize(connection) + self._setup_version_attributes() + + def _setup_version_attributes(self): + if self.server_version_info[0] not in list(range(8, 17)): + # FreeTDS with version 4.2 seems to report here + # a number like "95.10.255". Don't know what + # that is. So emit warning. + # Use TDS Version 7.0 through 7.3, per the MS information here: + # https://msdn.microsoft.com/en-us/library/dd339982.aspx + # and FreeTDS information here (7.3 highest supported version): + # http://www.freetds.org/userguide/choosingtdsprotocol.htm + util.warn( + "Unrecognized server version info '%s'. Version specific " + "behaviors may not function properly. If using ODBC " + "with FreeTDS, ensure TDS_VERSION 7.0 through 7.3, not " + "4.2, is configured in the FreeTDS configuration." 
% + ".".join(str(x) for x in self.server_version_info)) + if self.server_version_info >= MS_2005_VERSION and \ + 'implicit_returning' not in self.__dict__: + self.implicit_returning = True + if self.server_version_info >= MS_2008_VERSION: + self.supports_multivalues_insert = True + if self.deprecate_large_types is None: + self.deprecate_large_types = \ + self.server_version_info >= MS_2012_VERSION + + def _get_default_schema_name(self, connection): + if self.server_version_info < MS_2005_VERSION: + return self.schema_name + + query = sql.text(""" + SELECT default_schema_name FROM + sys.database_principals + WHERE principal_id=database_principal_id() + """) + default_schema_name = connection.scalar(query) + if default_schema_name is not None: + return util.text_type(default_schema_name) + else: + return self.schema_name + + @_db_plus_owner + def has_table(self, connection, tablename, dbname, owner, schema): + columns = ischema.columns + + whereclause = columns.c.table_name == tablename + + if owner: + whereclause = sql.and_(whereclause, + columns.c.table_schema == owner) + s = sql.select([columns], whereclause) + c = connection.execute(s) + return c.first() is not None + + @reflection.cache + def get_schema_names(self, connection, **kw): + s = sql.select([ischema.schemata.c.schema_name], + order_by=[ischema.schemata.c.schema_name] + ) + schema_names = [r[0] for r in connection.execute(s)] + return schema_names + + @reflection.cache + @_db_plus_owner_listing + def get_table_names(self, connection, dbname, owner, schema, **kw): + tables = ischema.tables + s = sql.select([tables.c.table_name], + sql.and_( + tables.c.table_schema == owner, + tables.c.table_type == 'BASE TABLE' + ), + order_by=[tables.c.table_name] + ) + table_names = [r[0] for r in connection.execute(s)] + return table_names + + @reflection.cache + @_db_plus_owner_listing + def get_view_names(self, connection, dbname, owner, schema, **kw): + tables = ischema.tables + s = sql.select([tables.c.table_name], + sql.and_( + tables.c.table_schema == owner, + tables.c.table_type == 'VIEW' + ), + order_by=[tables.c.table_name] + ) + view_names = [r[0] for r in connection.execute(s)] + return view_names + + @reflection.cache + @_db_plus_owner + def get_indexes(self, connection, tablename, dbname, owner, schema, **kw): + # using system catalogs, don't support index reflection + # below MS 2005 + if self.server_version_info < MS_2005_VERSION: + return [] + + rp = connection.execute( + sql.text("select ind.index_id, ind.is_unique, ind.name " + "from sys.indexes as ind join sys.tables as tab on " + "ind.object_id=tab.object_id " + "join sys.schemas as sch on sch.schema_id=tab.schema_id " + "where tab.name = :tabname " + "and sch.name=:schname " + "and ind.is_primary_key=0", + bindparams=[ + sql.bindparam('tabname', tablename, + sqltypes.String(convert_unicode=True)), + sql.bindparam('schname', owner, + sqltypes.String(convert_unicode=True)) + ], + typemap={ + 'name': sqltypes.Unicode() + } + ) + ) + indexes = {} + for row in rp: + indexes[row['index_id']] = { + 'name': row['name'], + 'unique': row['is_unique'] == 1, + 'column_names': [] + } + rp = connection.execute( + sql.text( + "select ind_col.index_id, ind_col.object_id, col.name " + "from sys.columns as col " + "join sys.tables as tab on tab.object_id=col.object_id " + "join sys.index_columns as ind_col on " + "(ind_col.column_id=col.column_id and " + "ind_col.object_id=tab.object_id) " + "join sys.schemas as sch on sch.schema_id=tab.schema_id " + "where tab.name=:tabname " + "and 
sch.name=:schname", + bindparams=[ + sql.bindparam('tabname', tablename, + sqltypes.String(convert_unicode=True)), + sql.bindparam('schname', owner, + sqltypes.String(convert_unicode=True)) + ], + typemap={'name': sqltypes.Unicode()} + ), + ) + for row in rp: + if row['index_id'] in indexes: + indexes[row['index_id']]['column_names'].append(row['name']) + + return list(indexes.values()) + + @reflection.cache + @_db_plus_owner + def get_view_definition(self, connection, viewname, + dbname, owner, schema, **kw): + rp = connection.execute( + sql.text( + "select definition from sys.sql_modules as mod, " + "sys.views as views, " + "sys.schemas as sch" + " where " + "mod.object_id=views.object_id and " + "views.schema_id=sch.schema_id and " + "views.name=:viewname and sch.name=:schname", + bindparams=[ + sql.bindparam('viewname', viewname, + sqltypes.String(convert_unicode=True)), + sql.bindparam('schname', owner, + sqltypes.String(convert_unicode=True)) + ] + ) + ) + + if rp: + view_def = rp.scalar() + return view_def + + @reflection.cache + @_db_plus_owner + def get_columns(self, connection, tablename, dbname, owner, schema, **kw): + # Get base columns + columns = ischema.columns + if owner: + whereclause = sql.and_(columns.c.table_name == tablename, + columns.c.table_schema == owner) + else: + whereclause = columns.c.table_name == tablename + s = sql.select([columns], whereclause, + order_by=[columns.c.ordinal_position]) + + c = connection.execute(s) + cols = [] + while True: + row = c.fetchone() + if row is None: + break + (name, type, nullable, charlen, + numericprec, numericscale, default, collation) = ( + row[columns.c.column_name], + row[columns.c.data_type], + row[columns.c.is_nullable] == 'YES', + row[columns.c.character_maximum_length], + row[columns.c.numeric_precision], + row[columns.c.numeric_scale], + row[columns.c.column_default], + row[columns.c.collation_name] + ) + coltype = self.ischema_names.get(type, None) + + kwargs = {} + if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText, + MSNText, MSBinary, MSVarBinary, + sqltypes.LargeBinary): + if charlen == -1: + charlen = 'max' + kwargs['length'] = charlen + if collation: + kwargs['collation'] = collation + + if coltype is None: + util.warn( + "Did not recognize type '%s' of column '%s'" % + (type, name)) + coltype = sqltypes.NULLTYPE + else: + if issubclass(coltype, sqltypes.Numeric) and \ + coltype is not MSReal: + kwargs['scale'] = numericscale + kwargs['precision'] = numericprec + + coltype = coltype(**kwargs) + cdict = { + 'name': name, + 'type': coltype, + 'nullable': nullable, + 'default': default, + 'autoincrement': False, + } + cols.append(cdict) + # autoincrement and identity + colmap = {} + for col in cols: + colmap[col['name']] = col + # We also run an sp_columns to check for identity columns: + cursor = connection.execute("sp_columns @table_name = '%s', " + "@table_owner = '%s'" + % (tablename, owner)) + ic = None + while True: + row = cursor.fetchone() + if row is None: + break + (col_name, type_name) = row[3], row[5] + if type_name.endswith("identity") and col_name in colmap: + ic = col_name + colmap[col_name]['autoincrement'] = True + colmap[col_name]['sequence'] = dict( + name='%s_identity' % col_name) + break + cursor.close() + + if ic is not None and self.server_version_info >= MS_2005_VERSION: + table_fullname = "%s.%s" % (owner, tablename) + cursor = connection.execute( + "select ident_seed('%s'), ident_incr('%s')" + % (table_fullname, table_fullname) + ) + + row = cursor.first() + if row is not None 
and row[0] is not None: + colmap[ic]['sequence'].update({ + 'start': int(row[0]), + 'increment': int(row[1]) + }) + return cols + + @reflection.cache + @_db_plus_owner + def get_pk_constraint(self, connection, tablename, + dbname, owner, schema, **kw): + pkeys = [] + TC = ischema.constraints + C = ischema.key_constraints.alias('C') + + # Primary key constraints + s = sql.select([C.c.column_name, + TC.c.constraint_type, + C.c.constraint_name], + sql.and_(TC.c.constraint_name == C.c.constraint_name, + TC.c.table_schema == C.c.table_schema, + C.c.table_name == tablename, + C.c.table_schema == owner) + ) + c = connection.execute(s) + constraint_name = None + for row in c: + if 'PRIMARY' in row[TC.c.constraint_type.name]: + pkeys.append(row[0]) + if constraint_name is None: + constraint_name = row[C.c.constraint_name.name] + return {'constrained_columns': pkeys, 'name': constraint_name} + + @reflection.cache + @_db_plus_owner + def get_foreign_keys(self, connection, tablename, + dbname, owner, schema, **kw): + RR = ischema.ref_constraints + C = ischema.key_constraints.alias('C') + R = ischema.key_constraints.alias('R') + + # Foreign key constraints + s = sql.select([C.c.column_name, + R.c.table_schema, R.c.table_name, R.c.column_name, + RR.c.constraint_name, RR.c.match_option, + RR.c.update_rule, + RR.c.delete_rule], + sql.and_(C.c.table_name == tablename, + C.c.table_schema == owner, + C.c.constraint_name == RR.c.constraint_name, + R.c.constraint_name == + RR.c.unique_constraint_name, + C.c.ordinal_position == R.c.ordinal_position + ), + order_by=[RR.c.constraint_name, R.c.ordinal_position] + ) + + # group rows by constraint ID, to handle multi-column FKs + fkeys = [] + fknm, scols, rcols = (None, [], []) + + def fkey_rec(): + return { + 'name': None, + 'constrained_columns': [], + 'referred_schema': None, + 'referred_table': None, + 'referred_columns': [] + } + + fkeys = util.defaultdict(fkey_rec) + + for r in connection.execute(s).fetchall(): + scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r + + rec = fkeys[rfknm] + rec['name'] = rfknm + if not rec['referred_table']: + rec['referred_table'] = rtbl + if schema is not None or owner != rschema: + if dbname: + rschema = dbname + "." + rschema + rec['referred_schema'] = rschema + + local_cols, remote_cols = \ + rec['constrained_columns'],\ + rec['referred_columns'] + + local_cols.append(scol) + remote_cols.append(rcol) + + return list(fkeys.values()) diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/information_schema.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/information_schema.py new file mode 100644 index 0000000..e2c0a46 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/information_schema.py @@ -0,0 +1,136 @@ +# mssql/information_schema.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +# TODO: should be using the sys. catalog with SQL Server, not information +# schema + +from ... import Table, MetaData, Column +from ...types import String, Unicode, UnicodeText, Integer, TypeDecorator +from ... import cast +from ... 
import util +from ...sql import expression +from ...ext.compiler import compiles + +ischema = MetaData() + + +class CoerceUnicode(TypeDecorator): + impl = Unicode + + def process_bind_param(self, value, dialect): + if util.py2k and isinstance(value, util.binary_type): + value = value.decode(dialect.encoding) + return value + + def bind_expression(self, bindvalue): + return _cast_on_2005(bindvalue) + + +class _cast_on_2005(expression.ColumnElement): + def __init__(self, bindvalue): + self.bindvalue = bindvalue + + +@compiles(_cast_on_2005) +def _compile(element, compiler, **kw): + from . import base + if compiler.dialect.server_version_info < base.MS_2005_VERSION: + return compiler.process(element.bindvalue, **kw) + else: + return compiler.process(cast(element.bindvalue, Unicode), **kw) + +schemata = Table("SCHEMATA", ischema, + Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"), + Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"), + Column("SCHEMA_OWNER", CoerceUnicode, key="schema_owner"), + schema="INFORMATION_SCHEMA") + +tables = Table("TABLES", ischema, + Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"), + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column( + "TABLE_TYPE", String(convert_unicode=True), + key="table_type"), + schema="INFORMATION_SCHEMA") + +columns = Table("COLUMNS", ischema, + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column("COLUMN_NAME", CoerceUnicode, key="column_name"), + Column("IS_NULLABLE", Integer, key="is_nullable"), + Column("DATA_TYPE", String, key="data_type"), + Column("ORDINAL_POSITION", Integer, key="ordinal_position"), + Column("CHARACTER_MAXIMUM_LENGTH", Integer, + key="character_maximum_length"), + Column("NUMERIC_PRECISION", Integer, key="numeric_precision"), + Column("NUMERIC_SCALE", Integer, key="numeric_scale"), + Column("COLUMN_DEFAULT", Integer, key="column_default"), + Column("COLLATION_NAME", String, key="collation_name"), + schema="INFORMATION_SCHEMA") + +constraints = Table("TABLE_CONSTRAINTS", ischema, + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column("CONSTRAINT_NAME", CoerceUnicode, + key="constraint_name"), + Column("CONSTRAINT_TYPE", String( + convert_unicode=True), key="constraint_type"), + schema="INFORMATION_SCHEMA") + +column_constraints = Table("CONSTRAINT_COLUMN_USAGE", ischema, + Column("TABLE_SCHEMA", CoerceUnicode, + key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, + key="table_name"), + Column("COLUMN_NAME", CoerceUnicode, + key="column_name"), + Column("CONSTRAINT_NAME", CoerceUnicode, + key="constraint_name"), + schema="INFORMATION_SCHEMA") + +key_constraints = Table("KEY_COLUMN_USAGE", ischema, + Column("TABLE_SCHEMA", CoerceUnicode, + key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, + key="table_name"), + Column("COLUMN_NAME", CoerceUnicode, + key="column_name"), + Column("CONSTRAINT_NAME", CoerceUnicode, + key="constraint_name"), + Column("ORDINAL_POSITION", Integer, + key="ordinal_position"), + schema="INFORMATION_SCHEMA") + +ref_constraints = Table("REFERENTIAL_CONSTRAINTS", ischema, + Column("CONSTRAINT_CATALOG", CoerceUnicode, + key="constraint_catalog"), + Column("CONSTRAINT_SCHEMA", CoerceUnicode, + key="constraint_schema"), + Column("CONSTRAINT_NAME", CoerceUnicode, + key="constraint_name"), + # TODO: is CATLOG misspelled ? 
+ Column("UNIQUE_CONSTRAINT_CATLOG", CoerceUnicode, + key="unique_constraint_catalog"), + + Column("UNIQUE_CONSTRAINT_SCHEMA", CoerceUnicode, + key="unique_constraint_schema"), + Column("UNIQUE_CONSTRAINT_NAME", CoerceUnicode, + key="unique_constraint_name"), + Column("MATCH_OPTION", String, key="match_option"), + Column("UPDATE_RULE", String, key="update_rule"), + Column("DELETE_RULE", String, key="delete_rule"), + schema="INFORMATION_SCHEMA") + +views = Table("VIEWS", ischema, + Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"), + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column("VIEW_DEFINITION", CoerceUnicode, key="view_definition"), + Column("CHECK_OPTION", String, key="check_option"), + Column("IS_UPDATABLE", String, key="is_updatable"), + schema="INFORMATION_SCHEMA") diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/mxodbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/mxodbc.py new file mode 100644 index 0000000..5e20ed1 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/mxodbc.py @@ -0,0 +1,112 @@ +# mssql/mxodbc.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: mssql+mxodbc + :name: mxODBC + :dbapi: mxodbc + :connectstring: mssql+mxodbc://:@ + :url: http://www.egenix.com/ + +Execution Modes +--------------- + +mxODBC features two styles of statement execution, using the +``cursor.execute()`` and ``cursor.executedirect()`` methods (the second being +an extension to the DBAPI specification). The former makes use of a particular +API call specific to the SQL Server Native Client ODBC driver known +SQLDescribeParam, while the latter does not. + +mxODBC apparently only makes repeated use of a single prepared statement +when SQLDescribeParam is used. The advantage to prepared statement reuse is +one of performance. The disadvantage is that SQLDescribeParam has a limited +set of scenarios in which bind parameters are understood, including that they +cannot be placed within the argument lists of function calls, anywhere outside +the FROM, or even within subqueries within the FROM clause - making the usage +of bind parameters within SELECT statements impossible for all but the most +simplistic statements. + +For this reason, the mxODBC dialect uses the "native" mode by default only for +INSERT, UPDATE, and DELETE statements, and uses the escaped string mode for +all other statements. + +This behavior can be controlled via +:meth:`~sqlalchemy.sql.expression.Executable.execution_options` using the +``native_odbc_execute`` flag with a value of ``True`` or ``False``, where a +value of ``True`` will unconditionally use native bind parameters and a value +of ``False`` will unconditionally use string-escaped parameters. + +""" + + +from ... import types as sqltypes +from ...connectors.mxodbc import MxODBCConnector +from .pyodbc import MSExecutionContext_pyodbc, _MSNumeric_pyodbc +from .base import (MSDialect, + MSSQLStrictCompiler, + _MSDateTime, _MSDate, _MSTime) + + +class _MSNumeric_mxodbc(_MSNumeric_pyodbc): + """Include pyodbc's numeric processor. 
+ """ + + +class _MSDate_mxodbc(_MSDate): + def bind_processor(self, dialect): + def process(value): + if value is not None: + return "%s-%s-%s" % (value.year, value.month, value.day) + else: + return None + return process + + +class _MSTime_mxodbc(_MSTime): + def bind_processor(self, dialect): + def process(value): + if value is not None: + return "%s:%s:%s" % (value.hour, value.minute, value.second) + else: + return None + return process + + +class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc): + """ + The pyodbc execution context is useful for enabling + SELECT SCOPE_IDENTITY in cases where OUTPUT clause + does not work (tables with insert triggers). + """ + # todo - investigate whether the pyodbc execution context + # is really only being used in cases where OUTPUT + # won't work. + + +class MSDialect_mxodbc(MxODBCConnector, MSDialect): + + # this is only needed if "native ODBC" mode is used, + # which is now disabled by default. + # statement_compiler = MSSQLStrictCompiler + + execution_ctx_cls = MSExecutionContext_mxodbc + + # flag used by _MSNumeric_mxodbc + _need_decimal_fix = True + + colspecs = { + sqltypes.Numeric: _MSNumeric_mxodbc, + sqltypes.DateTime: _MSDateTime, + sqltypes.Date: _MSDate_mxodbc, + sqltypes.Time: _MSTime_mxodbc, + } + + def __init__(self, description_encoding=None, **params): + super(MSDialect_mxodbc, self).__init__(**params) + self.description_encoding = description_encoding + +dialect = MSDialect_mxodbc diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pymssql.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pymssql.py new file mode 100644 index 0000000..e3a4db8 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pymssql.py @@ -0,0 +1,96 @@ +# mssql/pymssql.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: mssql+pymssql + :name: pymssql + :dbapi: pymssql + :connectstring: mssql+pymssql://:@?\ +charset=utf8 + :url: http://pymssql.org/ + +pymssql is a Python module that provides a Python DBAPI interface around +`FreeTDS `_. Compatible builds are available for +Linux, MacOSX and Windows platforms. + +""" +from .base import MSDialect +from ... import types as sqltypes, util, processors +import re + + +class _MSNumeric_pymssql(sqltypes.Numeric): + def result_processor(self, dialect, type_): + if not self.asdecimal: + return processors.to_float + else: + return sqltypes.Numeric.result_processor(self, dialect, type_) + + +class MSDialect_pymssql(MSDialect): + supports_sane_rowcount = False + driver = 'pymssql' + + colspecs = util.update_copy( + MSDialect.colspecs, + { + sqltypes.Numeric: _MSNumeric_pymssql, + sqltypes.Float: sqltypes.Float, + } + ) + + @classmethod + def dbapi(cls): + module = __import__('pymssql') + # pymmsql < 2.1.1 doesn't have a Binary method. 
we use string + client_ver = tuple(int(x) for x in module.__version__.split(".")) + if client_ver < (2, 1, 1): + # TODO: monkeypatching here is less than ideal + module.Binary = lambda x: x if hasattr(x, 'decode') else str(x) + + if client_ver < (1, ): + util.warn("The pymssql dialect expects at least " + "the 1.0 series of the pymssql DBAPI.") + return module + + def __init__(self, **params): + super(MSDialect_pymssql, self).__init__(**params) + self.use_scope_identity = True + + def _get_server_version_info(self, connection): + vers = connection.scalar("select @@version") + m = re.match( + r"Microsoft .*? - (\d+).(\d+).(\d+).(\d+)", vers) + if m: + return tuple(int(x) for x in m.group(1, 2, 3, 4)) + else: + return None + + def create_connect_args(self, url): + opts = url.translate_connect_args(username='user') + opts.update(url.query) + port = opts.pop('port', None) + if port and 'host' in opts: + opts['host'] = "%s:%s" % (opts['host'], port) + return [[], opts] + + def is_disconnect(self, e, connection, cursor): + for msg in ( + "Adaptive Server connection timed out", + "Net-Lib error during Connection reset by peer", + "message 20003", # connection timeout + "Error 10054", + "Not connected to any MS SQL server", + "Connection is closed", + "message 20006", # Write to the server failed + ): + if msg in str(e): + return True + else: + return False + +dialect = MSDialect_pymssql diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pyodbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pyodbc.py new file mode 100644 index 0000000..c938368 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pyodbc.py @@ -0,0 +1,265 @@ +# mssql/pyodbc.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: mssql+pyodbc + :name: PyODBC + :dbapi: pyodbc + :connectstring: mssql+pyodbc://:@ + :url: http://pypi.python.org/pypi/pyodbc/ + +Connecting to PyODBC +-------------------- + +The URL here is to be translated to PyODBC connection strings, as +detailed in `ConnectionStrings `_. + +DSN Connections +^^^^^^^^^^^^^^^ + +A DSN-based connection is **preferred** overall when using ODBC. A +basic DSN-based connection looks like:: + + engine = create_engine("mssql+pyodbc://scott:tiger@some_dsn") + +Which above, will pass the following connection string to PyODBC:: + + dsn=mydsn;UID=user;PWD=pass + +If the username and password are omitted, the DSN form will also add +the ``Trusted_Connection=yes`` directive to the ODBC string. + +Hostname Connections +^^^^^^^^^^^^^^^^^^^^ + +Hostname-based connections are **not preferred**, however are supported. +The ODBC driver name must be explicitly specified:: + + engine = create_engine("mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=SQL+Server+Native+Client+10.0") + +.. versionchanged:: 1.0.0 Hostname-based PyODBC connections now require the + SQL Server driver name specified explicitly. SQLAlchemy cannot + choose an optimal default here as it varies based on platform + and installed drivers. + +Other keywords interpreted by the Pyodbc dialect to be passed to +``pyodbc.connect()`` in both the DSN and hostname cases include: +``odbc_autotranslate``, ``ansi``, ``unicode_results``, ``autocommit``. 
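+
+For example, building on the DSN connection shown above (``some_dsn`` is an
+illustrative DSN name), ``autocommit`` can be supplied in the query string::
+
+    engine = create_engine(
+        "mssql+pyodbc://scott:tiger@some_dsn?autocommit=true")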
+ +Pass through exact Pyodbc string +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +A PyODBC connection string can also be sent exactly as specified in +`ConnectionStrings `_ +into the driver using the parameter ``odbc_connect``. The delimiters must be URL escaped, however, +as illustrated below using ``urllib.quote_plus``:: + + import urllib + params = urllib.quote_plus("DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password") + + engine = create_engine("mssql+pyodbc:///?odbc_connect=%s" % params) + + +Unicode Binds +------------- + +The current state of PyODBC on a unix backend with FreeTDS and/or +EasySoft is poor regarding unicode; different OS platforms and versions of +UnixODBC versus IODBC versus FreeTDS/EasySoft versus PyODBC itself +dramatically alter how strings are received. The PyODBC dialect attempts to +use all the information it knows to determine whether or not a Python unicode +literal can be passed directly to the PyODBC driver or not; while SQLAlchemy +can encode these to bytestrings first, some users have reported that PyODBC +mis-handles bytestrings for certain encodings and requires a Python unicode +object, while the author has observed widespread cases where a Python unicode +is completely misinterpreted by PyODBC, particularly when dealing with +the information schema tables used in table reflection, and the value +must first be encoded to a bytestring. + +It is for this reason that whether or not unicode literals for bound +parameters are sent to PyODBC can be controlled using the +``supports_unicode_binds`` parameter to ``create_engine()``. When +left at its default of ``None``, the PyODBC dialect will use its +best guess as to whether or not the driver deals with unicode literals +well. When ``False``, unicode literals will be encoded first, and when +``True`` unicode literals will be passed straight through. This is an interim +flag that hopefully should not be needed when the unicode situation stabilizes +for unix + PyODBC. + +.. versionadded:: 0.7.7 + ``supports_unicode_binds`` parameter to ``create_engine()``\ . + +""" + +from .base import MSExecutionContext, MSDialect, VARBINARY +from ...connectors.pyodbc import PyODBCConnector +from ... import types as sqltypes, util +import decimal + + +class _ms_numeric_pyodbc(object): + + """Turns Decimals with adjusted() < 0 or > 7 into strings. + + The routines here are needed for older pyodbc versions + as well as current mxODBC versions. + + """ + + def bind_processor(self, dialect): + + super_process = super(_ms_numeric_pyodbc, self).\ + bind_processor(dialect) + + if not dialect._need_decimal_fix: + return super_process + + def process(value): + if self.asdecimal and \ + isinstance(value, decimal.Decimal): + + adjusted = value.adjusted() + if adjusted < 0: + return self._small_dec_to_string(value) + elif adjusted > 7: + return self._large_dec_to_string(value) + + if super_process: + return super_process(value) + else: + return value + return process + + # these routines are needed for older versions of pyodbc. + # as of 2.1.8 this logic is integrated.
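+
+    # Worked example (assumed value): Decimal("1E-8") has adjusted() == -8,
+    # so process() above routes it to _small_dec_to_string(), which renders
+    # the plain string "0.00000001" rather than scientific notation.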
+ + def _small_dec_to_string(self, value): + return "%s0.%s%s" % ( + (value < 0 and '-' or ''), + '0' * (abs(value.adjusted()) - 1), + "".join([str(nint) for nint in value.as_tuple()[1]])) + + def _large_dec_to_string(self, value): + _int = value.as_tuple()[1] + if 'E' in str(value): + result = "%s%s%s" % ( + (value < 0 and '-' or ''), + "".join([str(s) for s in _int]), + "0" * (value.adjusted() - (len(_int) - 1))) + else: + if (len(_int) - 1) > value.adjusted(): + result = "%s%s.%s" % ( + (value < 0 and '-' or ''), + "".join( + [str(s) for s in _int][0:value.adjusted() + 1]), + "".join( + [str(s) for s in _int][value.adjusted() + 1:])) + else: + result = "%s%s" % ( + (value < 0 and '-' or ''), + "".join( + [str(s) for s in _int][0:value.adjusted() + 1])) + return result + + +class _MSNumeric_pyodbc(_ms_numeric_pyodbc, sqltypes.Numeric): + pass + + +class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float): + pass + + +class _VARBINARY_pyodbc(VARBINARY): + def bind_processor(self, dialect): + if dialect.dbapi is None: + return None + + DBAPIBinary = dialect.dbapi.Binary + + def process(value): + if value is not None: + return DBAPIBinary(value) + else: + # pyodbc-specific + return dialect.dbapi.BinaryNull + return process + + +class MSExecutionContext_pyodbc(MSExecutionContext): + _embedded_scope_identity = False + + def pre_exec(self): + """where appropriate, issue "select scope_identity()" in the same + statement. + + Background on why "scope_identity()" is preferable to "@@identity": + http://msdn.microsoft.com/en-us/library/ms190315.aspx + + Background on why we attempt to embed "scope_identity()" into the same + statement as the INSERT: + http://code.google.com/p/pyodbc/wiki/FAQs#How_do_I_retrieve_autogenerated/identity_values? + + """ + + super(MSExecutionContext_pyodbc, self).pre_exec() + + # don't embed the scope_identity select into an + # "INSERT .. DEFAULT VALUES" + if self._select_lastrowid and \ + self.dialect.use_scope_identity and \ + len(self.parameters[0]): + self._embedded_scope_identity = True + + self.statement += "; select scope_identity()" + + def post_exec(self): + if self._embedded_scope_identity: + # Fetch the last inserted id from the manipulated statement + # We may have to skip over a number of result sets with + # no data (due to triggers, etc.) 
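+            # (Each dbapi Error raised by the fetch advances the cursor via
+            # nextset() until the row from the embedded
+            # "select scope_identity()" arrives.)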
+ while True: + try: + # fetchall() ensures the cursor is consumed + # without closing it (FreeTDS particularly) + row = self.cursor.fetchall()[0] + break + except self.dialect.dbapi.Error as e: + # no way around this - nextset() consumes the previous set + # so we need to just keep flipping + self.cursor.nextset() + + self._lastrowid = int(row[0]) + else: + super(MSExecutionContext_pyodbc, self).post_exec() + + +class MSDialect_pyodbc(PyODBCConnector, MSDialect): + + execution_ctx_cls = MSExecutionContext_pyodbc + + colspecs = util.update_copy( + MSDialect.colspecs, + { + sqltypes.Numeric: _MSNumeric_pyodbc, + sqltypes.Float: _MSFloat_pyodbc, + VARBINARY: _VARBINARY_pyodbc, + sqltypes.LargeBinary: _VARBINARY_pyodbc, + } + ) + + def __init__(self, description_encoding=None, **params): + if 'description_encoding' in params: + self.description_encoding = params.pop('description_encoding') + super(MSDialect_pyodbc, self).__init__(**params) + self.use_scope_identity = self.use_scope_identity and \ + self.dbapi and \ + hasattr(self.dbapi.Cursor, 'nextset') + self._need_decimal_fix = self.dbapi and \ + self._dbapi_version() < (2, 1, 8) + +dialect = MSDialect_pyodbc diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/zxjdbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/zxjdbc.py new file mode 100644 index 0000000..0bf68c2 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/zxjdbc.py @@ -0,0 +1,69 @@ +# mssql/zxjdbc.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: mssql+zxjdbc + :name: zxJDBC for Jython + :dbapi: zxjdbc + :connectstring: mssql+zxjdbc://user:pass@host:port/dbname\ +[?key=value&key=value...] + :driverurl: http://jtds.sourceforge.net/ + + .. note:: Jython is not supported by current versions of SQLAlchemy. The + zxjdbc dialect should be considered as experimental. + +""" +from ...connectors.zxJDBC import ZxJDBCConnector +from .base import MSDialect, MSExecutionContext +from ... 
import engine


class MSExecutionContext_zxjdbc(MSExecutionContext):

    _embedded_scope_identity = False

    def pre_exec(self):
        super(MSExecutionContext_zxjdbc, self).pre_exec()
        # scope_identity after the fact returns null in jTDS so we must
        # embed it
        if self._select_lastrowid and self.dialect.use_scope_identity:
            self._embedded_scope_identity = True
            self.statement += "; SELECT scope_identity()"

    def post_exec(self):
        if self._embedded_scope_identity:
            while True:
                try:
                    row = self.cursor.fetchall()[0]
                    break
                except self.dialect.dbapi.Error:
                    self.cursor.nextset()
            self._lastrowid = int(row[0])

        if (self.isinsert or self.isupdate or self.isdelete) and \
                self.compiled.returning:
            self._result_proxy = engine.FullyBufferedResultProxy(self)

        if self._enable_identity_insert:
            table = self.dialect.identifier_preparer.format_table(
                self.compiled.statement.table)
            self.cursor.execute("SET IDENTITY_INSERT %s OFF" % table)


class MSDialect_zxjdbc(ZxJDBCConnector, MSDialect):
    jdbc_db_name = 'jtds:sqlserver'
    jdbc_driver_name = 'net.sourceforge.jtds.jdbc.Driver'

    execution_ctx_cls = MSExecutionContext_zxjdbc

    def _get_server_version_info(self, connection):
        return tuple(
            int(x)
            for x in connection.connection.dbversion.split('.')
        )

dialect = MSDialect_zxjdbc
diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/__init__.py
new file mode 100644
index 0000000..fabd932
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/__init__.py
@@ -0,0 +1,31 @@
+# mysql/__init__.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from . import base, mysqldb, oursql, \
+    pyodbc, zxjdbc, mysqlconnector, pymysql,\
+    gaerdbms, cymysql
+
+# default dialect
+base.dialect = mysqldb.dialect
+
+from .base import \
+    BIGINT, BINARY, BIT, BLOB, BOOLEAN, CHAR, DATE, DATETIME, \
+    DECIMAL, DOUBLE, ENUM, \
+    FLOAT, INTEGER, LONGBLOB, LONGTEXT, MEDIUMBLOB, \
+    MEDIUMINT, MEDIUMTEXT, NCHAR, \
+    NVARCHAR, NUMERIC, SET, SMALLINT, REAL, TEXT, TIME, TIMESTAMP, \
+    TINYBLOB, TINYINT, TINYTEXT,\
+    VARBINARY, VARCHAR, YEAR, dialect
+
+__all__ = (
+    'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME',
+    'DECIMAL', 'DOUBLE', 'ENUM', 'FLOAT', 'INTEGER',
+    'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT', 'MEDIUMTEXT', 'NCHAR',
+    'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'REAL', 'TEXT', 'TIME',
+    'TIMESTAMP', 'TINYBLOB', 'TINYINT', 'TINYTEXT', 'VARBINARY', 'VARCHAR',
+    'YEAR', 'dialect'
+)
diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/base.py
new file mode 100644
index 0000000..3f9c599
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/base.py
@@ -0,0 +1,3438 @@
+# mysql/base.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+
+.. dialect:: mysql
+    :name: MySQL
+
+Supported Versions and Features
+-------------------------------
+
+SQLAlchemy supports MySQL starting with version 4.1 through modern releases.
+However, no heroic measures are taken to work around major missing
+SQL features - if your server version does not support sub-selects, for
+example, they won't work in SQLAlchemy either.
+
+See the official MySQL documentation for detailed information about features
+supported in any given server release.
+
+.. _mysql_connection_timeouts:
+
+Connection Timeouts
+-------------------
+
+MySQL features an automatic connection close behavior for connections that
+have been idle for eight hours or more.  To avoid this issue, use the
+``pool_recycle`` option, which controls the maximum age of any connection::
+
+    engine = create_engine('mysql+mysqldb://...', pool_recycle=3600)
+
+.. seealso::
+
+    :ref:`pool_setting_recycle` - full description of the pool recycle feature.
+
+
+.. _mysql_storage_engines:
+
+CREATE TABLE arguments including Storage Engines
+------------------------------------------------
+
+MySQL's CREATE TABLE syntax includes a wide array of special options,
+including ``ENGINE``, ``CHARSET``, ``MAX_ROWS``, ``ROW_FORMAT``,
+``INSERT_METHOD``, and many more.
+To accommodate the rendering of these arguments, specify the form
+``mysql_argument_name="value"``.  For example, to specify a table with
+``ENGINE`` of ``InnoDB``, ``CHARSET`` of ``utf8``, and ``KEY_BLOCK_SIZE``
+of ``1024``::
+
+  Table('mytable', metadata,
+        Column('data', String(32)),
+        mysql_engine='InnoDB',
+        mysql_charset='utf8',
+        mysql_key_block_size="1024"
+       )
+
+The MySQL dialect will normally transfer any keyword specified as
+``mysql_keyword_name`` to be rendered as ``KEYWORD_NAME`` in the
+``CREATE TABLE`` statement.  A handful of these names will render with a space
+instead of an underscore; to support this, the MySQL dialect has awareness of
+these particular names, which include ``DATA DIRECTORY``
+(e.g. ``mysql_data_directory``), ``CHARACTER SET`` (e.g.
+``mysql_character_set``) and ``INDEX DIRECTORY`` (e.g.
+``mysql_index_directory``).
+
+The most common argument is ``mysql_engine``, which refers to the storage
+engine for the table.  Historically, MySQL server installations would default
+to ``MyISAM`` for this value, although newer versions may be defaulting
+to ``InnoDB``.  The ``InnoDB`` engine is typically preferred for its support
+of transactions and foreign keys.
+
+A :class:`.Table` that is created in a MySQL database with a storage engine
+of ``MyISAM`` will be essentially non-transactional, meaning any
+INSERT/UPDATE/DELETE statement referring to this table will be invoked as
+autocommit.  It also will have no support for foreign key constraints; while
+the ``CREATE TABLE`` statement accepts foreign key options, when using the
+``MyISAM`` storage engine these arguments are discarded.  Reflecting such a
+table will also produce no foreign key constraint information.
+
+For fully atomic transactions as well as support for foreign key
+constraints, all participating ``CREATE TABLE`` statements must specify a
+transactional engine, which in the vast majority of cases is ``InnoDB``.
+
+.. seealso::
+
+    `The InnoDB Storage Engine
+    <http://dev.mysql.com/doc/refman/5.5/en/innodb-storage-engine.html>`_ -
+    on the MySQL website.
+
+Case Sensitivity and Table Reflection
+-------------------------------------
+
+MySQL has inconsistent support for case-sensitive identifier
+names, basing support on specific details of the underlying
+operating system. However, it has been observed that no matter
+what case sensitivity behavior is present, the names of tables in
+foreign key declarations are *always* received from the database
+as all-lower case, making it impossible to accurately reflect a
+schema where inter-related tables use mixed-case identifier names.
+
+Therefore it is strongly advised that table names be declared as
+all lower case both within SQLAlchemy as well as on the MySQL
+database itself, especially if database reflection features are
+to be used.
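+
+For example, a minimal sketch of this convention (the table, column and
+engine names here are purely illustrative)::
+
+    # declare using all-lower-case identifier names...
+    addresses = Table('addresses', metadata,
+                      Column('id', Integer, primary_key=True),
+                      Column('user_id', Integer, ForeignKey('users.id'))
+                      )
+
+    # ...so that reflection, including the foreign key target above,
+    # round-trips with the same names
+    reflected = Table('addresses', MetaData(), autoload=True,
+                      autoload_with=some_engine)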
+
+.. _mysql_isolation_level:
+
+Transaction Isolation Level
+---------------------------
+
+:func:`.create_engine` accepts an :paramref:`.create_engine.isolation_level`
+parameter which results in the command ``SET SESSION
+TRANSACTION ISOLATION LEVEL <level>`` being invoked for
+every new connection. Valid values for this parameter are
+``READ COMMITTED``, ``READ UNCOMMITTED``,
+``REPEATABLE READ``, and ``SERIALIZABLE``::
+
+    engine = create_engine(
+        "mysql://scott:tiger@localhost/test",
+        isolation_level="READ UNCOMMITTED"
+    )
+
+.. versionadded:: 0.7.6
+
+AUTO_INCREMENT Behavior
+-----------------------
+
+When creating tables, SQLAlchemy will automatically set ``AUTO_INCREMENT`` on
+the first :class:`.Integer` primary key column which is not marked as a
+foreign key::
+
+  >>> t = Table('mytable', metadata,
+  ...   Column('mytable_id', Integer, primary_key=True)
+  ... )
+  >>> t.create()
+  CREATE TABLE mytable (
+          mytable_id INTEGER NOT NULL AUTO_INCREMENT,
+          PRIMARY KEY (mytable_id)
+  )
+
+You can disable this behavior by passing ``False`` to the
+:paramref:`~.Column.autoincrement` argument of :class:`.Column`.  This flag
+can also be used to enable auto-increment on a secondary column in a
+multi-column key for some storage engines::
+
+  Table('mytable', metadata,
+        Column('gid', Integer, primary_key=True, autoincrement=False),
+        Column('id', Integer, primary_key=True)
+       )
+
+.. _mysql_unicode:
+
+Unicode
+-------
+
+Charset Selection
+~~~~~~~~~~~~~~~~~
+
+Most MySQL DBAPIs offer the option to set the client character set for
+a connection.  This is typically delivered using the ``charset`` parameter
+in the URL, such as::
+
+    e = create_engine("mysql+pymysql://scott:tiger@localhost/\
+test?charset=utf8")
+
+This charset is the **client character set** for the connection.  Some
+MySQL DBAPIs will default this to a value such as ``latin1``, and some
+will make use of the ``default-character-set`` setting in the ``my.cnf``
+file as well.  Documentation for the DBAPI in use should be consulted
+for specific behavior.
+
+The encoding used for Unicode has traditionally been ``'utf8'``.  However,
+for MySQL versions 5.5.3 and onward, a new MySQL-specific encoding
+``'utf8mb4'`` has been introduced.  The rationale for this new encoding
+is that MySQL's utf-8 encoding only supports codepoints of up to three
+bytes, instead of four.  Therefore,
+when communicating with a MySQL database
+that includes codepoints more than three bytes in size,
+this new charset is preferred, if supported by both the database as well
+as the client DBAPI, as in::
+
+    e = create_engine("mysql+pymysql://scott:tiger@localhost/\
+test?charset=utf8mb4")
+
+At the moment, up-to-date versions of MySQLdb and PyMySQL support the
+``utf8mb4`` charset.  Other DBAPIs such as MySQL-Connector and OurSQL
+may **not** support it yet.
+
+In order to use ``utf8mb4`` encoding, changes to
+the MySQL schema and/or server configuration may be required.
+
+.. seealso::
+
+    `The utf8mb4 Character Set
+    <http://dev.mysql.com/doc/refman/5.5/en/charset-unicode-utf8mb4.html>`_ -
+    in the MySQL documentation
+
+Unicode Encoding / Decoding
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+All modern MySQL DBAPIs offer the service of handling the encoding and
+decoding of unicode data between the Python application space and the
+database.  As this was not always the case, SQLAlchemy also includes a
+comprehensive system for performing the encode/decode task.  As only one of
+these systems should be in use at a time, SQLAlchemy has long included
+functionality to automatically detect upon first connection whether or not
+the DBAPI is automatically handling unicode.
+
+Whether or not the MySQL DBAPI will handle encoding can usually be configured
+using a DBAPI flag ``use_unicode``, which is known to be supported at least
+by MySQLdb, PyMySQL, and MySQL-Connector.  Setting this value to ``0``
+in the "connect args" or query string will have the effect of disabling the
+DBAPI's handling of unicode, such that it instead will return data of the
+``str`` type or ``bytes`` type, with data in the configured charset::
+
+    # connect while disabling the DBAPI's unicode encoding/decoding
+    e = create_engine("mysql+mysqldb://scott:tiger@localhost/test?charset=utf8&use_unicode=0")
+
+Current recommendations for modern DBAPIs are as follows:
+
+* It is generally always safe to leave the ``use_unicode`` flag set at
+  its default; that is, don't use it at all.
+* Under Python 3, the ``use_unicode=0`` flag should **never be used**.
+  SQLAlchemy under Python 3 generally assumes the DBAPI receives and returns
+  string values as Python 3 strings, which are inherently unicode objects.
+* Under Python 2 with MySQLdb, the ``use_unicode=0`` flag will **offer
+  superior performance**, as MySQLdb's unicode converters under Python 2
+  have been observed to be unusually slow compared to SQLAlchemy's
+  fast C-based encoders/decoders.
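+
+For example, a sketch of the "connect args" form mentioned above,
+equivalent to the query string style (the URL is illustrative)::
+
+    e = create_engine(
+        "mysql+mysqldb://scott:tiger@localhost/test?charset=utf8",
+        connect_args={"use_unicode": 0})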
+
+In short: don't specify ``use_unicode`` *at all*, with the possible
+exception of ``use_unicode=0`` on MySQLdb with Python 2 **only** for a
+potential performance gain.
+
+Ansi Quoting Style
+------------------
+
+MySQL features two varieties of identifier "quoting style", one using
+backticks and the other using quotes, e.g. ```some_identifier``` vs.
+``"some_identifier"``.  All MySQL dialects detect which version
+is in use by checking the value of ``sql_mode`` when a connection is first
+established with a particular :class:`.Engine`.  This quoting style comes
+into play when rendering table and column names as well as when reflecting
+existing database structures.  The detection is entirely automatic and
+no special configuration is needed to use either quoting style.
+
+.. versionchanged:: 0.6 detection of ANSI quoting style is entirely automatic,
+   there are no longer any end-user ``create_engine()`` options in this regard.
+
+MySQL SQL Extensions
+--------------------
+
+Many of the MySQL SQL extensions are handled through SQLAlchemy's generic
+function and operator support::
+
+  table.select(table.c.password==func.md5('plaintext'))
+  table.select(table.c.username.op('regexp')('^[a-d]'))
+
+And of course any valid MySQL statement can be executed as a string as well.
+
+Some limited direct support for MySQL extensions to SQL is currently
+available.
+ +* SELECT pragma:: + + select(..., prefixes=['HIGH_PRIORITY', 'SQL_SMALL_RESULT']) + +* UPDATE with LIMIT:: + + update(..., mysql_limit=10) + +rowcount Support +---------------- + +SQLAlchemy standardizes the DBAPI ``cursor.rowcount`` attribute to be the +usual definition of "number of rows matched by an UPDATE or DELETE" statement. +This is in contradiction to the default setting on most MySQL DBAPI drivers, +which is "number of rows actually modified/deleted". For this reason, the +SQLAlchemy MySQL dialects always add the ``constants.CLIENT.FOUND_ROWS`` +flag, or whatever is equivalent for the target dialect, upon connection. +This setting is currently hardcoded. + +.. seealso:: + + :attr:`.ResultProxy.rowcount` + + +CAST Support +------------ + +MySQL documents the CAST operator as available in version 4.0.2. When using +the SQLAlchemy :func:`.cast` function, SQLAlchemy +will not render the CAST token on MySQL before this version, based on server +version detection, instead rendering the internal expression directly. + +CAST may still not be desirable on an early MySQL version post-4.0.2, as it +didn't add all datatype support until 4.1.1. If your application falls into +this narrow area, the behavior of CAST can be controlled using the +:ref:`sqlalchemy.ext.compiler_toplevel` system, as per the recipe below:: + + from sqlalchemy.sql.expression import Cast + from sqlalchemy.ext.compiler import compiles + + @compiles(Cast, 'mysql') + def _check_mysql_version(element, compiler, **kw): + if compiler.dialect.server_version_info < (4, 1, 0): + return compiler.process(element.clause, **kw) + else: + return compiler.visit_cast(element, **kw) + +The above function, which only needs to be declared once +within an application, overrides the compilation of the +:func:`.cast` construct to check for version 4.1.0 before +fully rendering CAST; else the internal element of the +construct is rendered directly. + + +.. _mysql_indexes: + +MySQL Specific Index Options +---------------------------- + +MySQL-specific extensions to the :class:`.Index` construct are available. + +Index Length +~~~~~~~~~~~~~ + +MySQL provides an option to create index entries with a certain length, where +"length" refers to the number of characters or bytes in each value which will +become part of the index. SQLAlchemy provides this feature via the +``mysql_length`` parameter:: + + Index('my_index', my_table.c.data, mysql_length=10) + + Index('a_b_idx', my_table.c.a, my_table.c.b, mysql_length={'a': 4, + 'b': 9}) + +Prefix lengths are given in characters for nonbinary string types and in bytes +for binary string types. The value passed to the keyword argument *must* be +either an integer (and, thus, specify the same prefix length value for all +columns of the index) or a dict in which keys are column names and values are +prefix length values for corresponding columns. MySQL only allows a length for +a column of an index if it is for a CHAR, VARCHAR, TEXT, BINARY, VARBINARY and +BLOB. + +.. versionadded:: 0.8.2 ``mysql_length`` may now be specified as a dictionary + for use with composite indexes. + +Index Types +~~~~~~~~~~~~~ + +Some MySQL storage engines permit you to specify an index type when creating +an index or primary key constraint. 
SQLAlchemy provides this feature via the +``mysql_using`` parameter on :class:`.Index`:: + + Index('my_index', my_table.c.data, mysql_using='hash') + +As well as the ``mysql_using`` parameter on :class:`.PrimaryKeyConstraint`:: + + PrimaryKeyConstraint("data", mysql_using='hash') + +The value passed to the keyword argument will be simply passed through to the +underlying CREATE INDEX or PRIMARY KEY clause, so it *must* be a valid index +type for your MySQL storage engine. + +More information can be found at: + +http://dev.mysql.com/doc/refman/5.0/en/create-index.html + +http://dev.mysql.com/doc/refman/5.0/en/create-table.html + +.. _mysql_foreign_keys: + +MySQL Foreign Keys +------------------ + +MySQL's behavior regarding foreign keys has some important caveats. + +Foreign Key Arguments to Avoid +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +MySQL does not support the foreign key arguments "DEFERRABLE", "INITIALLY", +or "MATCH". Using the ``deferrable`` or ``initially`` keyword argument with +:class:`.ForeignKeyConstraint` or :class:`.ForeignKey` will have the effect of +these keywords being rendered in a DDL expression, which will then raise an +error on MySQL. In order to use these keywords on a foreign key while having +them ignored on a MySQL backend, use a custom compile rule:: + + from sqlalchemy.ext.compiler import compiles + from sqlalchemy.schema import ForeignKeyConstraint + + @compiles(ForeignKeyConstraint, "mysql") + def process(element, compiler, **kw): + element.deferrable = element.initially = None + return compiler.visit_foreign_key_constraint(element, **kw) + +.. versionchanged:: 0.9.0 - the MySQL backend no longer silently ignores + the ``deferrable`` or ``initially`` keyword arguments of + :class:`.ForeignKeyConstraint` and :class:`.ForeignKey`. + +The "MATCH" keyword is in fact more insidious, and is explicitly disallowed +by SQLAlchemy in conjunction with the MySQL backend. This argument is +silently ignored by MySQL, but in addition has the effect of ON UPDATE and ON +DELETE options also being ignored by the backend. Therefore MATCH should +never be used with the MySQL backend; as is the case with DEFERRABLE and +INITIALLY, custom compilation rules can be used to correct a MySQL +ForeignKeyConstraint at DDL definition time. + +.. versionadded:: 0.9.0 - the MySQL backend will raise a + :class:`.CompileError` when the ``match`` keyword is used with + :class:`.ForeignKeyConstraint` or :class:`.ForeignKey`. + +Reflection of Foreign Key Constraints +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Not all MySQL storage engines support foreign keys. When using the +very common ``MyISAM`` MySQL storage engine, the information loaded by table +reflection will not include foreign keys. For these tables, you may supply a +:class:`~sqlalchemy.ForeignKeyConstraint` at reflection time:: + + Table('mytable', metadata, + ForeignKeyConstraint(['other_id'], ['othertable.other_id']), + autoload=True + ) + +.. seealso:: + + :ref:`mysql_storage_engines` + +.. _mysql_unique_constraints: + +MySQL Unique Constraints and Reflection +--------------------------------------- + +SQLAlchemy supports both the :class:`.Index` construct with the +flag ``unique=True``, indicating a UNIQUE index, as well as the +:class:`.UniqueConstraint` construct, representing a UNIQUE constraint. +Both objects/syntaxes are supported by MySQL when emitting DDL to create +these constraints. 
However, MySQL does not have a unique constraint +construct that is separate from a unique index; that is, the "UNIQUE" +constraint on MySQL is equivalent to creating a "UNIQUE INDEX". + +When reflecting these constructs, the :meth:`.Inspector.get_indexes` +and the :meth:`.Inspector.get_unique_constraints` methods will **both** +return an entry for a UNIQUE index in MySQL. However, when performing +full table reflection using ``Table(..., autoload=True)``, +the :class:`.UniqueConstraint` construct is +**not** part of the fully reflected :class:`.Table` construct under any +circumstances; this construct is always represented by a :class:`.Index` +with the ``unique=True`` setting present in the :attr:`.Table.indexes` +collection. + + +.. _mysql_timestamp_null: + +TIMESTAMP Columns and NULL +-------------------------- + +MySQL historically enforces that a column which specifies the +TIMESTAMP datatype implicitly includes a default value of +CURRENT_TIMESTAMP, even though this is not stated, and additionally +sets the column as NOT NULL, the opposite behavior vs. that of all +other datatypes:: + + mysql> CREATE TABLE ts_test ( + -> a INTEGER, + -> b INTEGER NOT NULL, + -> c TIMESTAMP, + -> d TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + -> e TIMESTAMP NULL); + Query OK, 0 rows affected (0.03 sec) + + mysql> SHOW CREATE TABLE ts_test; + +---------+----------------------------------------------------- + | Table | Create Table + +---------+----------------------------------------------------- + | ts_test | CREATE TABLE `ts_test` ( + `a` int(11) DEFAULT NULL, + `b` int(11) NOT NULL, + `c` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `d` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `e` timestamp NULL DEFAULT NULL + ) ENGINE=MyISAM DEFAULT CHARSET=latin1 + +Above, we see that an INTEGER column defaults to NULL, unless it is specified +with NOT NULL. But when the column is of type TIMESTAMP, an implicit +default of CURRENT_TIMESTAMP is generated which also coerces the column +to be a NOT NULL, even though we did not specify it as such. + +This behavior of MySQL can be changed on the MySQL side using the +`explicit_defaults_for_timestamp +`_ configuration flag introduced in +MySQL 5.6. With this server setting enabled, TIMESTAMP columns behave like +any other datatype on the MySQL side with regards to defaults and nullability. + +However, to accommodate the vast majority of MySQL databases that do not +specify this new flag, SQLAlchemy emits the "NULL" specifier explicitly with +any TIMESTAMP column that does not specify ``nullable=False``. In order +to accommodate newer databases that specify ``explicit_defaults_for_timestamp``, +SQLAlchemy also emits NOT NULL for TIMESTAMP columns that do specify +``nullable=False``. The following example illustrates:: + + from sqlalchemy import MetaData, Integer, Table, Column, text + from sqlalchemy.dialects.mysql import TIMESTAMP + + m = MetaData() + t = Table('ts_test', m, + Column('a', Integer), + Column('b', Integer, nullable=False), + Column('c', TIMESTAMP), + Column('d', TIMESTAMP, nullable=False) + ) + + + from sqlalchemy import create_engine + e = create_engine("mysql://scott:tiger@localhost/test", echo=True) + m.create_all(e) + +output:: + + CREATE TABLE ts_test ( + a INTEGER, + b INTEGER NOT NULL, + c TIMESTAMP NULL, + d TIMESTAMP NOT NULL + ) + +.. versionchanged:: 1.0.0 - SQLAlchemy now renders NULL or NOT NULL in all + cases for TIMESTAMP columns, to accommodate + ``explicit_defaults_for_timestamp``. 
Prior to this version, it will + not render "NOT NULL" for a TIMESTAMP column that is ``nullable=False``. + +""" + +import datetime +import re +import sys + +from ... import schema as sa_schema +from ... import exc, log, sql, util +from ...sql import compiler +from array import array as _array + +from ...engine import reflection +from ...engine import default +from ... import types as sqltypes +from ...util import topological +from ...types import DATE, BOOLEAN, \ + BLOB, BINARY, VARBINARY + +RESERVED_WORDS = set( + ['accessible', 'add', 'all', 'alter', 'analyze', 'and', 'as', 'asc', + 'asensitive', 'before', 'between', 'bigint', 'binary', 'blob', 'both', + 'by', 'call', 'cascade', 'case', 'change', 'char', 'character', 'check', + 'collate', 'column', 'condition', 'constraint', 'continue', 'convert', + 'create', 'cross', 'current_date', 'current_time', 'current_timestamp', + 'current_user', 'cursor', 'database', 'databases', 'day_hour', + 'day_microsecond', 'day_minute', 'day_second', 'dec', 'decimal', + 'declare', 'default', 'delayed', 'delete', 'desc', 'describe', + 'deterministic', 'distinct', 'distinctrow', 'div', 'double', 'drop', + 'dual', 'each', 'else', 'elseif', 'enclosed', 'escaped', 'exists', + 'exit', 'explain', 'false', 'fetch', 'float', 'float4', 'float8', + 'for', 'force', 'foreign', 'from', 'fulltext', 'grant', 'group', + 'having', 'high_priority', 'hour_microsecond', 'hour_minute', + 'hour_second', 'if', 'ignore', 'in', 'index', 'infile', 'inner', 'inout', + 'insensitive', 'insert', 'int', 'int1', 'int2', 'int3', 'int4', 'int8', + 'integer', 'interval', 'into', 'is', 'iterate', 'join', 'key', 'keys', + 'kill', 'leading', 'leave', 'left', 'like', 'limit', 'linear', 'lines', + 'load', 'localtime', 'localtimestamp', 'lock', 'long', 'longblob', + 'longtext', 'loop', 'low_priority', 'master_ssl_verify_server_cert', + 'match', 'mediumblob', 'mediumint', 'mediumtext', 'middleint', + 'minute_microsecond', 'minute_second', 'mod', 'modifies', 'natural', + 'not', 'no_write_to_binlog', 'null', 'numeric', 'on', 'optimize', + 'option', 'optionally', 'or', 'order', 'out', 'outer', 'outfile', + 'precision', 'primary', 'procedure', 'purge', 'range', 'read', 'reads', + 'read_only', 'read_write', 'real', 'references', 'regexp', 'release', + 'rename', 'repeat', 'replace', 'require', 'restrict', 'return', + 'revoke', 'right', 'rlike', 'schema', 'schemas', 'second_microsecond', + 'select', 'sensitive', 'separator', 'set', 'show', 'smallint', 'spatial', + 'specific', 'sql', 'sqlexception', 'sqlstate', 'sqlwarning', + 'sql_big_result', 'sql_calc_found_rows', 'sql_small_result', 'ssl', + 'starting', 'straight_join', 'table', 'terminated', 'then', 'tinyblob', + 'tinyint', 'tinytext', 'to', 'trailing', 'trigger', 'true', 'undo', + 'union', 'unique', 'unlock', 'unsigned', 'update', 'usage', 'use', + 'using', 'utc_date', 'utc_time', 'utc_timestamp', 'values', 'varbinary', + 'varchar', 'varcharacter', 'varying', 'when', 'where', 'while', 'with', + + 'write', 'x509', 'xor', 'year_month', 'zerofill', # 5.0 + + 'columns', 'fields', 'privileges', 'soname', 'tables', # 4.1 + + 'accessible', 'linear', 'master_ssl_verify_server_cert', 'range', + 'read_only', 'read_write', # 5.1 + + 'general', 'ignore_server_ids', 'master_heartbeat_period', 'maxvalue', + 'resignal', 'signal', 'slow', # 5.5 + + 'get', 'io_after_gtids', 'io_before_gtids', 'master_bind', 'one_shot', + 'partition', 'sql_after_gtids', 'sql_before_gtids', # 5.6 + + 'generated', 'optimizer_costs', 'stored', 'virtual', # 5.7 + + ]) + +AUTOCOMMIT_RE = 
re.compile( + r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER|LOAD +DATA|REPLACE)', + re.I | re.UNICODE) +SET_RE = re.compile( + r'\s*SET\s+(?:(?:GLOBAL|SESSION)\s+)?\w', + re.I | re.UNICODE) + + +class _NumericType(object): + """Base for MySQL numeric types. + + This is the base both for NUMERIC as well as INTEGER, hence + it's a mixin. + + """ + + def __init__(self, unsigned=False, zerofill=False, **kw): + self.unsigned = unsigned + self.zerofill = zerofill + super(_NumericType, self).__init__(**kw) + + def __repr__(self): + return util.generic_repr(self, + to_inspect=[_NumericType, sqltypes.Numeric]) + + +class _FloatType(_NumericType, sqltypes.Float): + def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + if isinstance(self, (REAL, DOUBLE)) and \ + ( + (precision is None and scale is not None) or + (precision is not None and scale is None) + ): + raise exc.ArgumentError( + "You must specify both precision and scale or omit " + "both altogether.") + super(_FloatType, self).__init__( + precision=precision, asdecimal=asdecimal, **kw) + self.scale = scale + + def __repr__(self): + return util.generic_repr(self, to_inspect=[_FloatType, + _NumericType, + sqltypes.Float]) + + +class _IntegerType(_NumericType, sqltypes.Integer): + def __init__(self, display_width=None, **kw): + self.display_width = display_width + super(_IntegerType, self).__init__(**kw) + + def __repr__(self): + return util.generic_repr(self, to_inspect=[_IntegerType, + _NumericType, + sqltypes.Integer]) + + +class _StringType(sqltypes.String): + """Base for MySQL string types.""" + + def __init__(self, charset=None, collation=None, + ascii=False, binary=False, unicode=False, + national=False, **kw): + self.charset = charset + + # allow collate= or collation= + kw.setdefault('collation', kw.pop('collate', collation)) + + self.ascii = ascii + self.unicode = unicode + self.binary = binary + self.national = national + super(_StringType, self).__init__(**kw) + + def __repr__(self): + return util.generic_repr(self, + to_inspect=[_StringType, sqltypes.String]) + + +class _MatchType(sqltypes.Float, sqltypes.MatchType): + def __init__(self, **kw): + # TODO: float arguments? + sqltypes.Float.__init__(self) + sqltypes.MatchType.__init__(self) + + + +class NUMERIC(_NumericType, sqltypes.NUMERIC): + """MySQL NUMERIC type.""" + + __visit_name__ = 'NUMERIC' + + def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + """Construct a NUMERIC. + + :param precision: Total digits in this number. If scale and precision + are both None, values are stored to limits allowed by the server. + + :param scale: The number of digits after the decimal point. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super(NUMERIC, self).__init__(precision=precision, + scale=scale, asdecimal=asdecimal, **kw) + + +class DECIMAL(_NumericType, sqltypes.DECIMAL): + """MySQL DECIMAL type.""" + + __visit_name__ = 'DECIMAL' + + def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + """Construct a DECIMAL. + + :param precision: Total digits in this number. If scale and precision + are both None, values are stored to limits allowed by the server. + + :param scale: The number of digits after the decimal point. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. 
If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super(DECIMAL, self).__init__(precision=precision, scale=scale, + asdecimal=asdecimal, **kw) + + +class DOUBLE(_FloatType): + """MySQL DOUBLE type.""" + + __visit_name__ = 'DOUBLE' + + def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + """Construct a DOUBLE. + + .. note:: + + The :class:`.DOUBLE` type by default converts from float + to Decimal, using a truncation that defaults to 10 digits. + Specify either ``scale=n`` or ``decimal_return_scale=n`` in order + to change this scale, or ``asdecimal=False`` to return values + directly as Python floating points. + + :param precision: Total digits in this number. If scale and precision + are both None, values are stored to limits allowed by the server. + + :param scale: The number of digits after the decimal point. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super(DOUBLE, self).__init__(precision=precision, scale=scale, + asdecimal=asdecimal, **kw) + + +class REAL(_FloatType, sqltypes.REAL): + """MySQL REAL type.""" + + __visit_name__ = 'REAL' + + def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + """Construct a REAL. + + .. note:: + + The :class:`.REAL` type by default converts from float + to Decimal, using a truncation that defaults to 10 digits. + Specify either ``scale=n`` or ``decimal_return_scale=n`` in order + to change this scale, or ``asdecimal=False`` to return values + directly as Python floating points. + + :param precision: Total digits in this number. If scale and precision + are both None, values are stored to limits allowed by the server. + + :param scale: The number of digits after the decimal point. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super(REAL, self).__init__(precision=precision, scale=scale, + asdecimal=asdecimal, **kw) + + +class FLOAT(_FloatType, sqltypes.FLOAT): + """MySQL FLOAT type.""" + + __visit_name__ = 'FLOAT' + + def __init__(self, precision=None, scale=None, asdecimal=False, **kw): + """Construct a FLOAT. + + :param precision: Total digits in this number. If scale and precision + are both None, values are stored to limits allowed by the server. + + :param scale: The number of digits after the decimal point. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super(FLOAT, self).__init__(precision=precision, scale=scale, + asdecimal=asdecimal, **kw) + + def bind_processor(self, dialect): + return None + + +class INTEGER(_IntegerType, sqltypes.INTEGER): + """MySQL INTEGER type.""" + + __visit_name__ = 'INTEGER' + + def __init__(self, display_width=None, **kw): + """Construct an INTEGER. + + :param display_width: Optional, maximum display width for this number. + + :param unsigned: a boolean, optional. 
+ + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super(INTEGER, self).__init__(display_width=display_width, **kw) + + +class BIGINT(_IntegerType, sqltypes.BIGINT): + """MySQL BIGINTEGER type.""" + + __visit_name__ = 'BIGINT' + + def __init__(self, display_width=None, **kw): + """Construct a BIGINTEGER. + + :param display_width: Optional, maximum display width for this number. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super(BIGINT, self).__init__(display_width=display_width, **kw) + + +class MEDIUMINT(_IntegerType): + """MySQL MEDIUMINTEGER type.""" + + __visit_name__ = 'MEDIUMINT' + + def __init__(self, display_width=None, **kw): + """Construct a MEDIUMINTEGER + + :param display_width: Optional, maximum display width for this number. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super(MEDIUMINT, self).__init__(display_width=display_width, **kw) + + +class TINYINT(_IntegerType): + """MySQL TINYINT type.""" + + __visit_name__ = 'TINYINT' + + def __init__(self, display_width=None, **kw): + """Construct a TINYINT. + + :param display_width: Optional, maximum display width for this number. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super(TINYINT, self).__init__(display_width=display_width, **kw) + + +class SMALLINT(_IntegerType, sqltypes.SMALLINT): + """MySQL SMALLINTEGER type.""" + + __visit_name__ = 'SMALLINT' + + def __init__(self, display_width=None, **kw): + """Construct a SMALLINTEGER. + + :param display_width: Optional, maximum display width for this number. + + :param unsigned: a boolean, optional. + + :param zerofill: Optional. If true, values will be stored as strings + left-padded with zeros. Note that this does not effect the values + returned by the underlying database API, which continue to be + numeric. + + """ + super(SMALLINT, self).__init__(display_width=display_width, **kw) + + +class BIT(sqltypes.TypeEngine): + """MySQL BIT type. + + This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater + for MyISAM, MEMORY, InnoDB and BDB. For older versions, use a + MSTinyInteger() type. + + """ + + __visit_name__ = 'BIT' + + def __init__(self, length=None): + """Construct a BIT. + + :param length: Optional, number of bits. + + """ + self.length = length + + def result_processor(self, dialect, coltype): + """Convert a MySQL's 64 bit, variable length binary string to a long. + + TODO: this is MySQL-db, pyodbc specific. OurSQL and mysqlconnector + already do this, so this logic should be moved to those dialects. 
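+
+        For illustration (an assumed example, not from any driver's
+        documentation): a two-byte value consisting of the bytes ``0x02``
+        and ``0x01`` is converted to ``(2 << 8) | 1``, i.e. ``513``.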
+ + """ + + def process(value): + if value is not None: + v = 0 + for i in value: + if not isinstance(i, int): + i = ord(i) # convert byte to int on Python 2 + v = v << 8 | i + return v + return value + return process + + +class TIME(sqltypes.TIME): + """MySQL TIME type. """ + + __visit_name__ = 'TIME' + + def __init__(self, timezone=False, fsp=None): + """Construct a MySQL TIME type. + + :param timezone: not used by the MySQL dialect. + :param fsp: fractional seconds precision value. + MySQL 5.6 supports storage of fractional seconds; + this parameter will be used when emitting DDL + for the TIME type. + + .. note:: + + DBAPI driver support for fractional seconds may + be limited; current support includes + MySQL Connector/Python. + + .. versionadded:: 0.8 The MySQL-specific TIME + type as well as fractional seconds support. + + """ + super(TIME, self).__init__(timezone=timezone) + self.fsp = fsp + + def result_processor(self, dialect, coltype): + time = datetime.time + + def process(value): + # convert from a timedelta value + if value is not None: + microseconds = value.microseconds + seconds = value.seconds + minutes = seconds // 60 + return time(minutes // 60, + minutes % 60, + seconds - minutes * 60, + microsecond=microseconds) + else: + return None + return process + + +class TIMESTAMP(sqltypes.TIMESTAMP): + """MySQL TIMESTAMP type. + + """ + + __visit_name__ = 'TIMESTAMP' + + def __init__(self, timezone=False, fsp=None): + """Construct a MySQL TIMESTAMP type. + + :param timezone: not used by the MySQL dialect. + :param fsp: fractional seconds precision value. + MySQL 5.6.4 supports storage of fractional seconds; + this parameter will be used when emitting DDL + for the TIMESTAMP type. + + .. note:: + + DBAPI driver support for fractional seconds may + be limited; current support includes + MySQL Connector/Python. + + .. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.TIMESTAMP` + with fractional seconds support. + + """ + super(TIMESTAMP, self).__init__(timezone=timezone) + self.fsp = fsp + + +class DATETIME(sqltypes.DATETIME): + """MySQL DATETIME type. + + """ + + __visit_name__ = 'DATETIME' + + def __init__(self, timezone=False, fsp=None): + """Construct a MySQL DATETIME type. + + :param timezone: not used by the MySQL dialect. + :param fsp: fractional seconds precision value. + MySQL 5.6.4 supports storage of fractional seconds; + this parameter will be used when emitting DDL + for the DATETIME type. + + .. note:: + + DBAPI driver support for fractional seconds may + be limited; current support includes + MySQL Connector/Python. + + .. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.DATETIME` + with fractional seconds support. + + """ + super(DATETIME, self).__init__(timezone=timezone) + self.fsp = fsp + + +class YEAR(sqltypes.TypeEngine): + """MySQL YEAR type, for single byte storage of years 1901-2155.""" + + __visit_name__ = 'YEAR' + + def __init__(self, display_width=None): + self.display_width = display_width + + +class TEXT(_StringType, sqltypes.TEXT): + """MySQL TEXT type, for text up to 2^16 characters.""" + + __visit_name__ = 'TEXT' + + def __init__(self, length=None, **kw): + """Construct a TEXT. + + :param length: Optional, if provided the server may optimize storage + by substituting the smallest TEXT type sufficient to store + ``length`` characters. + + :param charset: Optional, a column-level character set for this string + value. Takes precedence to 'ascii' or 'unicode' short-hand. 
+ + :param collation: Optional, a column-level collation for this string + value. Takes precedence to 'binary' short-hand. + + :param ascii: Defaults to False: short-hand for the ``latin1`` + character set, generates ASCII in schema. + + :param unicode: Defaults to False: short-hand for the ``ucs2`` + character set, generates UNICODE in schema. + + :param national: Optional. If true, use the server's configured + national character set. + + :param binary: Defaults to False: short-hand, pick the binary + collation type that matches the column's character set. Generates + BINARY in schema. This does not affect the type of data stored, + only the collation of character data. + + """ + super(TEXT, self).__init__(length=length, **kw) + + +class TINYTEXT(_StringType): + """MySQL TINYTEXT type, for text up to 2^8 characters.""" + + __visit_name__ = 'TINYTEXT' + + def __init__(self, **kwargs): + """Construct a TINYTEXT. + + :param charset: Optional, a column-level character set for this string + value. Takes precedence to 'ascii' or 'unicode' short-hand. + + :param collation: Optional, a column-level collation for this string + value. Takes precedence to 'binary' short-hand. + + :param ascii: Defaults to False: short-hand for the ``latin1`` + character set, generates ASCII in schema. + + :param unicode: Defaults to False: short-hand for the ``ucs2`` + character set, generates UNICODE in schema. + + :param national: Optional. If true, use the server's configured + national character set. + + :param binary: Defaults to False: short-hand, pick the binary + collation type that matches the column's character set. Generates + BINARY in schema. This does not affect the type of data stored, + only the collation of character data. + + """ + super(TINYTEXT, self).__init__(**kwargs) + + +class MEDIUMTEXT(_StringType): + """MySQL MEDIUMTEXT type, for text up to 2^24 characters.""" + + __visit_name__ = 'MEDIUMTEXT' + + def __init__(self, **kwargs): + """Construct a MEDIUMTEXT. + + :param charset: Optional, a column-level character set for this string + value. Takes precedence to 'ascii' or 'unicode' short-hand. + + :param collation: Optional, a column-level collation for this string + value. Takes precedence to 'binary' short-hand. + + :param ascii: Defaults to False: short-hand for the ``latin1`` + character set, generates ASCII in schema. + + :param unicode: Defaults to False: short-hand for the ``ucs2`` + character set, generates UNICODE in schema. + + :param national: Optional. If true, use the server's configured + national character set. + + :param binary: Defaults to False: short-hand, pick the binary + collation type that matches the column's character set. Generates + BINARY in schema. This does not affect the type of data stored, + only the collation of character data. + + """ + super(MEDIUMTEXT, self).__init__(**kwargs) + + +class LONGTEXT(_StringType): + """MySQL LONGTEXT type, for text up to 2^32 characters.""" + + __visit_name__ = 'LONGTEXT' + + def __init__(self, **kwargs): + """Construct a LONGTEXT. + + :param charset: Optional, a column-level character set for this string + value. Takes precedence to 'ascii' or 'unicode' short-hand. + + :param collation: Optional, a column-level collation for this string + value. Takes precedence to 'binary' short-hand. + + :param ascii: Defaults to False: short-hand for the ``latin1`` + character set, generates ASCII in schema. + + :param unicode: Defaults to False: short-hand for the ``ucs2`` + character set, generates UNICODE in schema. 
+ + :param national: Optional. If true, use the server's configured + national character set. + + :param binary: Defaults to False: short-hand, pick the binary + collation type that matches the column's character set. Generates + BINARY in schema. This does not affect the type of data stored, + only the collation of character data. + + """ + super(LONGTEXT, self).__init__(**kwargs) + + +class VARCHAR(_StringType, sqltypes.VARCHAR): + """MySQL VARCHAR type, for variable-length character data.""" + + __visit_name__ = 'VARCHAR' + + def __init__(self, length=None, **kwargs): + """Construct a VARCHAR. + + :param charset: Optional, a column-level character set for this string + value. Takes precedence to 'ascii' or 'unicode' short-hand. + + :param collation: Optional, a column-level collation for this string + value. Takes precedence to 'binary' short-hand. + + :param ascii: Defaults to False: short-hand for the ``latin1`` + character set, generates ASCII in schema. + + :param unicode: Defaults to False: short-hand for the ``ucs2`` + character set, generates UNICODE in schema. + + :param national: Optional. If true, use the server's configured + national character set. + + :param binary: Defaults to False: short-hand, pick the binary + collation type that matches the column's character set. Generates + BINARY in schema. This does not affect the type of data stored, + only the collation of character data. + + """ + super(VARCHAR, self).__init__(length=length, **kwargs) + + +class CHAR(_StringType, sqltypes.CHAR): + """MySQL CHAR type, for fixed-length character data.""" + + __visit_name__ = 'CHAR' + + def __init__(self, length=None, **kwargs): + """Construct a CHAR. + + :param length: Maximum data length, in characters. + + :param binary: Optional, use the default binary collation for the + national character set. This does not affect the type of data + stored, use a BINARY type for binary data. + + :param collation: Optional, request a particular collation. Must be + compatible with the national character set. + + """ + super(CHAR, self).__init__(length=length, **kwargs) + + @classmethod + def _adapt_string_for_cast(self, type_): + # copy the given string type into a CHAR + # for the purposes of rendering a CAST expression + type_ = sqltypes.to_instance(type_) + if isinstance(type_, sqltypes.CHAR): + return type_ + elif isinstance(type_, _StringType): + return CHAR( + length=type_.length, + charset=type_.charset, + collation=type_.collation, + ascii=type_.ascii, + binary=type_.binary, + unicode=type_.unicode, + national=False # not supported in CAST + ) + else: + return CHAR(length=type_.length) + + +class NVARCHAR(_StringType, sqltypes.NVARCHAR): + """MySQL NVARCHAR type. + + For variable-length character data in the server's configured national + character set. + """ + + __visit_name__ = 'NVARCHAR' + + def __init__(self, length=None, **kwargs): + """Construct an NVARCHAR. + + :param length: Maximum data length, in characters. + + :param binary: Optional, use the default binary collation for the + national character set. This does not affect the type of data + stored, use a BINARY type for binary data. + + :param collation: Optional, request a particular collation. Must be + compatible with the national character set. + + """ + kwargs['national'] = True + super(NVARCHAR, self).__init__(length=length, **kwargs) + + +class NCHAR(_StringType, sqltypes.NCHAR): + """MySQL NCHAR type. + + For fixed-length character data in the server's configured national + character set. 
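+
+    E.g., an illustrative column declaration::
+
+        Column('label', NCHAR(10))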
+ """ + + __visit_name__ = 'NCHAR' + + def __init__(self, length=None, **kwargs): + """Construct an NCHAR. + + :param length: Maximum data length, in characters. + + :param binary: Optional, use the default binary collation for the + national character set. This does not affect the type of data + stored, use a BINARY type for binary data. + + :param collation: Optional, request a particular collation. Must be + compatible with the national character set. + + """ + kwargs['national'] = True + super(NCHAR, self).__init__(length=length, **kwargs) + + +class TINYBLOB(sqltypes._Binary): + """MySQL TINYBLOB type, for binary data up to 2^8 bytes.""" + + __visit_name__ = 'TINYBLOB' + + +class MEDIUMBLOB(sqltypes._Binary): + """MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes.""" + + __visit_name__ = 'MEDIUMBLOB' + + +class LONGBLOB(sqltypes._Binary): + """MySQL LONGBLOB type, for binary data up to 2^32 bytes.""" + + __visit_name__ = 'LONGBLOB' + + +class _EnumeratedValues(_StringType): + def _init_values(self, values, kw): + self.quoting = kw.pop('quoting', 'auto') + + if self.quoting == 'auto' and len(values): + # What quoting character are we using? + q = None + for e in values: + if len(e) == 0: + self.quoting = 'unquoted' + break + elif q is None: + q = e[0] + + if len(e) == 1 or e[0] != q or e[-1] != q: + self.quoting = 'unquoted' + break + else: + self.quoting = 'quoted' + + if self.quoting == 'quoted': + util.warn_deprecated( + 'Manually quoting %s value literals is deprecated. Supply ' + 'unquoted values and use the quoting= option in cases of ' + 'ambiguity.' % self.__class__.__name__) + + values = self._strip_values(values) + + self._enumerated_values = values + length = max([len(v) for v in values] + [0]) + return values, length + + @classmethod + def _strip_values(cls, values): + strip_values = [] + for a in values: + if a[0:1] == '"' or a[0:1] == "'": + # strip enclosing quotes and unquote interior + a = a[1:-1].replace(a[0] * 2, a[0]) + strip_values.append(a) + return strip_values + + +class ENUM(sqltypes.Enum, _EnumeratedValues): + """MySQL ENUM type.""" + + __visit_name__ = 'ENUM' + + def __init__(self, *enums, **kw): + """Construct an ENUM. + + E.g.:: + + Column('myenum', ENUM("foo", "bar", "baz")) + + :param enums: The range of valid values for this ENUM. Values will be + quoted when generating the schema according to the quoting flag (see + below). + + :param strict: Defaults to False: ensure that a given value is in this + ENUM's range of permissible values when inserting or updating rows. + Note that MySQL will not raise a fatal error if you attempt to store + an out of range value- an alternate value will be stored instead. + (See MySQL ENUM documentation.) + + :param charset: Optional, a column-level character set for this string + value. Takes precedence to 'ascii' or 'unicode' short-hand. + + :param collation: Optional, a column-level collation for this string + value. Takes precedence to 'binary' short-hand. + + :param ascii: Defaults to False: short-hand for the ``latin1`` + character set, generates ASCII in schema. + + :param unicode: Defaults to False: short-hand for the ``ucs2`` + character set, generates UNICODE in schema. + + :param binary: Defaults to False: short-hand, pick the binary + collation type that matches the column's character set. Generates + BINARY in schema. This does not affect the type of data stored, + only the collation of character data. + + :param quoting: Defaults to 'auto': automatically determine enum value + quoting. 
If all enum values are surrounded by the same quoting + character, then use 'quoted' mode. Otherwise, use 'unquoted' mode. + + 'quoted': values in enums are already quoted, they will be used + directly when generating the schema - this usage is deprecated. + + 'unquoted': values in enums are not quoted, they will be escaped and + surrounded by single quotes when generating the schema. + + Previous versions of this type always required manually quoted + values to be supplied; future versions will always quote the string + literals for you. This is a transitional option. + + """ + values, length = self._init_values(enums, kw) + self.strict = kw.pop('strict', False) + kw.pop('metadata', None) + kw.pop('schema', None) + kw.pop('name', None) + kw.pop('quote', None) + kw.pop('native_enum', None) + kw.pop('inherit_schema', None) + kw.pop('_create_events', None) + _StringType.__init__(self, length=length, **kw) + sqltypes.Enum.__init__(self, *values) + + def __repr__(self): + return util.generic_repr( + self, to_inspect=[ENUM, _StringType, sqltypes.Enum]) + + def bind_processor(self, dialect): + super_convert = super(ENUM, self).bind_processor(dialect) + + def process(value): + if self.strict and value is not None and value not in self.enums: + raise exc.InvalidRequestError('"%s" not a valid value for ' + 'this enum' % value) + if super_convert: + return super_convert(value) + else: + return value + return process + + def adapt(self, cls, **kw): + if issubclass(cls, ENUM): + kw['strict'] = self.strict + return sqltypes.Enum.adapt(self, cls, **kw) + + +class SET(_EnumeratedValues): + """MySQL SET type.""" + + __visit_name__ = 'SET' + + def __init__(self, *values, **kw): + """Construct a SET. + + E.g.:: + + Column('myset', SET("foo", "bar", "baz")) + + + The list of potential values is required in the case that this + set will be used to generate DDL for a table, or if the + :paramref:`.SET.retrieve_as_bitwise` flag is set to True. + + :param values: The range of valid values for this SET. + + :param convert_unicode: Same flag as that of + :paramref:`.String.convert_unicode`. + + :param collation: same as that of :paramref:`.String.collation` + + :param charset: same as that of :paramref:`.VARCHAR.charset`. + + :param ascii: same as that of :paramref:`.VARCHAR.ascii`. + + :param unicode: same as that of :paramref:`.VARCHAR.unicode`. + + :param binary: same as that of :paramref:`.VARCHAR.binary`. + + :param quoting: Defaults to 'auto': automatically determine set value + quoting. If all values are surrounded by the same quoting + character, then use 'quoted' mode. Otherwise, use 'unquoted' mode. + + 'quoted': values in enums are already quoted, they will be used + directly when generating the schema - this usage is deprecated. + + 'unquoted': values in enums are not quoted, they will be escaped and + surrounded by single quotes when generating the schema. + + Previous versions of this type always required manually quoted + values to be supplied; future versions will always quote the string + literals for you. This is a transitional option. + + .. versionadded:: 0.9.0 + + :param retrieve_as_bitwise: if True, the data for the set type will be + persisted and selected using an integer value, where a set is coerced + into a bitwise mask for persistence. MySQL allows this mode which + has the advantage of being able to store values unambiguously, + such as the blank string ``''``. 
The datatype will appear + as the expression ``col + 0`` in a SELECT statement, so that the + value is coerced into an integer value in result sets. + This flag is required if one wishes + to persist a set that can store the blank string ``''`` as a value. + + .. warning:: + + When using :paramref:`.mysql.SET.retrieve_as_bitwise`, it is + essential that the list of set values is expressed in the + **exact same order** as exists on the MySQL database. + + .. versionadded:: 1.0.0 + + + """ + self.retrieve_as_bitwise = kw.pop('retrieve_as_bitwise', False) + values, length = self._init_values(values, kw) + self.values = tuple(values) + if not self.retrieve_as_bitwise and '' in values: + raise exc.ArgumentError( + "Can't use the blank value '' in a SET without " + "setting retrieve_as_bitwise=True") + if self.retrieve_as_bitwise: + self._bitmap = dict( + (value, 2 ** idx) + for idx, value in enumerate(self.values) + ) + self._bitmap.update( + (2 ** idx, value) + for idx, value in enumerate(self.values) + ) + kw.setdefault('length', length) + super(SET, self).__init__(**kw) + + def column_expression(self, colexpr): + if self.retrieve_as_bitwise: + return colexpr + 0 + else: + return colexpr + + def result_processor(self, dialect, coltype): + if self.retrieve_as_bitwise: + def process(value): + if value is not None: + value = int(value) + + return set( + util.map_bits(self._bitmap.__getitem__, value) + ) + else: + return None + else: + super_convert = super(SET, self).result_processor(dialect, coltype) + + def process(value): + if isinstance(value, util.string_types): + # MySQLdb returns a string, let's parse + if super_convert: + value = super_convert(value) + return set(re.findall(r'[^,]+', value)) + else: + # mysql-connector-python does a naive + # split(",") which throws in an empty string + if value is not None: + value.discard('') + return value + return process + + def bind_processor(self, dialect): + super_convert = super(SET, self).bind_processor(dialect) + if self.retrieve_as_bitwise: + def process(value): + if value is None: + return None + elif isinstance(value, util.int_types + util.string_types): + if super_convert: + return super_convert(value) + else: + return value + else: + int_value = 0 + for v in value: + int_value |= self._bitmap[v] + return int_value + else: + + def process(value): + # accept strings and int (actually bitflag) values directly + if value is not None and not isinstance( + value, util.int_types + util.string_types): + value = ",".join(value) + + if super_convert: + return super_convert(value) + else: + return value + return process + + def adapt(self, impltype, **kw): + kw['retrieve_as_bitwise'] = self.retrieve_as_bitwise + return util.constructor_copy( + self, impltype, + *self.values, + **kw + ) + +# old names +MSTime = TIME +MSSet = SET +MSEnum = ENUM +MSLongBlob = LONGBLOB +MSMediumBlob = MEDIUMBLOB +MSTinyBlob = TINYBLOB +MSBlob = BLOB +MSBinary = BINARY +MSVarBinary = VARBINARY +MSNChar = NCHAR +MSNVarChar = NVARCHAR +MSChar = CHAR +MSString = VARCHAR +MSLongText = LONGTEXT +MSMediumText = MEDIUMTEXT +MSTinyText = TINYTEXT +MSText = TEXT +MSYear = YEAR +MSTimeStamp = TIMESTAMP +MSBit = BIT +MSSmallInteger = SMALLINT +MSTinyInteger = TINYINT +MSMediumInteger = MEDIUMINT +MSBigInteger = BIGINT +MSNumeric = NUMERIC +MSDecimal = DECIMAL +MSDouble = DOUBLE +MSReal = REAL +MSFloat = FLOAT +MSInteger = INTEGER + +colspecs = { + _IntegerType: _IntegerType, + _NumericType: _NumericType, + _FloatType: _FloatType, + sqltypes.Numeric: NUMERIC, + sqltypes.Float: 
FLOAT, + sqltypes.Time: TIME, + sqltypes.Enum: ENUM, + sqltypes.MatchType: _MatchType +} + +# Everything 3.23 through 5.1 excepting OpenGIS types. +ischema_names = { + 'bigint': BIGINT, + 'binary': BINARY, + 'bit': BIT, + 'blob': BLOB, + 'boolean': BOOLEAN, + 'char': CHAR, + 'date': DATE, + 'datetime': DATETIME, + 'decimal': DECIMAL, + 'double': DOUBLE, + 'enum': ENUM, + 'fixed': DECIMAL, + 'float': FLOAT, + 'int': INTEGER, + 'integer': INTEGER, + 'longblob': LONGBLOB, + 'longtext': LONGTEXT, + 'mediumblob': MEDIUMBLOB, + 'mediumint': MEDIUMINT, + 'mediumtext': MEDIUMTEXT, + 'nchar': NCHAR, + 'nvarchar': NVARCHAR, + 'numeric': NUMERIC, + 'set': SET, + 'smallint': SMALLINT, + 'text': TEXT, + 'time': TIME, + 'timestamp': TIMESTAMP, + 'tinyblob': TINYBLOB, + 'tinyint': TINYINT, + 'tinytext': TINYTEXT, + 'varbinary': VARBINARY, + 'varchar': VARCHAR, + 'year': YEAR, +} + + +class MySQLExecutionContext(default.DefaultExecutionContext): + + def should_autocommit_text(self, statement): + return AUTOCOMMIT_RE.match(statement) + + +class MySQLCompiler(compiler.SQLCompiler): + + render_table_with_column_in_update_from = True + """Overridden from base SQLCompiler value""" + + extract_map = compiler.SQLCompiler.extract_map.copy() + extract_map.update({'milliseconds': 'millisecond'}) + + def visit_random_func(self, fn, **kw): + return "rand%s" % self.function_argspec(fn) + + def visit_utc_timestamp_func(self, fn, **kw): + return "UTC_TIMESTAMP" + + def visit_sysdate_func(self, fn, **kw): + return "SYSDATE()" + + def visit_concat_op_binary(self, binary, operator, **kw): + return "concat(%s, %s)" % (self.process(binary.left), + self.process(binary.right)) + + def visit_match_op_binary(self, binary, operator, **kw): + return "MATCH (%s) AGAINST (%s IN BOOLEAN MODE)" % \ + (self.process(binary.left), self.process(binary.right)) + + def get_from_hint_text(self, table, text): + return text + + def visit_typeclause(self, typeclause, type_=None): + if type_ is None: + type_ = typeclause.type.dialect_impl(self.dialect) + if isinstance(type_, sqltypes.TypeDecorator): + return self.visit_typeclause(typeclause, type_.impl) + elif isinstance(type_, sqltypes.Integer): + if getattr(type_, 'unsigned', False): + return 'UNSIGNED INTEGER' + else: + return 'SIGNED INTEGER' + elif isinstance(type_, sqltypes.TIMESTAMP): + return 'DATETIME' + elif isinstance(type_, (sqltypes.DECIMAL, sqltypes.DateTime, + sqltypes.Date, sqltypes.Time)): + return self.dialect.type_compiler.process(type_) + elif isinstance(type_, sqltypes.String) \ + and not isinstance(type_, (ENUM, SET)): + adapted = CHAR._adapt_string_for_cast(type_) + return self.dialect.type_compiler.process(adapted) + elif isinstance(type_, sqltypes._Binary): + return 'BINARY' + elif isinstance(type_, sqltypes.NUMERIC): + return self.dialect.type_compiler.process( + type_).replace('NUMERIC', 'DECIMAL') + else: + return None + + def visit_cast(self, cast, **kwargs): + # No cast until 4, no decimals until 5. + if not self.dialect._supports_cast: + util.warn( + "Current MySQL version does not support " + "CAST; the CAST will be skipped.") + return self.process(cast.clause.self_group()) + + type_ = self.process(cast.typeclause) + if type_ is None: + util.warn( + "Datatype %s does not support CAST on MySQL; " + "the CAST will be skipped." 
% + self.dialect.type_compiler.process(cast.typeclause.type)) + return self.process(cast.clause.self_group()) + + return 'CAST(%s AS %s)' % (self.process(cast.clause), type_) + + def render_literal_value(self, value, type_): + value = super(MySQLCompiler, self).render_literal_value(value, type_) + if self.dialect._backslash_escapes: + value = value.replace('\\', '\\\\') + return value + + # override native_boolean=False behavior here, as + # MySQL still supports native boolean + def visit_true(self, element, **kw): + return "true" + + def visit_false(self, element, **kw): + return "false" + + def get_select_precolumns(self, select, **kw): + """Add special MySQL keywords in place of DISTINCT. + + .. note:: + + this usage is deprecated. :meth:`.Select.prefix_with` + should be used for special keywords at the start + of a SELECT. + + """ + if isinstance(select._distinct, util.string_types): + return select._distinct.upper() + " " + elif select._distinct: + return "DISTINCT " + else: + return "" + + def visit_join(self, join, asfrom=False, **kwargs): + return ''.join( + (self.process(join.left, asfrom=True, **kwargs), + (join.isouter and " LEFT OUTER JOIN " or " INNER JOIN "), + self.process(join.right, asfrom=True, **kwargs), + " ON ", + self.process(join.onclause, **kwargs))) + + def for_update_clause(self, select, **kw): + if select._for_update_arg.read: + return " LOCK IN SHARE MODE" + else: + return " FOR UPDATE" + + def limit_clause(self, select, **kw): + # MySQL supports: + # LIMIT + # LIMIT , + # and in server versions > 3.3: + # LIMIT OFFSET + # The latter is more readable for offsets but we're stuck with the + # former until we can refine dialects by server revision. + + limit_clause, offset_clause = select._limit_clause, \ + select._offset_clause + + if limit_clause is None and offset_clause is None: + return '' + elif offset_clause is not None: + # As suggested by the MySQL docs, need to apply an + # artificial limit if one wasn't provided + # http://dev.mysql.com/doc/refman/5.0/en/select.html + if limit_clause is None: + # hardwire the upper limit. Currently + # needed by OurSQL with Python 3 + # (https://bugs.launchpad.net/oursql/+bug/686232), + # but also is consistent with the usage of the upper + # bound as part of MySQL's "syntax" for OFFSET with + # no LIMIT + return ' \n LIMIT %s, %s' % ( + self.process(offset_clause, **kw), + "18446744073709551615") + else: + return ' \n LIMIT %s, %s' % ( + self.process(offset_clause, **kw), + self.process(limit_clause, **kw)) + else: + # No offset provided, so just use the limit + return ' \n LIMIT %s' % (self.process(limit_clause, **kw),) + + def update_limit_clause(self, update_stmt): + limit = update_stmt.kwargs.get('%s_limit' % self.dialect.name, None) + if limit: + return "LIMIT %s" % limit + else: + return None + + def update_tables_clause(self, update_stmt, from_table, + extra_froms, **kw): + return ', '.join(t._compiler_dispatch(self, asfrom=True, **kw) + for t in [from_table] + list(extra_froms)) + + def update_from_clause(self, update_stmt, from_table, + extra_froms, from_hints, **kw): + return None + + +# ug. "InnoDB needs indexes on foreign keys and referenced keys [...]. +# Starting with MySQL 4.1.2, these indexes are created automatically. +# In older versions, the indexes must be created explicitly or the +# creation of foreign key constraints fails." 
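+# Illustration (not part of the library code): the limit_clause() logic
+# above can be exercised without a database by compiling a statement
+# against the MySQL dialect. A minimal sketch, using only public APIs:
+#
+#   from sqlalchemy import Column, Integer, MetaData, Table, select
+#   from sqlalchemy.dialects import mysql
+#
+#   t = Table('t', MetaData(), Column('id', Integer))
+#   print(select([t]).limit(10).offset(5).compile(dialect=mysql.dialect()))
+#   # ... LIMIT %s, %s            (params: 5, 10)
+#   print(select([t]).offset(5).compile(dialect=mysql.dialect()))
+#   # ... LIMIT %s, 18446744073709551615   (the hardwired upper bound)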
+ +class MySQLDDLCompiler(compiler.DDLCompiler): + def create_table_constraints(self, table, **kw): + """Get table constraints.""" + constraint_string = super( + MySQLDDLCompiler, self).create_table_constraints(table, **kw) + + # why self.dialect.name and not 'mysql'? because of drizzle + is_innodb = 'engine' in table.dialect_options[self.dialect.name] and \ + table.dialect_options[self.dialect.name][ + 'engine'].lower() == 'innodb' + + auto_inc_column = table._autoincrement_column + + if is_innodb and \ + auto_inc_column is not None and \ + auto_inc_column is not list(table.primary_key)[0]: + if constraint_string: + constraint_string += ", \n\t" + constraint_string += "KEY %s (%s)" % ( + self.preparer.quote( + "idx_autoinc_%s" % auto_inc_column.name + ), + self.preparer.format_column(auto_inc_column) + ) + + return constraint_string + + def get_column_specification(self, column, **kw): + """Builds column DDL.""" + + colspec = [ + self.preparer.format_column(column), + self.dialect.type_compiler.process( + column.type, type_expression=column) + ] + + is_timestamp = isinstance(column.type, sqltypes.TIMESTAMP) + + if not column.nullable: + colspec.append('NOT NULL') + + # see: http://docs.sqlalchemy.org/en/latest/dialects/ + # mysql.html#mysql_timestamp_null + elif column.nullable and is_timestamp: + colspec.append('NULL') + + default = self.get_column_default_string(column) + if default is not None: + colspec.append('DEFAULT ' + default) + + if column.table is not None \ + and column is column.table._autoincrement_column and \ + column.server_default is None: + colspec.append('AUTO_INCREMENT') + + return ' '.join(colspec) + + def post_create_table(self, table): + """Build table-level CREATE options like ENGINE and COLLATE.""" + + table_opts = [] + + opts = dict( + ( + k[len(self.dialect.name) + 1:].upper(), + v + ) + for k, v in table.kwargs.items() + if k.startswith('%s_' % self.dialect.name) + ) + + for opt in topological.sort([ + ('DEFAULT_CHARSET', 'COLLATE'), + ('DEFAULT_CHARACTER_SET', 'COLLATE'), + ('PARTITION_BY', 'PARTITIONS'), # only for test consistency + ], opts): + arg = opts[opt] + if opt in _options_of_type_string: + arg = "'%s'" % arg.replace("\\", "\\\\").replace("'", "''") + + if opt in ('DATA_DIRECTORY', 'INDEX_DIRECTORY', + 'DEFAULT_CHARACTER_SET', 'CHARACTER_SET', + 'DEFAULT_CHARSET', + 'DEFAULT_COLLATE', 'PARTITION_BY'): + opt = opt.replace('_', ' ') + + joiner = '=' + if opt in ('TABLESPACE', 'DEFAULT CHARACTER SET', + 'CHARACTER SET', 'COLLATE', + 'PARTITION BY', 'PARTITIONS'): + joiner = ' ' + + table_opts.append(joiner.join((opt, arg))) + return ' '.join(table_opts) + + def visit_create_index(self, create): + index = create.element + self._verify_index_table(index) + preparer = self.preparer + table = preparer.format_table(index.table) + columns = [self.sql_compiler.process(expr, include_table=False, + literal_binds=True) + for expr in index.expressions] + + name = self._prepared_index_name(index) + + text = "CREATE " + if index.unique: + text += "UNIQUE " + text += "INDEX %s ON %s " % (name, table) + + length = index.dialect_options['mysql']['length'] + if length is not None: + + if isinstance(length, dict): + # length value can be a (column_name --> integer value) + # mapping specifying the prefix length for each column of the + # index + columns = ', '.join( + '%s(%d)' % (expr, length[col.name]) if col.name in length + else + ( + '%s(%d)' % (expr, length[expr]) if expr in length + else '%s' % expr + ) + for col, expr in zip(index.expressions, columns) + ) + 
else: + # or can be an integer value specifying the same + # prefix length for all columns of the index + columns = ', '.join( + '%s(%d)' % (col, length) + for col in columns + ) + else: + columns = ', '.join(columns) + text += '(%s)' % columns + + using = index.dialect_options['mysql']['using'] + if using is not None: + text += " USING %s" % (preparer.quote(using)) + + return text + + def visit_primary_key_constraint(self, constraint): + text = super(MySQLDDLCompiler, self).\ + visit_primary_key_constraint(constraint) + using = constraint.dialect_options['mysql']['using'] + if using: + text += " USING %s" % (self.preparer.quote(using)) + return text + + def visit_drop_index(self, drop): + index = drop.element + + return "\nDROP INDEX %s ON %s" % ( + self._prepared_index_name(index, + include_schema=False), + self.preparer.format_table(index.table)) + + def visit_drop_constraint(self, drop): + constraint = drop.element + if isinstance(constraint, sa_schema.ForeignKeyConstraint): + qual = "FOREIGN KEY " + const = self.preparer.format_constraint(constraint) + elif isinstance(constraint, sa_schema.PrimaryKeyConstraint): + qual = "PRIMARY KEY " + const = "" + elif isinstance(constraint, sa_schema.UniqueConstraint): + qual = "INDEX " + const = self.preparer.format_constraint(constraint) + else: + qual = "" + const = self.preparer.format_constraint(constraint) + return "ALTER TABLE %s DROP %s%s" % \ + (self.preparer.format_table(constraint.table), + qual, const) + + def define_constraint_match(self, constraint): + if constraint.match is not None: + raise exc.CompileError( + "MySQL ignores the 'MATCH' keyword while at the same time " + "causes ON UPDATE/ON DELETE clauses to be ignored.") + return "" + + +class MySQLTypeCompiler(compiler.GenericTypeCompiler): + def _extend_numeric(self, type_, spec): + "Extend a numeric-type declaration with MySQL specific extensions." + + if not self._mysql_type(type_): + return spec + + if type_.unsigned: + spec += ' UNSIGNED' + if type_.zerofill: + spec += ' ZEROFILL' + return spec + + def _extend_string(self, type_, defaults, spec): + """Extend a string-type declaration with standard SQL CHARACTER SET / + COLLATE annotations and MySQL specific extensions. + + """ + + def attr(name): + return getattr(type_, name, defaults.get(name)) + + if attr('charset'): + charset = 'CHARACTER SET %s' % attr('charset') + elif attr('ascii'): + charset = 'ASCII' + elif attr('unicode'): + charset = 'UNICODE' + else: + charset = None + + if attr('collation'): + collation = 'COLLATE %s' % type_.collation + elif attr('binary'): + collation = 'BINARY' + else: + collation = None + + if attr('national'): + # NATIONAL (aka NCHAR/NVARCHAR) trumps charsets. 
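+        # Illustration (not part of the library code): how these charset /
+        # collation / NATIONAL branches render, compiled against the MySQL
+        # dialect. A minimal sketch, using only public APIs:
+        #
+        #   from sqlalchemy.dialects import mysql
+        #   d = mysql.dialect()
+        #   mysql.VARCHAR(30, charset='utf8', binary=True).compile(dialect=d)
+        #   # -> 'VARCHAR(30) CHARACTER SET utf8 BINARY'
+        #   mysql.NVARCHAR(30).compile(dialect=d)
+        #   # -> 'NATIONAL VARCHAR(30)'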
+ return ' '.join([c for c in ('NATIONAL', spec, collation) + if c is not None]) + return ' '.join([c for c in (spec, charset, collation) + if c is not None]) + + def _mysql_type(self, type_): + return isinstance(type_, (_StringType, _NumericType)) + + def visit_NUMERIC(self, type_, **kw): + if type_.precision is None: + return self._extend_numeric(type_, "NUMERIC") + elif type_.scale is None: + return self._extend_numeric(type_, + "NUMERIC(%(precision)s)" % + {'precision': type_.precision}) + else: + return self._extend_numeric(type_, + "NUMERIC(%(precision)s, %(scale)s)" % + {'precision': type_.precision, + 'scale': type_.scale}) + + def visit_DECIMAL(self, type_, **kw): + if type_.precision is None: + return self._extend_numeric(type_, "DECIMAL") + elif type_.scale is None: + return self._extend_numeric(type_, + "DECIMAL(%(precision)s)" % + {'precision': type_.precision}) + else: + return self._extend_numeric(type_, + "DECIMAL(%(precision)s, %(scale)s)" % + {'precision': type_.precision, + 'scale': type_.scale}) + + def visit_DOUBLE(self, type_, **kw): + if type_.precision is not None and type_.scale is not None: + return self._extend_numeric(type_, + "DOUBLE(%(precision)s, %(scale)s)" % + {'precision': type_.precision, + 'scale': type_.scale}) + else: + return self._extend_numeric(type_, 'DOUBLE') + + def visit_REAL(self, type_, **kw): + if type_.precision is not None and type_.scale is not None: + return self._extend_numeric(type_, + "REAL(%(precision)s, %(scale)s)" % + {'precision': type_.precision, + 'scale': type_.scale}) + else: + return self._extend_numeric(type_, 'REAL') + + def visit_FLOAT(self, type_, **kw): + if self._mysql_type(type_) and \ + type_.scale is not None and \ + type_.precision is not None: + return self._extend_numeric( + type_, "FLOAT(%s, %s)" % (type_.precision, type_.scale)) + elif type_.precision is not None: + return self._extend_numeric(type_, + "FLOAT(%s)" % (type_.precision,)) + else: + return self._extend_numeric(type_, "FLOAT") + + def visit_INTEGER(self, type_, **kw): + if self._mysql_type(type_) and type_.display_width is not None: + return self._extend_numeric( + type_, "INTEGER(%(display_width)s)" % + {'display_width': type_.display_width}) + else: + return self._extend_numeric(type_, "INTEGER") + + def visit_BIGINT(self, type_, **kw): + if self._mysql_type(type_) and type_.display_width is not None: + return self._extend_numeric( + type_, "BIGINT(%(display_width)s)" % + {'display_width': type_.display_width}) + else: + return self._extend_numeric(type_, "BIGINT") + + def visit_MEDIUMINT(self, type_, **kw): + if self._mysql_type(type_) and type_.display_width is not None: + return self._extend_numeric( + type_, "MEDIUMINT(%(display_width)s)" % + {'display_width': type_.display_width}) + else: + return self._extend_numeric(type_, "MEDIUMINT") + + def visit_TINYINT(self, type_, **kw): + if self._mysql_type(type_) and type_.display_width is not None: + return self._extend_numeric(type_, + "TINYINT(%s)" % type_.display_width) + else: + return self._extend_numeric(type_, "TINYINT") + + def visit_SMALLINT(self, type_, **kw): + if self._mysql_type(type_) and type_.display_width is not None: + return self._extend_numeric(type_, + "SMALLINT(%(display_width)s)" % + {'display_width': type_.display_width} + ) + else: + return self._extend_numeric(type_, "SMALLINT") + + def visit_BIT(self, type_, **kw): + if type_.length is not None: + return "BIT(%s)" % type_.length + else: + return "BIT" + + def visit_DATETIME(self, type_, **kw): + if getattr(type_, 'fsp', 
None): + return "DATETIME(%d)" % type_.fsp + else: + return "DATETIME" + + def visit_DATE(self, type_, **kw): + return "DATE" + + def visit_TIME(self, type_, **kw): + if getattr(type_, 'fsp', None): + return "TIME(%d)" % type_.fsp + else: + return "TIME" + + def visit_TIMESTAMP(self, type_, **kw): + if getattr(type_, 'fsp', None): + return "TIMESTAMP(%d)" % type_.fsp + else: + return "TIMESTAMP" + + def visit_YEAR(self, type_, **kw): + if type_.display_width is None: + return "YEAR" + else: + return "YEAR(%s)" % type_.display_width + + def visit_TEXT(self, type_, **kw): + if type_.length: + return self._extend_string(type_, {}, "TEXT(%d)" % type_.length) + else: + return self._extend_string(type_, {}, "TEXT") + + def visit_TINYTEXT(self, type_, **kw): + return self._extend_string(type_, {}, "TINYTEXT") + + def visit_MEDIUMTEXT(self, type_, **kw): + return self._extend_string(type_, {}, "MEDIUMTEXT") + + def visit_LONGTEXT(self, type_, **kw): + return self._extend_string(type_, {}, "LONGTEXT") + + def visit_VARCHAR(self, type_, **kw): + if type_.length: + return self._extend_string( + type_, {}, "VARCHAR(%d)" % type_.length) + else: + raise exc.CompileError( + "VARCHAR requires a length on dialect %s" % + self.dialect.name) + + def visit_CHAR(self, type_, **kw): + if type_.length: + return self._extend_string(type_, {}, "CHAR(%(length)s)" % + {'length': type_.length}) + else: + return self._extend_string(type_, {}, "CHAR") + + def visit_NVARCHAR(self, type_, **kw): + # We'll actually generate the equiv. "NATIONAL VARCHAR" instead + # of "NVARCHAR". + if type_.length: + return self._extend_string( + type_, {'national': True}, + "VARCHAR(%(length)s)" % {'length': type_.length}) + else: + raise exc.CompileError( + "NVARCHAR requires a length on dialect %s" % + self.dialect.name) + + def visit_NCHAR(self, type_, **kw): + # We'll actually generate the equiv. + # "NATIONAL CHAR" instead of "NCHAR". 
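+        # Illustration (not part of the library code): the fractional
+        # seconds (fsp) and NATIONAL handling in this type compiler can be
+        # verified without a server. A minimal sketch:
+        #
+        #   from sqlalchemy.dialects import mysql
+        #   d = mysql.dialect()
+        #   mysql.DATETIME(fsp=6).compile(dialect=d)   # -> 'DATETIME(6)'
+        #   mysql.TIMESTAMP(fsp=3).compile(dialect=d)  # -> 'TIMESTAMP(3)'
+        #   mysql.NCHAR(20).compile(dialect=d)         # -> 'NATIONAL CHAR(20)'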
+ if type_.length: + return self._extend_string( + type_, {'national': True}, + "CHAR(%(length)s)" % {'length': type_.length}) + else: + return self._extend_string(type_, {'national': True}, "CHAR") + + def visit_VARBINARY(self, type_, **kw): + return "VARBINARY(%d)" % type_.length + + def visit_large_binary(self, type_, **kw): + return self.visit_BLOB(type_) + + def visit_enum(self, type_, **kw): + if not type_.native_enum: + return super(MySQLTypeCompiler, self).visit_enum(type_) + else: + return self._visit_enumerated_values("ENUM", type_, type_.enums) + + def visit_BLOB(self, type_, **kw): + if type_.length: + return "BLOB(%d)" % type_.length + else: + return "BLOB" + + def visit_TINYBLOB(self, type_, **kw): + return "TINYBLOB" + + def visit_MEDIUMBLOB(self, type_, **kw): + return "MEDIUMBLOB" + + def visit_LONGBLOB(self, type_, **kw): + return "LONGBLOB" + + def _visit_enumerated_values(self, name, type_, enumerated_values): + quoted_enums = [] + for e in enumerated_values: + quoted_enums.append("'%s'" % e.replace("'", "''")) + return self._extend_string(type_, {}, "%s(%s)" % ( + name, ",".join(quoted_enums)) + ) + + def visit_ENUM(self, type_, **kw): + return self._visit_enumerated_values("ENUM", type_, + type_._enumerated_values) + + def visit_SET(self, type_, **kw): + return self._visit_enumerated_values("SET", type_, + type_._enumerated_values) + + def visit_BOOLEAN(self, type, **kw): + return "BOOL" + + +class MySQLIdentifierPreparer(compiler.IdentifierPreparer): + + reserved_words = RESERVED_WORDS + + def __init__(self, dialect, server_ansiquotes=False, **kw): + if not server_ansiquotes: + quote = "`" + else: + quote = '"' + + super(MySQLIdentifierPreparer, self).__init__( + dialect, + initial_quote=quote, + escape_quote=quote) + + def _quote_free_identifiers(self, *ids): + """Unilaterally identifier-quote any number of strings.""" + + return tuple([self.quote_identifier(i) for i in ids if i is not None]) + + +@log.class_logger +class MySQLDialect(default.DefaultDialect): + """Details of the MySQL dialect. + Not used directly in application code. + """ + + name = 'mysql' + supports_alter = True + + # MySQL has no true "boolean" type; we + # allow for the "true" and "false" keywords, however + supports_native_boolean = False + + # identifiers are 64, however aliases can be 255... + max_identifier_length = 255 + max_index_name_length = 64 + + supports_native_enum = True + + supports_sane_rowcount = True + supports_sane_multi_rowcount = False + supports_multivalues_insert = True + + default_paramstyle = 'format' + colspecs = colspecs + + statement_compiler = MySQLCompiler + ddl_compiler = MySQLDDLCompiler + type_compiler = MySQLTypeCompiler + ischema_names = ischema_names + preparer = MySQLIdentifierPreparer + + # default SQL compilation settings - + # these are modified upon initialize(), + # i.e. 
first connect + _backslash_escapes = True + _server_ansiquotes = False + + construct_arguments = [ + (sa_schema.Table, { + "*": None + }), + (sql.Update, { + "limit": None + }), + (sa_schema.PrimaryKeyConstraint, { + "using": None + }), + (sa_schema.Index, { + "using": None, + "length": None, + }) + ] + + def __init__(self, isolation_level=None, **kwargs): + kwargs.pop('use_ansiquotes', None) # legacy + default.DefaultDialect.__init__(self, **kwargs) + self.isolation_level = isolation_level + + def on_connect(self): + if self.isolation_level is not None: + def connect(conn): + self.set_isolation_level(conn, self.isolation_level) + return connect + else: + return None + + _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED', + 'READ COMMITTED', 'REPEATABLE READ']) + + def set_isolation_level(self, connection, level): + level = level.replace('_', ' ') + if level not in self._isolation_lookup: + raise exc.ArgumentError( + "Invalid value '%s' for isolation_level. " + "Valid isolation levels for %s are %s" % + (level, self.name, ", ".join(self._isolation_lookup)) + ) + cursor = connection.cursor() + cursor.execute("SET SESSION TRANSACTION ISOLATION LEVEL %s" % level) + cursor.execute("COMMIT") + cursor.close() + + def get_isolation_level(self, connection): + cursor = connection.cursor() + cursor.execute('SELECT @@tx_isolation') + val = cursor.fetchone()[0] + cursor.close() + if util.py3k and isinstance(val, bytes): + val = val.decode() + return val.upper().replace("-", " ") + + def do_commit(self, dbapi_connection): + """Execute a COMMIT.""" + + # COMMIT/ROLLBACK were introduced in 3.23.15. + # Yes, we have at least one user who has to talk to these old + # versions! + # + # Ignore commit/rollback if support isn't present, otherwise even + # basic operations via autocommit fail. 
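+        # Illustration (not part of the library code): the isolation_level
+        # hook above is normally driven through create_engine(). A minimal
+        # sketch; the hostname and credentials are placeholders:
+        #
+        #   from sqlalchemy import create_engine
+        #   engine = create_engine(
+        #       "mysql+mysqldb://scott:tiger@localhost/test",
+        #       isolation_level="READ COMMITTED")
+        #   # each new connection then runs:
+        #   #   SET SESSION TRANSACTION ISOLATION LEVEL READ COMMITTED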
+ try: + dbapi_connection.commit() + except Exception: + if self.server_version_info < (3, 23, 15): + args = sys.exc_info()[1].args + if args and args[0] == 1064: + return + raise + + def do_rollback(self, dbapi_connection): + """Execute a ROLLBACK.""" + + try: + dbapi_connection.rollback() + except Exception: + if self.server_version_info < (3, 23, 15): + args = sys.exc_info()[1].args + if args and args[0] == 1064: + return + raise + + def do_begin_twophase(self, connection, xid): + connection.execute(sql.text("XA BEGIN :xid"), xid=xid) + + def do_prepare_twophase(self, connection, xid): + connection.execute(sql.text("XA END :xid"), xid=xid) + connection.execute(sql.text("XA PREPARE :xid"), xid=xid) + + def do_rollback_twophase(self, connection, xid, is_prepared=True, + recover=False): + if not is_prepared: + connection.execute(sql.text("XA END :xid"), xid=xid) + connection.execute(sql.text("XA ROLLBACK :xid"), xid=xid) + + def do_commit_twophase(self, connection, xid, is_prepared=True, + recover=False): + if not is_prepared: + self.do_prepare_twophase(connection, xid) + connection.execute(sql.text("XA COMMIT :xid"), xid=xid) + + def do_recover_twophase(self, connection): + resultset = connection.execute("XA RECOVER") + return [row['data'][0:row['gtrid_length']] for row in resultset] + + def is_disconnect(self, e, connection, cursor): + if isinstance(e, (self.dbapi.OperationalError, + self.dbapi.ProgrammingError)): + return self._extract_error_code(e) in \ + (2006, 2013, 2014, 2045, 2055) + elif isinstance(e, self.dbapi.InterfaceError): + # if underlying connection is closed, + # this is the error you get + return "(0, '')" in str(e) + else: + return False + + def _compat_fetchall(self, rp, charset=None): + """Proxy result rows to smooth over MySQL-Python driver + inconsistencies.""" + + return [_DecodingRowProxy(row, charset) for row in rp.fetchall()] + + def _compat_fetchone(self, rp, charset=None): + """Proxy a result row to smooth over MySQL-Python driver + inconsistencies.""" + + return _DecodingRowProxy(rp.fetchone(), charset) + + def _compat_first(self, rp, charset=None): + """Proxy a result row to smooth over MySQL-Python driver + inconsistencies.""" + + return _DecodingRowProxy(rp.first(), charset) + + def _extract_error_code(self, exception): + raise NotImplementedError() + + def _get_default_schema_name(self, connection): + return connection.execute('SELECT DATABASE()').scalar() + + def has_table(self, connection, table_name, schema=None): + # SHOW TABLE STATUS LIKE and SHOW TABLES LIKE do not function properly + # on macosx (and maybe win?) with multibyte table names. + # + # TODO: if this is not a problem on win, make the strategy swappable + # based on platform. DESCRIBE is slower. 
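+        # Illustration (not part of the library code): the XA methods above
+        # back SQLAlchemy's generic two-phase API. A minimal sketch, assuming
+        # an `engine` and a table `t` already exist and the server/driver
+        # support XA:
+        #
+        #   conn = engine.connect()
+        #   xa = conn.begin_twophase()       # XA BEGIN <xid>
+        #   conn.execute(t.insert(), dict(id=1))
+        #   xa.prepare()                     # XA END <xid>; XA PREPARE <xid>
+        #   xa.commit()                      # XA COMMIT <xid>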
+ + # [ticket:726] + # full_name = self.identifier_preparer.format_table(table, + # use_schema=True) + + full_name = '.'.join(self.identifier_preparer._quote_free_identifiers( + schema, table_name)) + + st = "DESCRIBE %s" % full_name + rs = None + try: + try: + rs = connection.execution_options( + skip_user_error_events=True).execute(st) + have = rs.fetchone() is not None + rs.close() + return have + except exc.DBAPIError as e: + if self._extract_error_code(e.orig) == 1146: + return False + raise + finally: + if rs: + rs.close() + + def initialize(self, connection): + self._connection_charset = self._detect_charset(connection) + self._detect_ansiquotes(connection) + if self._server_ansiquotes: + # if ansiquotes == True, build a new IdentifierPreparer + # with the new setting + self.identifier_preparer = self.preparer( + self, server_ansiquotes=self._server_ansiquotes) + + default.DefaultDialect.initialize(self, connection) + + @property + def _supports_cast(self): + return self.server_version_info is None or \ + self.server_version_info >= (4, 0, 2) + + @reflection.cache + def get_schema_names(self, connection, **kw): + rp = connection.execute("SHOW schemas") + return [r[0] for r in rp] + + @reflection.cache + def get_table_names(self, connection, schema=None, **kw): + """Return a Unicode SHOW TABLES from a given schema.""" + if schema is not None: + current_schema = schema + else: + current_schema = self.default_schema_name + + charset = self._connection_charset + if self.server_version_info < (5, 0, 2): + rp = connection.execute( + "SHOW TABLES FROM %s" % + self.identifier_preparer.quote_identifier(current_schema)) + return [row[0] for + row in self._compat_fetchall(rp, charset=charset)] + else: + rp = connection.execute( + "SHOW FULL TABLES FROM %s" % + self.identifier_preparer.quote_identifier(current_schema)) + + return [row[0] + for row in self._compat_fetchall(rp, charset=charset) + if row[1] == 'BASE TABLE'] + + @reflection.cache + def get_view_names(self, connection, schema=None, **kw): + if self.server_version_info < (5, 0, 2): + raise NotImplementedError + if schema is None: + schema = self.default_schema_name + if self.server_version_info < (5, 0, 2): + return self.get_table_names(connection, schema) + charset = self._connection_charset + rp = connection.execute( + "SHOW FULL TABLES FROM %s" % + self.identifier_preparer.quote_identifier(schema)) + return [row[0] + for row in self._compat_fetchall(rp, charset=charset) + if row[1] in ('VIEW', 'SYSTEM VIEW')] + + @reflection.cache + def get_table_options(self, connection, table_name, schema=None, **kw): + + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw) + return parsed_state.table_options + + @reflection.cache + def get_columns(self, connection, table_name, schema=None, **kw): + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw) + return parsed_state.columns + + @reflection.cache + def get_pk_constraint(self, connection, table_name, schema=None, **kw): + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw) + for key in parsed_state.keys: + if key['type'] == 'PRIMARY': + # There can be only one. 
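+                # Illustration (not part of the library code): these
+                # reflection methods are reached through the Inspector API.
+                # A minimal sketch; 'user' is a placeholder table name:
+                #
+                #   from sqlalchemy import inspect
+                #   insp = inspect(engine)
+                #   insp.get_table_names()   # SHOW FULL TABLES, BASE TABLE
+                #                            # only, on modern servers
+                #   insp.get_pk_constraint('user')
+                #   # -> {'constrained_columns': ['id'], 'name': None}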
+ cols = [s[0] for s in key['columns']] + return {'constrained_columns': cols, 'name': None} + return {'constrained_columns': [], 'name': None} + + @reflection.cache + def get_foreign_keys(self, connection, table_name, schema=None, **kw): + + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw) + default_schema = None + + fkeys = [] + + for spec in parsed_state.constraints: + # only FOREIGN KEYs + ref_name = spec['table'][-1] + ref_schema = len(spec['table']) > 1 and \ + spec['table'][-2] or schema + + if not ref_schema: + if default_schema is None: + default_schema = \ + connection.dialect.default_schema_name + if schema == default_schema: + ref_schema = schema + + loc_names = spec['local'] + ref_names = spec['foreign'] + + con_kw = {} + for opt in ('onupdate', 'ondelete'): + if spec.get(opt, False): + con_kw[opt] = spec[opt] + + fkey_d = { + 'name': spec['name'], + 'constrained_columns': loc_names, + 'referred_schema': ref_schema, + 'referred_table': ref_name, + 'referred_columns': ref_names, + 'options': con_kw + } + fkeys.append(fkey_d) + return fkeys + + @reflection.cache + def get_indexes(self, connection, table_name, schema=None, **kw): + + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw) + + indexes = [] + for spec in parsed_state.keys: + unique = False + flavor = spec['type'] + if flavor == 'PRIMARY': + continue + if flavor == 'UNIQUE': + unique = True + elif flavor in (None, 'FULLTEXT', 'SPATIAL'): + pass + else: + self.logger.info( + "Converting unknown KEY type %s to a plain KEY", flavor) + pass + index_d = {} + index_d['name'] = spec['name'] + index_d['column_names'] = [s[0] for s in spec['columns']] + index_d['unique'] = unique + if flavor: + index_d['type'] = flavor + indexes.append(index_d) + return indexes + + @reflection.cache + def get_unique_constraints(self, connection, table_name, + schema=None, **kw): + parsed_state = self._parsed_state_or_create( + connection, table_name, schema, **kw) + + return [ + { + 'name': key['name'], + 'column_names': [col[0] for col in key['columns']], + 'duplicates_index': key['name'], + } + for key in parsed_state.keys + if key['type'] == 'UNIQUE' + ] + + @reflection.cache + def get_view_definition(self, connection, view_name, schema=None, **kw): + + charset = self._connection_charset + full_name = '.'.join(self.identifier_preparer._quote_free_identifiers( + schema, view_name)) + sql = self._show_create_table(connection, None, charset, + full_name=full_name) + return sql + + def _parsed_state_or_create(self, connection, table_name, + schema=None, **kw): + return self._setup_parser( + connection, + table_name, + schema, + info_cache=kw.get('info_cache', None) + ) + + @util.memoized_property + def _tabledef_parser(self): + """return the MySQLTableDefinitionParser, generate if needed. + + The deferred creation ensures that the dialect has + retrieved server version information first. 
+ + """ + if (self.server_version_info < (4, 1) and self._server_ansiquotes): + # ANSI_QUOTES doesn't affect SHOW CREATE TABLE on < 4.1 + preparer = self.preparer(self, server_ansiquotes=False) + else: + preparer = self.identifier_preparer + return MySQLTableDefinitionParser(self, preparer) + + @reflection.cache + def _setup_parser(self, connection, table_name, schema=None, **kw): + charset = self._connection_charset + parser = self._tabledef_parser + full_name = '.'.join(self.identifier_preparer._quote_free_identifiers( + schema, table_name)) + sql = self._show_create_table(connection, None, charset, + full_name=full_name) + if re.match(r'^CREATE (?:ALGORITHM)?.* VIEW', sql): + # Adapt views to something table-like. + columns = self._describe_table(connection, None, charset, + full_name=full_name) + sql = parser._describe_to_create(table_name, columns) + return parser.parse(sql, charset) + + def _detect_charset(self, connection): + raise NotImplementedError() + + def _detect_casing(self, connection): + """Sniff out identifier case sensitivity. + + Cached per-connection. This value can not change without a server + restart. + + """ + # http://dev.mysql.com/doc/refman/5.0/en/name-case-sensitivity.html + + charset = self._connection_charset + row = self._compat_first(connection.execute( + "SHOW VARIABLES LIKE 'lower_case_table_names'"), + charset=charset) + if not row: + cs = 0 + else: + # 4.0.15 returns OFF or ON according to [ticket:489] + # 3.23 doesn't, 4.0.27 doesn't.. + if row[1] == 'OFF': + cs = 0 + elif row[1] == 'ON': + cs = 1 + else: + cs = int(row[1]) + return cs + + def _detect_collations(self, connection): + """Pull the active COLLATIONS list from the server. + + Cached per-connection. + """ + + collations = {} + if self.server_version_info < (4, 1, 0): + pass + else: + charset = self._connection_charset + rs = connection.execute('SHOW COLLATION') + for row in self._compat_fetchall(rs, charset): + collations[row[0]] = row[1] + return collations + + def _detect_ansiquotes(self, connection): + """Detect and adjust for the ANSI_QUOTES sql mode.""" + + row = self._compat_first( + connection.execute("SHOW VARIABLES LIKE 'sql_mode'"), + charset=self._connection_charset) + + if not row: + mode = '' + else: + mode = row[1] or '' + # 4.0 + if mode.isdigit(): + mode_no = int(mode) + mode = (mode_no | 4 == mode_no) and 'ANSI_QUOTES' or '' + + self._server_ansiquotes = 'ANSI_QUOTES' in mode + + # as of MySQL 5.0.1 + self._backslash_escapes = 'NO_BACKSLASH_ESCAPES' not in mode + + def _show_create_table(self, connection, table, charset=None, + full_name=None): + """Run SHOW CREATE TABLE for a ``Table``.""" + + if full_name is None: + full_name = self.identifier_preparer.format_table(table) + st = "SHOW CREATE TABLE %s" % full_name + + rp = None + try: + rp = connection.execution_options( + skip_user_error_events=True).execute(st) + except exc.DBAPIError as e: + if self._extract_error_code(e.orig) == 1146: + raise exc.NoSuchTableError(full_name) + else: + raise + row = self._compat_first(rp, charset=charset) + if not row: + raise exc.NoSuchTableError(full_name) + return row[1].strip() + + return sql + + def _describe_table(self, connection, table, charset=None, + full_name=None): + """Run DESCRIBE for a ``Table`` and return processed rows.""" + + if full_name is None: + full_name = self.identifier_preparer.format_table(table) + st = "DESCRIBE %s" % full_name + + rp, rows = None, None + try: + try: + rp = connection.execution_options( + skip_user_error_events=True).execute(st) + except 
exc.DBAPIError as e: + if self._extract_error_code(e.orig) == 1146: + raise exc.NoSuchTableError(full_name) + else: + raise + rows = self._compat_fetchall(rp, charset=charset) + finally: + if rp: + rp.close() + return rows + + +class ReflectedState(object): + """Stores raw information about a SHOW CREATE TABLE statement.""" + + def __init__(self): + self.columns = [] + self.table_options = {} + self.table_name = None + self.keys = [] + self.constraints = [] + + +@log.class_logger +class MySQLTableDefinitionParser(object): + """Parses the results of a SHOW CREATE TABLE statement.""" + + def __init__(self, dialect, preparer): + self.dialect = dialect + self.preparer = preparer + self._prep_regexes() + + def parse(self, show_create, charset): + state = ReflectedState() + state.charset = charset + for line in re.split(r'\r?\n', show_create): + if line.startswith(' ' + self.preparer.initial_quote): + self._parse_column(line, state) + # a regular table options line + elif line.startswith(') '): + self._parse_table_options(line, state) + # an ANSI-mode table options line + elif line == ')': + pass + elif line.startswith('CREATE '): + self._parse_table_name(line, state) + # Not present in real reflection, but may be if + # loading from a file. + elif not line: + pass + else: + type_, spec = self._parse_constraints(line) + if type_ is None: + util.warn("Unknown schema content: %r" % line) + elif type_ == 'key': + state.keys.append(spec) + elif type_ == 'constraint': + state.constraints.append(spec) + else: + pass + return state + + def _parse_constraints(self, line): + """Parse a KEY or CONSTRAINT line. + + :param line: A line of SHOW CREATE TABLE output + """ + + # KEY + m = self._re_key.match(line) + if m: + spec = m.groupdict() + # convert columns into name, length pairs + spec['columns'] = self._parse_keyexprs(spec['columns']) + return 'key', spec + + # CONSTRAINT + m = self._re_constraint.match(line) + if m: + spec = m.groupdict() + spec['table'] = \ + self.preparer.unformat_identifiers(spec['table']) + spec['local'] = [c[0] + for c in self._parse_keyexprs(spec['local'])] + spec['foreign'] = [c[0] + for c in self._parse_keyexprs(spec['foreign'])] + return 'constraint', spec + + # PARTITION and SUBPARTITION + m = self._re_partition.match(line) + if m: + # Punt! + return 'partition', line + + # No match. + return (None, line) + + def _parse_table_name(self, line, state): + """Extract the table name. + + :param line: The first line of SHOW CREATE TABLE + """ + + regex, cleanup = self._pr_name + m = regex.match(line) + if m: + state.table_name = cleanup(m.group('name')) + + def _parse_table_options(self, line, state): + """Build a dictionary of all reflected table-level options. + + :param line: The final line of SHOW CREATE TABLE output. + """ + + options = {} + + if not line or line == ')': + pass + + else: + rest_of_line = line[:] + for regex, cleanup in self._pr_options: + m = regex.search(rest_of_line) + if not m: + continue + directive, value = m.group('directive'), m.group('val') + if cleanup: + value = cleanup(value) + options[directive.lower()] = value + rest_of_line = regex.sub('', rest_of_line) + + for nope in ('auto_increment', 'data directory', 'index directory'): + options.pop(nope, None) + + for opt, val in options.items(): + state.table_options['%s_%s' % (self.dialect.name, opt)] = val + + def _parse_column(self, line, state): + """Extract column details. + + Falls back to a 'minimal support' variant if full parse fails. 
+ + :param line: Any column-bearing line from SHOW CREATE TABLE + """ + + spec = None + m = self._re_column.match(line) + if m: + spec = m.groupdict() + spec['full'] = True + else: + m = self._re_column_loose.match(line) + if m: + spec = m.groupdict() + spec['full'] = False + if not spec: + util.warn("Unknown column definition %r" % line) + return + if not spec['full']: + util.warn("Incomplete reflection of column definition %r" % line) + + name, type_, args = spec['name'], spec['coltype'], spec['arg'] + + try: + col_type = self.dialect.ischema_names[type_] + except KeyError: + util.warn("Did not recognize type '%s' of column '%s'" % + (type_, name)) + col_type = sqltypes.NullType + + # Column type positional arguments eg. varchar(32) + if args is None or args == '': + type_args = [] + elif args[0] == "'" and args[-1] == "'": + type_args = self._re_csv_str.findall(args) + else: + type_args = [int(v) for v in self._re_csv_int.findall(args)] + + # Column type keyword options + type_kw = {} + + if issubclass(col_type, (DATETIME, TIME, TIMESTAMP)): + if type_args: + type_kw['fsp'] = type_args.pop(0) + + for kw in ('unsigned', 'zerofill'): + if spec.get(kw, False): + type_kw[kw] = True + for kw in ('charset', 'collate'): + if spec.get(kw, False): + type_kw[kw] = spec[kw] + if issubclass(col_type, _EnumeratedValues): + type_args = _EnumeratedValues._strip_values(type_args) + + if issubclass(col_type, SET) and '' in type_args: + type_kw['retrieve_as_bitwise'] = True + + type_instance = col_type(*type_args, **type_kw) + + col_kw = {} + + # NOT NULL + col_kw['nullable'] = True + # this can be "NULL" in the case of TIMESTAMP + if spec.get('notnull', False) == 'NOT NULL': + col_kw['nullable'] = False + + # AUTO_INCREMENT + if spec.get('autoincr', False): + col_kw['autoincrement'] = True + elif issubclass(col_type, sqltypes.Integer): + col_kw['autoincrement'] = False + + # DEFAULT + default = spec.get('default', None) + + if default == 'NULL': + # eliminates the need to deal with this later. + default = None + + col_d = dict(name=name, type=type_instance, default=default) + col_d.update(col_kw) + state.columns.append(col_d) + + def _describe_to_create(self, table_name, columns): + """Re-format DESCRIBE output as a SHOW CREATE TABLE string. + + DESCRIBE is a much simpler reflection and is sufficient for + reflecting views for runtime use. This method formats DDL + for columns only- keys are omitted. + + :param columns: A sequence of DESCRIBE or SHOW COLUMNS 6-tuples. + SHOW FULL COLUMNS FROM rows must be rearranged for use with + this function. 
+ """ + + buffer = [] + for row in columns: + (name, col_type, nullable, default, extra) = \ + [row[i] for i in (0, 1, 2, 4, 5)] + + line = [' '] + line.append(self.preparer.quote_identifier(name)) + line.append(col_type) + if not nullable: + line.append('NOT NULL') + if default: + if 'auto_increment' in default: + pass + elif (col_type.startswith('timestamp') and + default.startswith('C')): + line.append('DEFAULT') + line.append(default) + elif default == 'NULL': + line.append('DEFAULT') + line.append(default) + else: + line.append('DEFAULT') + line.append("'%s'" % default.replace("'", "''")) + if extra: + line.append(extra) + + buffer.append(' '.join(line)) + + return ''.join([('CREATE TABLE %s (\n' % + self.preparer.quote_identifier(table_name)), + ',\n'.join(buffer), + '\n) ']) + + def _parse_keyexprs(self, identifiers): + """Unpack '"col"(2),"col" ASC'-ish strings into components.""" + + return self._re_keyexprs.findall(identifiers) + + def _prep_regexes(self): + """Pre-compile regular expressions.""" + + self._re_columns = [] + self._pr_options = [] + + _final = self.preparer.final_quote + + quotes = dict(zip(('iq', 'fq', 'esc_fq'), + [re.escape(s) for s in + (self.preparer.initial_quote, + _final, + self.preparer._escape_identifier(_final))])) + + self._pr_name = _pr_compile( + r'^CREATE (?:\w+ +)?TABLE +' + r'%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +\($' % quotes, + self.preparer._unescape_identifier) + + # `col`,`col2`(32),`col3`(15) DESC + # + # Note: ASC and DESC aren't reflected, so we'll punt... + self._re_keyexprs = _re_compile( + r'(?:' + r'(?:%(iq)s((?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)' + r'(?:\((\d+)\))?(?=\,|$))+' % quotes) + + # 'foo' or 'foo','bar' or 'fo,o','ba''a''r' + self._re_csv_str = _re_compile(r'\x27(?:\x27\x27|[^\x27])*\x27') + + # 123 or 123,456 + self._re_csv_int = _re_compile(r'\d+') + + # `colname` [type opts] + # (NOT NULL | NULL) + # DEFAULT ('value' | CURRENT_TIMESTAMP...) + # COMMENT 'comment' + # COLUMN_FORMAT (FIXED|DYNAMIC|DEFAULT) + # STORAGE (DISK|MEMORY) + self._re_column = _re_compile( + r' ' + r'%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +' + r'(?P\w+)' + r'(?:\((?P(?:\d+|\d+,\d+|' + r'(?:\x27(?:\x27\x27|[^\x27])*\x27,?)+))\))?' + r'(?: +(?PUNSIGNED))?' + r'(?: +(?PZEROFILL))?' + r'(?: +CHARACTER SET +(?P[\w_]+))?' + r'(?: +COLLATE +(?P[\w_]+))?' + r'(?: +(?P(?:NOT )?NULL))?' + r'(?: +DEFAULT +(?P' + r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+' + r'(?: +ON UPDATE \w+)?)' + r'))?' + r'(?: +(?PAUTO_INCREMENT))?' + r'(?: +COMMENT +(P(?:\x27\x27|[^\x27])+))?' + r'(?: +COLUMN_FORMAT +(?P\w+))?' + r'(?: +STORAGE +(?P\w+))?' + r'(?: +(?P.*))?' + r',?$' + % quotes + ) + + # Fallback, try to parse as little as possible + self._re_column_loose = _re_compile( + r' ' + r'%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +' + r'(?P\w+)' + r'(?:\((?P(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?' + r'.*?(?P(?:NOT )NULL)?' + % quotes + ) + + # (PRIMARY|UNIQUE|FULLTEXT|SPATIAL) INDEX `name` (USING (BTREE|HASH))? + # (`col` (ASC|DESC)?, `col` (ASC|DESC)?) + # KEY_BLOCK_SIZE size | WITH PARSER name + self._re_key = _re_compile( + r' ' + r'(?:(?P\S+) )?KEY' + r'(?: +%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)?' + r'(?: +USING +(?P\S+))?' + r' +\((?P.+?)\)' + r'(?: +USING +(?P\S+))?' + r'(?: +KEY_BLOCK_SIZE *[ =]? *(?P\S+))?' + r'(?: +WITH PARSER +(?P\S+))?' 
+ r',?$' + % quotes + ) + + # CONSTRAINT `name` FOREIGN KEY (`local_col`) + # REFERENCES `remote` (`remote_col`) + # MATCH FULL | MATCH PARTIAL | MATCH SIMPLE + # ON DELETE CASCADE ON UPDATE RESTRICT + # + # unique constraints come back as KEYs + kw = quotes.copy() + kw['on'] = 'RESTRICT|CASCADE|SET NULL|NOACTION' + self._re_constraint = _re_compile( + r' ' + r'CONSTRAINT +' + r'%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +' + r'FOREIGN KEY +' + r'\((?P[^\)]+?)\) REFERENCES +' + r'(?P%(iq)s[^%(fq)s]+%(fq)s' + r'(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +' + r'\((?P[^\)]+?)\)' + r'(?: +(?PMATCH \w+))?' + r'(?: +ON DELETE (?P%(on)s))?' + r'(?: +ON UPDATE (?P%(on)s))?' + % kw + ) + + # PARTITION + # + # punt! + self._re_partition = _re_compile(r'(?:.*)(?:SUB)?PARTITION(?:.*)') + + # Table-level options (COLLATE, ENGINE, etc.) + # Do the string options first, since they have quoted + # strings we need to get rid of. + for option in _options_of_type_string: + self._add_option_string(option) + + for option in ('ENGINE', 'TYPE', 'AUTO_INCREMENT', + 'AVG_ROW_LENGTH', 'CHARACTER SET', + 'DEFAULT CHARSET', 'CHECKSUM', + 'COLLATE', 'DELAY_KEY_WRITE', 'INSERT_METHOD', + 'MAX_ROWS', 'MIN_ROWS', 'PACK_KEYS', 'ROW_FORMAT', + 'KEY_BLOCK_SIZE'): + self._add_option_word(option) + + self._add_option_regex('UNION', r'\([^\)]+\)') + self._add_option_regex('TABLESPACE', r'.*? STORAGE DISK') + self._add_option_regex( + 'RAID_TYPE', + r'\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+') + + _optional_equals = r'(?:\s*(?:=\s*)|\s+)' + + def _add_option_string(self, directive): + regex = (r'(?P%s)%s' + r"'(?P(?:[^']|'')*?)'(?!')" % + (re.escape(directive), self._optional_equals)) + self._pr_options.append(_pr_compile( + regex, lambda v: v.replace("\\\\", "\\").replace("''", "'") + )) + + def _add_option_word(self, directive): + regex = (r'(?P%s)%s' + r'(?P\w+)' % + (re.escape(directive), self._optional_equals)) + self._pr_options.append(_pr_compile(regex)) + + def _add_option_regex(self, directive, regex): + regex = (r'(?P%s)%s' + r'(?P%s)' % + (re.escape(directive), self._optional_equals, regex)) + self._pr_options.append(_pr_compile(regex)) + +_options_of_type_string = ('COMMENT', 'DATA DIRECTORY', 'INDEX DIRECTORY', + 'PASSWORD', 'CONNECTION') + + +class _DecodingRowProxy(object): + """Return unicode-decoded values based on type inspection. + + Smooth over data type issues (esp. with alpha driver versions) and + normalize strings as Unicode regardless of user-configured driver + encoding settings. + + """ + + # Some MySQL-python versions can return some columns as + # sets.Set(['value']) (seriously) but thankfully that doesn't + # seem to come up in DDL queries. 
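+    # Illustration (not part of the library code): _encoding_compat below
+    # maps MySQL charset names to Python codec names before decoding. A
+    # minimal sketch, passing a plain tuple in place of a driver row:
+    #
+    #   row = _DecodingRowProxy((b'caf\xc3\xa9',), 'utf8mb4')
+    #   row[0]   # decoded via Python's 'utf8' codec -> u'caf\xe9'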
+
+    _encoding_compat = {
+        'koi8r': 'koi8_r',
+        'koi8u': 'koi8_u',
+        'utf16': 'utf-16-be',  # MySQL's utf16 is always big-endian
+        'utf8mb4': 'utf8',  # real utf8
+        'eucjpms': 'ujis',
+    }
+
+    def __init__(self, rowproxy, charset):
+        self.rowproxy = rowproxy
+        self.charset = self._encoding_compat.get(charset, charset)
+
+    def __getitem__(self, index):
+        item = self.rowproxy[index]
+        if isinstance(item, _array):
+            item = item.tostring()
+
+        if self.charset and isinstance(item, util.binary_type):
+            return item.decode(self.charset)
+        else:
+            return item
+
+    def __getattr__(self, attr):
+        item = getattr(self.rowproxy, attr)
+        if isinstance(item, _array):
+            item = item.tostring()
+        if self.charset and isinstance(item, util.binary_type):
+            return item.decode(self.charset)
+        else:
+            return item
+
+
+def _pr_compile(regex, cleanup=None):
+    """Prepare a 2-tuple of compiled regex and callable."""
+
+    return (_re_compile(regex), cleanup)
+
+
+def _re_compile(regex):
+    """Compile a string to regex, I and UNICODE."""
+
+    return re.compile(regex, re.I | re.UNICODE)
diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/cymysql.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/cymysql.py
new file mode 100644
index 0000000..8bc0ae3
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/cymysql.py
@@ -0,0 +1,87 @@
+# mysql/cymysql.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+
+.. dialect:: mysql+cymysql
+    :name: CyMySQL
+    :dbapi: cymysql
+    :connectstring: mysql+cymysql://<username>:<password>@<host>/<dbname>\
+[?<options>]
+    :url: https://github.com/nakagami/CyMySQL
+
+"""
+import re
+
+from .mysqldb import MySQLDialect_mysqldb
+from .base import (BIT, MySQLDialect)
+from ... import util
+
+
+class _cymysqlBIT(BIT):
+    def result_processor(self, dialect, coltype):
+        """Convert MySQL's 64 bit, variable length binary string to a long.
+ """ + + def process(value): + if value is not None: + v = 0 + for i in util.iterbytes(value): + v = v << 8 | i + return v + return value + return process + + +class MySQLDialect_cymysql(MySQLDialect_mysqldb): + driver = 'cymysql' + + description_encoding = None + supports_sane_rowcount = True + supports_sane_multi_rowcount = False + supports_unicode_statements = True + + colspecs = util.update_copy( + MySQLDialect.colspecs, + { + BIT: _cymysqlBIT, + } + ) + + @classmethod + def dbapi(cls): + return __import__('cymysql') + + def _get_server_version_info(self, connection): + dbapi_con = connection.connection + version = [] + r = re.compile('[.\-]') + for n in r.split(dbapi_con.server_version): + try: + version.append(int(n)) + except ValueError: + version.append(n) + return tuple(version) + + def _detect_charset(self, connection): + return connection.connection.charset + + def _extract_error_code(self, exception): + return exception.errno + + def is_disconnect(self, e, connection, cursor): + if isinstance(e, self.dbapi.OperationalError): + return self._extract_error_code(e) in \ + (2006, 2013, 2014, 2045, 2055) + elif isinstance(e, self.dbapi.InterfaceError): + # if underlying connection is closed, + # this is the error you get + return True + else: + return False + +dialect = MySQLDialect_cymysql diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/gaerdbms.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/gaerdbms.py new file mode 100644 index 0000000..4e36588 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/gaerdbms.py @@ -0,0 +1,102 @@ +# mysql/gaerdbms.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +""" +.. dialect:: mysql+gaerdbms + :name: Google Cloud SQL + :dbapi: rdbms + :connectstring: mysql+gaerdbms:///?instance= + :url: https://developers.google.com/appengine/docs/python/cloud-sql/\ +developers-guide + + This dialect is based primarily on the :mod:`.mysql.mysqldb` dialect with + minimal changes. + + .. versionadded:: 0.7.8 + + .. deprecated:: 1.0 This dialect is **no longer necessary** for + Google Cloud SQL; the MySQLdb dialect can be used directly. + Cloud SQL now recommends creating connections via the + mysql dialect using the URL format + + ``mysql+mysqldb://root@/?unix_socket=/cloudsql/:`` + + +Pooling +------- + +Google App Engine connections appear to be randomly recycled, +so the dialect does not pool connections. The :class:`.NullPool` +implementation is installed within the :class:`.Engine` by +default. 
+ +""" + +import os + +from .mysqldb import MySQLDialect_mysqldb +from ...pool import NullPool +import re +from sqlalchemy.util import warn_deprecated + + +def _is_dev_environment(): + return os.environ.get('SERVER_SOFTWARE', '').startswith('Development/') + + +class MySQLDialect_gaerdbms(MySQLDialect_mysqldb): + + @classmethod + def dbapi(cls): + + warn_deprecated( + "Google Cloud SQL now recommends creating connections via the " + "MySQLdb dialect directly, using the URL format " + "mysql+mysqldb://root@/?unix_socket=/cloudsql/" + ":" + ) + + # from django: + # http://code.google.com/p/googleappengine/source/ + # browse/trunk/python/google/storage/speckle/ + # python/django/backend/base.py#118 + # see also [ticket:2649] + # see also http://stackoverflow.com/q/14224679/34549 + from google.appengine.api import apiproxy_stub_map + + if _is_dev_environment(): + from google.appengine.api import rdbms_mysqldb + return rdbms_mysqldb + elif apiproxy_stub_map.apiproxy.GetStub('rdbms'): + from google.storage.speckle.python.api import rdbms_apiproxy + return rdbms_apiproxy + else: + from google.storage.speckle.python.api import rdbms_googleapi + return rdbms_googleapi + + @classmethod + def get_pool_class(cls, url): + # Cloud SQL connections die at any moment + return NullPool + + def create_connect_args(self, url): + opts = url.translate_connect_args() + if not _is_dev_environment(): + # 'dsn' and 'instance' are because we are skipping + # the traditional google.api.rdbms wrapper + opts['dsn'] = '' + opts['instance'] = url.query['instance'] + return [], opts + + def _extract_error_code(self, exception): + match = re.compile(r"^(\d+)L?:|^\((\d+)L?,").match(str(exception)) + # The rdbms api will wrap then re-raise some types of errors + # making this regex return no matches. + code = match.group(1) or match.group(2) if match else None + if code: + return int(code) + +dialect = MySQLDialect_gaerdbms diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py new file mode 100644 index 0000000..a3a3f2b --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -0,0 +1,176 @@ +# mysql/mysqlconnector.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: mysql+mysqlconnector + :name: MySQL Connector/Python + :dbapi: myconnpy + :connectstring: mysql+mysqlconnector://:@\ +[:]/ + :url: http://dev.mysql.com/downloads/connector/python/ + + +Unicode +------- + +Please see :ref:`mysql_unicode` for current recommendations on unicode +handling. + +""" + +from .base import (MySQLDialect, MySQLExecutionContext, + MySQLCompiler, MySQLIdentifierPreparer, + BIT) + +from ... 
import util +import re + + +class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext): + + def get_lastrowid(self): + return self.cursor.lastrowid + + +class MySQLCompiler_mysqlconnector(MySQLCompiler): + def visit_mod_binary(self, binary, operator, **kw): + if self.dialect._mysqlconnector_double_percents: + return self.process(binary.left, **kw) + " %% " + \ + self.process(binary.right, **kw) + else: + return self.process(binary.left, **kw) + " % " + \ + self.process(binary.right, **kw) + + def post_process_text(self, text): + if self.dialect._mysqlconnector_double_percents: + return text.replace('%', '%%') + else: + return text + + def escape_literal_column(self, text): + if self.dialect._mysqlconnector_double_percents: + return text.replace('%', '%%') + else: + return text + + +class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer): + + def _escape_identifier(self, value): + value = value.replace(self.escape_quote, self.escape_to_quote) + if self.dialect._mysqlconnector_double_percents: + return value.replace("%", "%%") + else: + return value + + +class _myconnpyBIT(BIT): + def result_processor(self, dialect, coltype): + """MySQL-connector already converts mysql bits, so.""" + + return None + + +class MySQLDialect_mysqlconnector(MySQLDialect): + driver = 'mysqlconnector' + + supports_unicode_binds = True + + supports_sane_rowcount = True + supports_sane_multi_rowcount = True + + supports_native_decimal = True + + default_paramstyle = 'format' + execution_ctx_cls = MySQLExecutionContext_mysqlconnector + statement_compiler = MySQLCompiler_mysqlconnector + + preparer = MySQLIdentifierPreparer_mysqlconnector + + colspecs = util.update_copy( + MySQLDialect.colspecs, + { + BIT: _myconnpyBIT, + } + ) + + @util.memoized_property + def supports_unicode_statements(self): + return util.py3k or self._mysqlconnector_version_info > (2, 0) + + @classmethod + def dbapi(cls): + from mysql import connector + return connector + + def create_connect_args(self, url): + opts = url.translate_connect_args(username='user') + + opts.update(url.query) + + util.coerce_kw_type(opts, 'buffered', bool) + util.coerce_kw_type(opts, 'raise_on_warnings', bool) + + # unfortunately, MySQL/connector python refuses to release a + # cursor without reading fully, so non-buffered isn't an option + opts.setdefault('buffered', True) + + # FOUND_ROWS must be set in ClientFlag to enable + # supports_sane_rowcount. 
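+        # (With FOUND_ROWS set, UPDATE statements report the number of rows
+        # matched rather than the number of rows actually changed, which is
+        # the semantics SQLAlchemy's rowcount support expects.)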
+ if self.dbapi is not None: + try: + from mysql.connector.constants import ClientFlag + client_flags = opts.get( + 'client_flags', ClientFlag.get_default()) + client_flags |= ClientFlag.FOUND_ROWS + opts['client_flags'] = client_flags + except Exception: + pass + return [[], opts] + + @util.memoized_property + def _mysqlconnector_version_info(self): + if self.dbapi and hasattr(self.dbapi, '__version__'): + m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?', + self.dbapi.__version__) + if m: + return tuple( + int(x) + for x in m.group(1, 2, 3) + if x is not None) + + @util.memoized_property + def _mysqlconnector_double_percents(self): + return not util.py3k and self._mysqlconnector_version_info < (2, 0) + + def _get_server_version_info(self, connection): + dbapi_con = connection.connection + version = dbapi_con.get_server_version() + return tuple(version) + + def _detect_charset(self, connection): + return connection.connection.charset + + def _extract_error_code(self, exception): + return exception.errno + + def is_disconnect(self, e, connection, cursor): + errnos = (2006, 2013, 2014, 2045, 2055, 2048) + exceptions = (self.dbapi.OperationalError, self.dbapi.InterfaceError) + if isinstance(e, exceptions): + return e.errno in errnos or \ + "MySQL Connection not available." in str(e) + else: + return False + + def _compat_fetchall(self, rp, charset=None): + return rp.fetchall() + + def _compat_fetchone(self, rp, charset=None): + return rp.fetchone() + +dialect = MySQLDialect_mysqlconnector diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqldb.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqldb.py new file mode 100644 index 0000000..9c35eb7 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqldb.py @@ -0,0 +1,198 @@ +# mysql/mysqldb.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" + +.. dialect:: mysql+mysqldb + :name: MySQL-Python + :dbapi: mysqldb + :connectstring: mysql+mysqldb://:@[:]/ + :url: http://sourceforge.net/projects/mysql-python + +.. _mysqldb_unicode: + +Unicode +------- + +Please see :ref:`mysql_unicode` for current recommendations on unicode +handling. + +Py3K Support +------------ + +Currently, MySQLdb only runs on Python 2 and development has been stopped. +`mysqlclient`_ is fork of MySQLdb and provides Python 3 support as well +as some bugfixes. + +.. _mysqlclient: https://github.com/PyMySQL/mysqlclient-python + +Using MySQLdb with Google Cloud SQL +----------------------------------- + +Google Cloud SQL now recommends use of the MySQLdb dialect. Connect +using a URL like the following:: + + mysql+mysqldb://root@/?unix_socket=/cloudsql/: + +""" + +from .base import (MySQLDialect, MySQLExecutionContext, + MySQLCompiler, MySQLIdentifierPreparer) +from .base import TEXT +from ... import sql +from ... 
import util +import re + + +class MySQLExecutionContext_mysqldb(MySQLExecutionContext): + + @property + def rowcount(self): + if hasattr(self, '_rowcount'): + return self._rowcount + else: + return self.cursor.rowcount + + +class MySQLCompiler_mysqldb(MySQLCompiler): + def visit_mod_binary(self, binary, operator, **kw): + return self.process(binary.left, **kw) + " %% " + \ + self.process(binary.right, **kw) + + def post_process_text(self, text): + return text.replace('%', '%%') + + +class MySQLIdentifierPreparer_mysqldb(MySQLIdentifierPreparer): + + def _escape_identifier(self, value): + value = value.replace(self.escape_quote, self.escape_to_quote) + return value.replace("%", "%%") + + +class MySQLDialect_mysqldb(MySQLDialect): + driver = 'mysqldb' + supports_unicode_statements = True + supports_sane_rowcount = True + supports_sane_multi_rowcount = True + + supports_native_decimal = True + + default_paramstyle = 'format' + execution_ctx_cls = MySQLExecutionContext_mysqldb + statement_compiler = MySQLCompiler_mysqldb + preparer = MySQLIdentifierPreparer_mysqldb + + @classmethod + def dbapi(cls): + return __import__('MySQLdb') + + def do_executemany(self, cursor, statement, parameters, context=None): + rowcount = cursor.executemany(statement, parameters) + if context is not None: + context._rowcount = rowcount + + def _check_unicode_returns(self, connection): + # work around issue fixed in + # https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8 + # specific issue w/ the utf8_bin collation and unicode returns + + has_utf8_bin = self.server_version_info > (5, ) and \ + connection.scalar( + "show collation where %s = 'utf8' and %s = 'utf8_bin'" + % ( + self.identifier_preparer.quote("Charset"), + self.identifier_preparer.quote("Collation") + )) + if has_utf8_bin: + additional_tests = [ + sql.collate(sql.cast( + sql.literal_column( + "'test collated returns'"), + TEXT(charset='utf8')), "utf8_bin") + ] + else: + additional_tests = [] + return super(MySQLDialect_mysqldb, self)._check_unicode_returns( + connection, additional_tests) + + def create_connect_args(self, url): + opts = url.translate_connect_args(database='db', username='user', + password='passwd') + opts.update(url.query) + + util.coerce_kw_type(opts, 'compress', bool) + util.coerce_kw_type(opts, 'connect_timeout', int) + util.coerce_kw_type(opts, 'read_timeout', int) + util.coerce_kw_type(opts, 'client_flag', int) + util.coerce_kw_type(opts, 'local_infile', int) + # Note: using either of the below will cause all strings to be + # returned as Unicode, both in raw SQL operations and with column + # types like String and MSString. + util.coerce_kw_type(opts, 'use_unicode', bool) + util.coerce_kw_type(opts, 'charset', str) + + # Rich values 'cursorclass' and 'conv' are not supported via + # query string. + + ssl = {} + keys = ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher'] + for key in keys: + if key in opts: + ssl[key[4:]] = opts[key] + util.coerce_kw_type(ssl, key[4:], str) + del opts[key] + if ssl: + opts['ssl'] = ssl + + # FOUND_ROWS must be set in CLIENT_FLAGS to enable + # supports_sane_rowcount. 
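+        # (The flag constants live under MySQLdb.constants.CLIENT; when that
+        # import fails below, sane rowcount support is turned off instead.)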
+ client_flag = opts.get('client_flag', 0) + if self.dbapi is not None: + try: + CLIENT_FLAGS = __import__( + self.dbapi.__name__ + '.constants.CLIENT' + ).constants.CLIENT + client_flag |= CLIENT_FLAGS.FOUND_ROWS + except (AttributeError, ImportError): + self.supports_sane_rowcount = False + opts['client_flag'] = client_flag + return [[], opts] + + def _get_server_version_info(self, connection): + dbapi_con = connection.connection + version = [] + r = re.compile('[.\-]') + for n in r.split(dbapi_con.get_server_info()): + try: + version.append(int(n)) + except ValueError: + version.append(n) + return tuple(version) + + def _extract_error_code(self, exception): + return exception.args[0] + + def _detect_charset(self, connection): + """Sniff out the character set in use for connection results.""" + + try: + # note: the SQL here would be + # "SHOW VARIABLES LIKE 'character_set%%'" + cset_name = connection.connection.character_set_name + except AttributeError: + util.warn( + "No 'character_set_name' can be detected with " + "this MySQL-Python version; " + "please upgrade to a recent version of MySQL-Python. " + "Assuming latin1.") + return 'latin1' + else: + return cset_name() + + +dialect = MySQLDialect_mysqldb diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/oursql.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/oursql.py new file mode 100644 index 0000000..b91db18 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/oursql.py @@ -0,0 +1,254 @@ +# mysql/oursql.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" + +.. dialect:: mysql+oursql + :name: OurSQL + :dbapi: oursql + :connectstring: mysql+oursql://:@[:]/ + :url: http://packages.python.org/oursql/ + +Unicode +------- + +Please see :ref:`mysql_unicode` for current recommendations on unicode +handling. + + +""" + +import re + +from .base import (BIT, MySQLDialect, MySQLExecutionContext) +from ... 
import types as sqltypes, util + + +class _oursqlBIT(BIT): + def result_processor(self, dialect, coltype): + """oursql already converts mysql bits, so.""" + + return None + + +class MySQLExecutionContext_oursql(MySQLExecutionContext): + + @property + def plain_query(self): + return self.execution_options.get('_oursql_plain_query', False) + + +class MySQLDialect_oursql(MySQLDialect): + driver = 'oursql' + + if util.py2k: + supports_unicode_binds = True + supports_unicode_statements = True + + supports_native_decimal = True + + supports_sane_rowcount = True + supports_sane_multi_rowcount = True + execution_ctx_cls = MySQLExecutionContext_oursql + + colspecs = util.update_copy( + MySQLDialect.colspecs, + { + sqltypes.Time: sqltypes.Time, + BIT: _oursqlBIT, + } + ) + + @classmethod + def dbapi(cls): + return __import__('oursql') + + def do_execute(self, cursor, statement, parameters, context=None): + """Provide an implementation of + *cursor.execute(statement, parameters)*.""" + + if context and context.plain_query: + cursor.execute(statement, plain_query=True) + else: + cursor.execute(statement, parameters) + + def do_begin(self, connection): + connection.cursor().execute('BEGIN', plain_query=True) + + def _xa_query(self, connection, query, xid): + if util.py2k: + arg = connection.connection._escape_string(xid) + else: + charset = self._connection_charset + arg = connection.connection._escape_string( + xid.encode(charset)).decode(charset) + arg = "'%s'" % arg + connection.execution_options( + _oursql_plain_query=True).execute(query % arg) + + # Because mysql is bad, these methods have to be + # reimplemented to use _PlainQuery. Basically, some queries + # refuse to return any data if they're run through + # the parameterized query API, or refuse to be parameterized + # in the first place. + def do_begin_twophase(self, connection, xid): + self._xa_query(connection, 'XA BEGIN %s', xid) + + def do_prepare_twophase(self, connection, xid): + self._xa_query(connection, 'XA END %s', xid) + self._xa_query(connection, 'XA PREPARE %s', xid) + + def do_rollback_twophase(self, connection, xid, is_prepared=True, + recover=False): + if not is_prepared: + self._xa_query(connection, 'XA END %s', xid) + self._xa_query(connection, 'XA ROLLBACK %s', xid) + + def do_commit_twophase(self, connection, xid, is_prepared=True, + recover=False): + if not is_prepared: + self.do_prepare_twophase(connection, xid) + self._xa_query(connection, 'XA COMMIT %s', xid) + + # Q: why didn't we need all these "plain_query" overrides earlier ? + # am i on a newer/older version of OurSQL ? 
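+    # (A note on the overrides that follow: each one re-runs the base
+    # MySQLDialect method on a connection with _oursql_plain_query=True,
+    # so the underlying SHOW / DESCRIBE statements bypass the
+    # parameterized query API entirely.)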
+ def has_table(self, connection, table_name, schema=None): + return MySQLDialect.has_table( + self, + connection.connect().execution_options(_oursql_plain_query=True), + table_name, + schema + ) + + def get_table_options(self, connection, table_name, schema=None, **kw): + return MySQLDialect.get_table_options( + self, + connection.connect().execution_options(_oursql_plain_query=True), + table_name, + schema=schema, + **kw + ) + + def get_columns(self, connection, table_name, schema=None, **kw): + return MySQLDialect.get_columns( + self, + connection.connect().execution_options(_oursql_plain_query=True), + table_name, + schema=schema, + **kw + ) + + def get_view_names(self, connection, schema=None, **kw): + return MySQLDialect.get_view_names( + self, + connection.connect().execution_options(_oursql_plain_query=True), + schema=schema, + **kw + ) + + def get_table_names(self, connection, schema=None, **kw): + return MySQLDialect.get_table_names( + self, + connection.connect().execution_options(_oursql_plain_query=True), + schema + ) + + def get_schema_names(self, connection, **kw): + return MySQLDialect.get_schema_names( + self, + connection.connect().execution_options(_oursql_plain_query=True), + **kw + ) + + def initialize(self, connection): + return MySQLDialect.initialize( + self, + connection.execution_options(_oursql_plain_query=True) + ) + + def _show_create_table(self, connection, table, charset=None, + full_name=None): + return MySQLDialect._show_create_table( + self, + connection.contextual_connect(close_with_result=True). + execution_options(_oursql_plain_query=True), + table, charset, full_name + ) + + def is_disconnect(self, e, connection, cursor): + if isinstance(e, self.dbapi.ProgrammingError): + return e.errno is None and 'cursor' not in e.args[1] \ + and e.args[1].endswith('closed') + else: + return e.errno in (2006, 2013, 2014, 2045, 2055) + + def create_connect_args(self, url): + opts = url.translate_connect_args(database='db', username='user', + password='passwd') + opts.update(url.query) + + util.coerce_kw_type(opts, 'port', int) + util.coerce_kw_type(opts, 'compress', bool) + util.coerce_kw_type(opts, 'autoping', bool) + util.coerce_kw_type(opts, 'raise_on_warnings', bool) + + util.coerce_kw_type(opts, 'default_charset', bool) + if opts.pop('default_charset', False): + opts['charset'] = None + else: + util.coerce_kw_type(opts, 'charset', str) + opts['use_unicode'] = opts.get('use_unicode', True) + util.coerce_kw_type(opts, 'use_unicode', bool) + + # FOUND_ROWS must be set in CLIENT_FLAGS to enable + # supports_sane_rowcount. 
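+        # (oursql exposes this directly as the boolean found_rows connect
+        # argument, so no client-flag arithmetic is needed here.)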
+ opts.setdefault('found_rows', True) + + ssl = {} + for key in ['ssl_ca', 'ssl_key', 'ssl_cert', + 'ssl_capath', 'ssl_cipher']: + if key in opts: + ssl[key[4:]] = opts[key] + util.coerce_kw_type(ssl, key[4:], str) + del opts[key] + if ssl: + opts['ssl'] = ssl + + return [[], opts] + + def _get_server_version_info(self, connection): + dbapi_con = connection.connection + version = [] + r = re.compile('[.\-]') + for n in r.split(dbapi_con.server_info): + try: + version.append(int(n)) + except ValueError: + version.append(n) + return tuple(version) + + def _extract_error_code(self, exception): + return exception.errno + + def _detect_charset(self, connection): + """Sniff out the character set in use for connection results.""" + + return connection.connection.charset + + def _compat_fetchall(self, rp, charset=None): + """oursql isn't super-broken like MySQLdb, yaaay.""" + return rp.fetchall() + + def _compat_fetchone(self, rp, charset=None): + """oursql isn't super-broken like MySQLdb, yaaay.""" + return rp.fetchone() + + def _compat_first(self, rp, charset=None): + return rp.first() + + +dialect = MySQLDialect_oursql diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pymysql.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pymysql.py new file mode 100644 index 0000000..3c493fb --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pymysql.py @@ -0,0 +1,57 @@ +# mysql/pymysql.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" + +.. dialect:: mysql+pymysql + :name: PyMySQL + :dbapi: pymysql + :connectstring: mysql+pymysql://:@/\ +[?] + :url: http://www.pymysql.org/ + +Unicode +------- + +Please see :ref:`mysql_unicode` for current recommendations on unicode +handling. + +MySQL-Python Compatibility +-------------------------- + +The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver, +and targets 100% compatibility. Most behavioral notes for MySQL-python apply +to the pymysql driver as well. + +""" + +from .mysqldb import MySQLDialect_mysqldb +from ...util import py3k + + +class MySQLDialect_pymysql(MySQLDialect_mysqldb): + driver = 'pymysql' + + description_encoding = None + + # generally, these two values should be both True + # or both False. PyMySQL unicode tests pass all the way back + # to 0.4 either way. See [ticket:3337] + supports_unicode_statements = True + supports_unicode_binds = True + + @classmethod + def dbapi(cls): + return __import__('pymysql') + + if py3k: + def _extract_error_code(self, exception): + if isinstance(exception.args[0], Exception): + exception = exception.args[0] + return exception.args[0] + +dialect = MySQLDialect_pymysql diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pyodbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pyodbc.py new file mode 100644 index 0000000..882d3ea --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pyodbc.py @@ -0,0 +1,79 @@ +# mysql/pyodbc.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" + + +.. dialect:: mysql+pyodbc + :name: PyODBC + :dbapi: pyodbc + :connectstring: mysql+pyodbc://:@ + :url: http://pypi.python.org/pypi/pyodbc/ + + .. 
note:: The PyODBC for MySQL dialect is not well supported, and + is subject to unresolved character encoding issues + which exist within the current ODBC drivers available. + (see http://code.google.com/p/pyodbc/issues/detail?id=25). + Other dialects for MySQL are recommended. + +""" + +from .base import MySQLDialect, MySQLExecutionContext +from ...connectors.pyodbc import PyODBCConnector +from ... import util +import re + + +class MySQLExecutionContext_pyodbc(MySQLExecutionContext): + + def get_lastrowid(self): + cursor = self.create_cursor() + cursor.execute("SELECT LAST_INSERT_ID()") + lastrowid = cursor.fetchone()[0] + cursor.close() + return lastrowid + + +class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect): + supports_unicode_statements = False + execution_ctx_cls = MySQLExecutionContext_pyodbc + + pyodbc_driver_name = "MySQL" + + def __init__(self, **kw): + # deal with http://code.google.com/p/pyodbc/issues/detail?id=25 + kw.setdefault('convert_unicode', True) + super(MySQLDialect_pyodbc, self).__init__(**kw) + + def _detect_charset(self, connection): + """Sniff out the character set in use for connection results.""" + + # Prefer 'character_set_results' for the current connection over the + # value in the driver. SET NAMES or individual variable SETs will + # change the charset without updating the driver's view of the world. + # + # If it's decided that issuing that sort of SQL leaves you SOL, then + # this can prefer the driver value. + rs = connection.execute("SHOW VARIABLES LIKE 'character_set%%'") + opts = dict([(row[0], row[1]) for row in self._compat_fetchall(rs)]) + for key in ('character_set_connection', 'character_set'): + if opts.get(key, None): + return opts[key] + + util.warn("Could not detect the connection character set. " + "Assuming latin1.") + return 'latin1' + + def _extract_error_code(self, exception): + m = re.compile(r"\((\d+)\)").search(str(exception.args)) + c = m.group(1) + if c: + return int(c) + else: + return None + +dialect = MySQLDialect_pyodbc diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/zxjdbc.py new file mode 100644 index 0000000..fe4c137 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/zxjdbc.py @@ -0,0 +1,117 @@ +# mysql/zxjdbc.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" + +.. dialect:: mysql+zxjdbc + :name: zxjdbc for Jython + :dbapi: zxjdbc + :connectstring: mysql+zxjdbc://:@[:]/\ + + :driverurl: http://dev.mysql.com/downloads/connector/j/ + + .. note:: Jython is not supported by current versions of SQLAlchemy. The + zxjdbc dialect should be considered as experimental. + +Character Sets +-------------- + +SQLAlchemy zxjdbc dialects pass unicode straight through to the +zxjdbc/JDBC layer. To allow multiple character sets to be sent from the +MySQL Connector/J JDBC driver, by default SQLAlchemy sets its +``characterEncoding`` connection property to ``UTF-8``. It may be +overridden via a ``create_engine`` URL parameter. + +""" +import re + +from ... 
import types as sqltypes, util +from ...connectors.zxJDBC import ZxJDBCConnector +from .base import BIT, MySQLDialect, MySQLExecutionContext + + +class _ZxJDBCBit(BIT): + def result_processor(self, dialect, coltype): + """Converts boolean or byte arrays from MySQL Connector/J to longs.""" + def process(value): + if value is None: + return value + if isinstance(value, bool): + return int(value) + v = 0 + for i in value: + v = v << 8 | (i & 0xff) + value = v + return value + return process + + +class MySQLExecutionContext_zxjdbc(MySQLExecutionContext): + def get_lastrowid(self): + cursor = self.create_cursor() + cursor.execute("SELECT LAST_INSERT_ID()") + lastrowid = cursor.fetchone()[0] + cursor.close() + return lastrowid + + +class MySQLDialect_zxjdbc(ZxJDBCConnector, MySQLDialect): + jdbc_db_name = 'mysql' + jdbc_driver_name = 'com.mysql.jdbc.Driver' + + execution_ctx_cls = MySQLExecutionContext_zxjdbc + + colspecs = util.update_copy( + MySQLDialect.colspecs, + { + sqltypes.Time: sqltypes.Time, + BIT: _ZxJDBCBit + } + ) + + def _detect_charset(self, connection): + """Sniff out the character set in use for connection results.""" + # Prefer 'character_set_results' for the current connection over the + # value in the driver. SET NAMES or individual variable SETs will + # change the charset without updating the driver's view of the world. + # + # If it's decided that issuing that sort of SQL leaves you SOL, then + # this can prefer the driver value. + rs = connection.execute("SHOW VARIABLES LIKE 'character_set%%'") + opts = dict((row[0], row[1]) for row in self._compat_fetchall(rs)) + for key in ('character_set_connection', 'character_set'): + if opts.get(key, None): + return opts[key] + + util.warn("Could not detect the connection character set. " + "Assuming latin1.") + return 'latin1' + + def _driver_kwargs(self): + """return kw arg dict to be sent to connect().""" + return dict(characterEncoding='UTF-8', yearIsDateType='false') + + def _extract_error_code(self, exception): + # e.g.: DBAPIError: (Error) Table 'test.u2' doesn't exist + # [SQLCode: 1146], [SQLState: 42S02] 'DESCRIBE `u2`' () + m = re.compile(r"\[SQLCode\: (\d+)\]").search(str(exception.args)) + c = m.group(1) + if c: + return int(c) + + def _get_server_version_info(self, connection): + dbapi_con = connection.connection + version = [] + r = re.compile('[.\-]') + for n in r.split(dbapi_con.dbversion): + try: + version.append(int(n)) + except ValueError: + version.append(n) + return tuple(version) + +dialect = MySQLDialect_zxjdbc diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/__init__.py new file mode 100644 index 0000000..0c5c317 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/__init__.py @@ -0,0 +1,24 @@ +# oracle/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from sqlalchemy.dialects.oracle import base, cx_oracle, zxjdbc + +base.dialect = cx_oracle.dialect + +from sqlalchemy.dialects.oracle.base import \ + VARCHAR, NVARCHAR, CHAR, DATE, NUMBER,\ + BLOB, BFILE, CLOB, NCLOB, TIMESTAMP, RAW,\ + FLOAT, DOUBLE_PRECISION, LONG, dialect, INTERVAL,\ + VARCHAR2, NVARCHAR2, ROWID, dialect + + +__all__ = ( + 'VARCHAR', 'NVARCHAR', 'CHAR', 'DATE', 'NUMBER', + 'BLOB', 'BFILE', 'CLOB', 'NCLOB', 'TIMESTAMP', 'RAW', + 'FLOAT', 'DOUBLE_PRECISION', 'LONG', 
'dialect', 'INTERVAL', + 'VARCHAR2', 'NVARCHAR2', 'ROWID' +) diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/base.py new file mode 100644 index 0000000..eb63983 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/base.py @@ -0,0 +1,1546 @@ +# oracle/base.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: oracle + :name: Oracle + + Oracle version 8 through current (11g at the time of this writing) are + supported. + +Connect Arguments +----------------- + +The dialect supports several :func:`~sqlalchemy.create_engine()` arguments +which affect the behavior of the dialect regardless of driver in use. + +* ``use_ansi`` - Use ANSI JOIN constructs (see the section on Oracle 8). + Defaults to ``True``. If ``False``, Oracle-8 compatible constructs are used + for joins. + +* ``optimize_limits`` - defaults to ``False``. see the section on + LIMIT/OFFSET. + +* ``use_binds_for_limits`` - defaults to ``True``. see the section on + LIMIT/OFFSET. + +Auto Increment Behavior +----------------------- + +SQLAlchemy Table objects which include integer primary keys are usually +assumed to have "autoincrementing" behavior, meaning they can generate their +own primary key values upon INSERT. Since Oracle has no "autoincrement" +feature, SQLAlchemy relies upon sequences to produce these values. With the +Oracle dialect, *a sequence must always be explicitly specified to enable +autoincrement*. This is divergent with the majority of documentation +examples which assume the usage of an autoincrement-capable database. To +specify sequences, use the sqlalchemy.schema.Sequence object which is passed +to a Column construct:: + + t = Table('mytable', metadata, + Column('id', Integer, Sequence('id_seq'), primary_key=True), + Column(...), ... + ) + +This step is also required when using table reflection, i.e. autoload=True:: + + t = Table('mytable', metadata, + Column('id', Integer, Sequence('id_seq'), primary_key=True), + autoload=True + ) + +Identifier Casing +----------------- + +In Oracle, the data dictionary represents all case insensitive identifier +names using UPPERCASE text. SQLAlchemy on the other hand considers an +all-lower case identifier name to be case insensitive. The Oracle dialect +converts all case insensitive identifiers to and from those two formats during +schema level communication, such as reflection of tables and indexes. Using +an UPPERCASE name on the SQLAlchemy side indicates a case sensitive +identifier, and SQLAlchemy will quote the name - this will cause mismatches +against data dictionary data received from Oracle, so unless identifier names +have been truly created as case sensitive (i.e. using quoted names), all +lowercase names should be used on the SQLAlchemy side. + + +LIMIT/OFFSET Support +-------------------- + +Oracle has no support for the LIMIT or OFFSET keywords. SQLAlchemy uses +a wrapped subquery approach in conjunction with ROWNUM. The exact methodology +is taken from +http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html . + +There are two options which affect its behavior: + +* the "FIRST ROWS()" optimization keyword is not used by default. To enable + the usage of this optimization directive, specify ``optimize_limits=True`` + to :func:`.create_engine`. 
+* the values passed for the limit/offset are sent as bound parameters. Some + users have observed that Oracle produces a poor query plan when the values + are sent as binds and not rendered literally. To render the limit/offset + values literally within the SQL statement, specify + ``use_binds_for_limits=False`` to :func:`.create_engine`. + +Some users have reported better performance when the entirely different +approach of a window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to +provide LIMIT/OFFSET (note that the majority of users don't observe this). +To suit this case the method used for LIMIT/OFFSET can be replaced entirely. +See the recipe at +http://www.sqlalchemy.org/trac/wiki/UsageRecipes/WindowFunctionsByDefault +which installs a select compiler that overrides the generation of limit/offset +with a window function. + +.. _oracle_returning: + +RETURNING Support +----------------- + +The Oracle database supports a limited form of RETURNING, in order to retrieve +result sets of matched rows from INSERT, UPDATE and DELETE statements. +Oracle's RETURNING..INTO syntax only supports one row being returned, as it +relies upon OUT parameters in order to function. In addition, supported +DBAPIs have further limitations (see :ref:`cx_oracle_returning`). + +SQLAlchemy's "implicit returning" feature, which employs RETURNING within an +INSERT and sometimes an UPDATE statement in order to fetch newly generated +primary key values and other SQL defaults and expressions, is normally enabled +on the Oracle backend. By default, "implicit returning" typically only +fetches the value of a single ``nextval(some_seq)`` expression embedded into +an INSERT in order to increment a sequence within an INSERT statement and get +the value back at the same time. To disable this feature across the board, +specify ``implicit_returning=False`` to :func:`.create_engine`:: + + engine = create_engine("oracle://scott:tiger@dsn", + implicit_returning=False) + +Implicit returning can also be disabled on a table-by-table basis as a table +option:: + + # Core Table + my_table = Table("my_table", metadata, ..., implicit_returning=False) + + + # declarative + class MyClass(Base): + __tablename__ = 'my_table' + __table_args__ = {"implicit_returning": False} + +.. seealso:: + + :ref:`cx_oracle_returning` - additional cx_oracle-specific restrictions on + implicit returning. + +ON UPDATE CASCADE +----------------- + +Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based +solution is available at +http://asktom.oracle.com/tkyte/update_cascade/index.html . + +When using the SQLAlchemy ORM, the ORM has limited ability to manually issue +cascading updates - specify ForeignKey objects using the +"deferrable=True, initially='deferred'" keyword arguments, +and specify "passive_updates=False" on each relationship(). + +Oracle 8 Compatibility +---------------------- + +When Oracle 8 is detected, the dialect internally configures itself to the +following behaviors: + +* the use_ansi flag is set to False. This has the effect of converting all + JOIN phrases into the WHERE clause, and in the case of LEFT OUTER JOIN + makes use of Oracle's (+) operator. + +* the NVARCHAR2 and NCLOB datatypes are no longer generated as DDL when + the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are + issued instead. This because these types don't seem to work correctly on + Oracle 8 even though they are available. 
The + :class:`~sqlalchemy.types.NVARCHAR` and + :class:`~sqlalchemy.dialects.oracle.NCLOB` types will always generate + NVARCHAR2 and NCLOB. + +* the "native unicode" mode is disabled when using cx_oracle, i.e. SQLAlchemy + encodes all Python unicode objects to "string" before passing in as bind + parameters. + +Synonym/DBLINK Reflection +------------------------- + +When using reflection with Table objects, the dialect can optionally search +for tables indicated by synonyms, either in local or remote schemas or +accessed over DBLINK, by passing the flag ``oracle_resolve_synonyms=True`` as +a keyword argument to the :class:`.Table` construct:: + + some_table = Table('some_table', autoload=True, + autoload_with=some_engine, + oracle_resolve_synonyms=True) + +When this flag is set, the given name (such as ``some_table`` above) will +be searched not just in the ``ALL_TABLES`` view, but also within the +``ALL_SYNONYMS`` view to see if this name is actually a synonym to another +name. If the synonym is located and refers to a DBLINK, the oracle dialect +knows how to locate the table's information using DBLINK syntax(e.g. +``@dblink``). + +``oracle_resolve_synonyms`` is accepted wherever reflection arguments are +accepted, including methods such as :meth:`.MetaData.reflect` and +:meth:`.Inspector.get_columns`. + +If synonyms are not in use, this flag should be left disabled. + +DateTime Compatibility +---------------------- + +Oracle has no datatype known as ``DATETIME``, it instead has only ``DATE``, +which can actually store a date and time value. For this reason, the Oracle +dialect provides a type :class:`.oracle.DATE` which is a subclass of +:class:`.DateTime`. This type has no special behavior, and is only +present as a "marker" for this type; additionally, when a database column +is reflected and the type is reported as ``DATE``, the time-supporting +:class:`.oracle.DATE` type is used. + +.. versionchanged:: 0.9.4 Added :class:`.oracle.DATE` to subclass + :class:`.DateTime`. This is a change as previous versions + would reflect a ``DATE`` column as :class:`.types.DATE`, which subclasses + :class:`.Date`. The only significance here is for schemes that are + examining the type of column for use in special Python translations or + for migrating schemas to other database backends. + +.. _oracle_table_options: + +Oracle Table Options +------------------------- + +The CREATE TABLE phrase supports the following options with Oracle +in conjunction with the :class:`.Table` construct: + + +* ``ON COMMIT``:: + + Table( + "some_table", metadata, ..., + prefixes=['GLOBAL TEMPORARY'], oracle_on_commit='PRESERVE ROWS') + +.. versionadded:: 1.0.0 + +* ``COMPRESS``:: + + Table('mytable', metadata, Column('data', String(32)), + oracle_compress=True) + + Table('mytable', metadata, Column('data', String(32)), + oracle_compress=6) + + The ``oracle_compress`` parameter accepts either an integer compression + level, or ``True`` to use the default compression level. + +.. versionadded:: 1.0.0 + +.. _oracle_index_options: + +Oracle Specific Index Options +----------------------------- + +Bitmap Indexes +~~~~~~~~~~~~~~ + +You can specify the ``oracle_bitmap`` parameter to create a bitmap index +instead of a B-tree index:: + + Index('my_index', my_table.c.data, oracle_bitmap=True) + +Bitmap indexes cannot be unique and cannot be compressed. SQLAlchemy will not +check for such limitations, only the database will. + +.. 
versionadded:: 1.0.0 + +Index compression +~~~~~~~~~~~~~~~~~ + +Oracle has a more efficient storage mode for indexes containing lots of +repeated values. Use the ``oracle_compress`` parameter to turn on key c +ompression:: + + Index('my_index', my_table.c.data, oracle_compress=True) + + Index('my_index', my_table.c.data1, my_table.c.data2, unique=True, + oracle_compress=1) + +The ``oracle_compress`` parameter accepts either an integer specifying the +number of prefix columns to compress, or ``True`` to use the default (all +columns for non-unique indexes, all but the last column for unique indexes). + +.. versionadded:: 1.0.0 + +""" + +import re + +from sqlalchemy import util, sql +from sqlalchemy.engine import default, reflection +from sqlalchemy.sql import compiler, visitors, expression +from sqlalchemy.sql import operators as sql_operators +from sqlalchemy.sql.elements import quoted_name +from sqlalchemy import types as sqltypes, schema as sa_schema +from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, \ + BLOB, CLOB, TIMESTAMP, FLOAT + +RESERVED_WORDS = \ + set('SHARE RAW DROP BETWEEN FROM DESC OPTION PRIOR LONG THEN ' + 'DEFAULT ALTER IS INTO MINUS INTEGER NUMBER GRANT IDENTIFIED ' + 'ALL TO ORDER ON FLOAT DATE HAVING CLUSTER NOWAIT RESOURCE ' + 'ANY TABLE INDEX FOR UPDATE WHERE CHECK SMALLINT WITH DELETE ' + 'BY ASC REVOKE LIKE SIZE RENAME NOCOMPRESS NULL GROUP VALUES ' + 'AS IN VIEW EXCLUSIVE COMPRESS SYNONYM SELECT INSERT EXISTS ' + 'NOT TRIGGER ELSE CREATE INTERSECT PCTFREE DISTINCT USER ' + 'CONNECT SET MODE OF UNIQUE VARCHAR2 VARCHAR LOCK OR CHAR ' + 'DECIMAL UNION PUBLIC AND START UID COMMENT CURRENT LEVEL'.split()) + +NO_ARG_FNS = set('UID CURRENT_DATE SYSDATE USER ' + 'CURRENT_TIME CURRENT_TIMESTAMP'.split()) + + +class RAW(sqltypes._Binary): + __visit_name__ = 'RAW' +OracleRaw = RAW + + +class NCLOB(sqltypes.Text): + __visit_name__ = 'NCLOB' + + +class VARCHAR2(VARCHAR): + __visit_name__ = 'VARCHAR2' + +NVARCHAR2 = NVARCHAR + + +class NUMBER(sqltypes.Numeric, sqltypes.Integer): + __visit_name__ = 'NUMBER' + + def __init__(self, precision=None, scale=None, asdecimal=None): + if asdecimal is None: + asdecimal = bool(scale and scale > 0) + + super(NUMBER, self).__init__( + precision=precision, scale=scale, asdecimal=asdecimal) + + def adapt(self, impltype): + ret = super(NUMBER, self).adapt(impltype) + # leave a hint for the DBAPI handler + ret._is_oracle_number = True + return ret + + @property + def _type_affinity(self): + if bool(self.scale and self.scale > 0): + return sqltypes.Numeric + else: + return sqltypes.Integer + + +class DOUBLE_PRECISION(sqltypes.Numeric): + __visit_name__ = 'DOUBLE_PRECISION' + + def __init__(self, precision=None, scale=None, asdecimal=None): + if asdecimal is None: + asdecimal = False + + super(DOUBLE_PRECISION, self).__init__( + precision=precision, scale=scale, asdecimal=asdecimal) + + +class BFILE(sqltypes.LargeBinary): + __visit_name__ = 'BFILE' + + +class LONG(sqltypes.Text): + __visit_name__ = 'LONG' + + +class DATE(sqltypes.DateTime): + """Provide the oracle DATE type. + + This type has no special Python behavior, except that it subclasses + :class:`.types.DateTime`; this is to suit the fact that the Oracle + ``DATE`` type supports a time value. + + .. 
versionadded:: 0.9.4 + + """ + __visit_name__ = 'DATE' + + def _compare_type_affinity(self, other): + return other._type_affinity in (sqltypes.DateTime, sqltypes.Date) + + +class INTERVAL(sqltypes.TypeEngine): + __visit_name__ = 'INTERVAL' + + def __init__(self, + day_precision=None, + second_precision=None): + """Construct an INTERVAL. + + Note that only DAY TO SECOND intervals are currently supported. + This is due to a lack of support for YEAR TO MONTH intervals + within available DBAPIs (cx_oracle and zxjdbc). + + :param day_precision: the day precision value. this is the number of + digits to store for the day field. Defaults to "2" + :param second_precision: the second precision value. this is the + number of digits to store for the fractional seconds field. + Defaults to "6". + + """ + self.day_precision = day_precision + self.second_precision = second_precision + + @classmethod + def _adapt_from_generic_interval(cls, interval): + return INTERVAL(day_precision=interval.day_precision, + second_precision=interval.second_precision) + + @property + def _type_affinity(self): + return sqltypes.Interval + + +class ROWID(sqltypes.TypeEngine): + """Oracle ROWID type. + + When used in a cast() or similar, generates ROWID. + + """ + __visit_name__ = 'ROWID' + + +class _OracleBoolean(sqltypes.Boolean): + def get_dbapi_type(self, dbapi): + return dbapi.NUMBER + +colspecs = { + sqltypes.Boolean: _OracleBoolean, + sqltypes.Interval: INTERVAL, + sqltypes.DateTime: DATE +} + +ischema_names = { + 'VARCHAR2': VARCHAR, + 'NVARCHAR2': NVARCHAR, + 'CHAR': CHAR, + 'DATE': DATE, + 'NUMBER': NUMBER, + 'BLOB': BLOB, + 'BFILE': BFILE, + 'CLOB': CLOB, + 'NCLOB': NCLOB, + 'TIMESTAMP': TIMESTAMP, + 'TIMESTAMP WITH TIME ZONE': TIMESTAMP, + 'INTERVAL DAY TO SECOND': INTERVAL, + 'RAW': RAW, + 'FLOAT': FLOAT, + 'DOUBLE PRECISION': DOUBLE_PRECISION, + 'LONG': LONG, +} + + +class OracleTypeCompiler(compiler.GenericTypeCompiler): + # Note: + # Oracle DATE == DATETIME + # Oracle does not allow milliseconds in DATE + # Oracle does not support TIME columns + + def visit_datetime(self, type_, **kw): + return self.visit_DATE(type_, **kw) + + def visit_float(self, type_, **kw): + return self.visit_FLOAT(type_, **kw) + + def visit_unicode(self, type_, **kw): + if self.dialect._supports_nchar: + return self.visit_NVARCHAR2(type_, **kw) + else: + return self.visit_VARCHAR2(type_, **kw) + + def visit_INTERVAL(self, type_, **kw): + return "INTERVAL DAY%s TO SECOND%s" % ( + type_.day_precision is not None and + "(%d)" % type_.day_precision or + "", + type_.second_precision is not None and + "(%d)" % type_.second_precision or + "", + ) + + def visit_LONG(self, type_, **kw): + return "LONG" + + def visit_TIMESTAMP(self, type_, **kw): + if type_.timezone: + return "TIMESTAMP WITH TIME ZONE" + else: + return "TIMESTAMP" + + def visit_DOUBLE_PRECISION(self, type_, **kw): + return self._generate_numeric(type_, "DOUBLE PRECISION", **kw) + + def visit_NUMBER(self, type_, **kw): + return self._generate_numeric(type_, "NUMBER", **kw) + + def _generate_numeric(self, type_, name, precision=None, scale=None, **kw): + if precision is None: + precision = type_.precision + + if scale is None: + scale = getattr(type_, 'scale', None) + + if precision is None: + return name + elif scale is None: + n = "%(name)s(%(precision)s)" + return n % {'name': name, 'precision': precision} + else: + n = "%(name)s(%(precision)s, %(scale)s)" + return n % {'name': name, 'precision': precision, 'scale': scale} + + def visit_string(self, type_, **kw): + return 
self.visit_VARCHAR2(type_, **kw) + + def visit_VARCHAR2(self, type_, **kw): + return self._visit_varchar(type_, '', '2') + + def visit_NVARCHAR2(self, type_, **kw): + return self._visit_varchar(type_, 'N', '2') + visit_NVARCHAR = visit_NVARCHAR2 + + def visit_VARCHAR(self, type_, **kw): + return self._visit_varchar(type_, '', '') + + def _visit_varchar(self, type_, n, num): + if not type_.length: + return "%(n)sVARCHAR%(two)s" % {'two': num, 'n': n} + elif not n and self.dialect._supports_char_length: + varchar = "VARCHAR%(two)s(%(length)s CHAR)" + return varchar % {'length': type_.length, 'two': num} + else: + varchar = "%(n)sVARCHAR%(two)s(%(length)s)" + return varchar % {'length': type_.length, 'two': num, 'n': n} + + def visit_text(self, type_, **kw): + return self.visit_CLOB(type_, **kw) + + def visit_unicode_text(self, type_, **kw): + if self.dialect._supports_nchar: + return self.visit_NCLOB(type_, **kw) + else: + return self.visit_CLOB(type_, **kw) + + def visit_large_binary(self, type_, **kw): + return self.visit_BLOB(type_, **kw) + + def visit_big_integer(self, type_, **kw): + return self.visit_NUMBER(type_, precision=19, **kw) + + def visit_boolean(self, type_, **kw): + return self.visit_SMALLINT(type_, **kw) + + def visit_RAW(self, type_, **kw): + if type_.length: + return "RAW(%(length)s)" % {'length': type_.length} + else: + return "RAW" + + def visit_ROWID(self, type_, **kw): + return "ROWID" + + +class OracleCompiler(compiler.SQLCompiler): + """Oracle compiler modifies the lexical structure of Select + statements to work under non-ANSI configured Oracle databases, if + the use_ansi flag is False. + """ + + compound_keywords = util.update_copy( + compiler.SQLCompiler.compound_keywords, + { + expression.CompoundSelect.EXCEPT: 'MINUS' + } + ) + + def __init__(self, *args, **kwargs): + self.__wheres = {} + self._quoted_bind_names = {} + super(OracleCompiler, self).__init__(*args, **kwargs) + + def visit_mod_binary(self, binary, operator, **kw): + return "mod(%s, %s)" % (self.process(binary.left, **kw), + self.process(binary.right, **kw)) + + def visit_now_func(self, fn, **kw): + return "CURRENT_TIMESTAMP" + + def visit_char_length_func(self, fn, **kw): + return "LENGTH" + self.function_argspec(fn, **kw) + + def visit_match_op_binary(self, binary, operator, **kw): + return "CONTAINS (%s, %s)" % (self.process(binary.left), + self.process(binary.right)) + + def visit_true(self, expr, **kw): + return '1' + + def visit_false(self, expr, **kw): + return '0' + + def get_cte_preamble(self, recursive): + return "WITH" + + def get_select_hint_text(self, byfroms): + return " ".join( + "/*+ %s */" % text for table, text in byfroms.items() + ) + + def function_argspec(self, fn, **kw): + if len(fn.clauses) > 0 or fn.name.upper() not in NO_ARG_FNS: + return compiler.SQLCompiler.function_argspec(self, fn, **kw) + else: + return "" + + def default_from(self): + """Called when a ``SELECT`` statement has no froms, + and no ``FROM`` clause is to be appended. + + The Oracle compiler tacks a "FROM DUAL" to the statement. 
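+
+        For example, a from-less ``select([literal_column("1")])`` renders
+        on Oracle as ``SELECT 1 FROM DUAL`` rather than as a bare
+        ``SELECT 1``.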
+ """ + + return " FROM DUAL" + + def visit_join(self, join, **kwargs): + if self.dialect.use_ansi: + return compiler.SQLCompiler.visit_join(self, join, **kwargs) + else: + kwargs['asfrom'] = True + if isinstance(join.right, expression.FromGrouping): + right = join.right.element + else: + right = join.right + return self.process(join.left, **kwargs) + \ + ", " + self.process(right, **kwargs) + + def _get_nonansi_join_whereclause(self, froms): + clauses = [] + + def visit_join(join): + if join.isouter: + def visit_binary(binary): + if binary.operator == sql_operators.eq: + if join.right.is_derived_from(binary.left.table): + binary.left = _OuterJoinColumn(binary.left) + elif join.right.is_derived_from(binary.right.table): + binary.right = _OuterJoinColumn(binary.right) + clauses.append(visitors.cloned_traverse( + join.onclause, {}, {'binary': visit_binary})) + else: + clauses.append(join.onclause) + + for j in join.left, join.right: + if isinstance(j, expression.Join): + visit_join(j) + elif isinstance(j, expression.FromGrouping): + visit_join(j.element) + + for f in froms: + if isinstance(f, expression.Join): + visit_join(f) + + if not clauses: + return None + else: + return sql.and_(*clauses) + + def visit_outer_join_column(self, vc, **kw): + return self.process(vc.column, **kw) + "(+)" + + def visit_sequence(self, seq): + return (self.dialect.identifier_preparer.format_sequence(seq) + + ".nextval") + + def get_render_as_alias_suffix(self, alias_name_text): + """Oracle doesn't like ``FROM table AS alias``""" + + return " " + alias_name_text + + def returning_clause(self, stmt, returning_cols): + columns = [] + binds = [] + for i, column in enumerate( + expression._select_iterables(returning_cols)): + if column.type._has_column_expression: + col_expr = column.type.column_expression(column) + else: + col_expr = column + outparam = sql.outparam("ret_%d" % i, type_=column.type) + self.binds[outparam.key] = outparam + binds.append( + self.bindparam_string(self._truncate_bindparam(outparam))) + columns.append( + self.process(col_expr, within_columns_clause=False)) + + self._add_to_result_map( + outparam.key, outparam.key, + (column, getattr(column, 'name', None), + getattr(column, 'key', None)), + column.type + ) + + return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds) + + def _TODO_visit_compound_select(self, select): + """Need to determine how to get ``LIMIT``/``OFFSET`` into a + ``UNION`` for Oracle. + """ + pass + + def visit_select(self, select, **kwargs): + """Look for ``LIMIT`` and OFFSET in a select statement, and if + so tries to wrap it in a subquery with ``rownum`` criterion. + """ + + if not getattr(select, '_oracle_visit', None): + if not self.dialect.use_ansi: + froms = self._display_froms_for_select( + select, kwargs.get('asfrom', False)) + whereclause = self._get_nonansi_join_whereclause(froms) + if whereclause is not None: + select = select.where(whereclause) + select._oracle_visit = True + + limit_clause = select._limit_clause + offset_clause = select._offset_clause + if limit_clause is not None or offset_clause is not None: + # See http://www.oracle.com/technology/oramag/oracle/06-sep/\ + # o56asktom.html + # + # Generalized form of an Oracle pagination query: + # select ... from ( + # select /*+ FIRST_ROWS(N) */ ...., rownum as ora_rn from + # ( select distinct ... where ... order by ... 
+ # ) where ROWNUM <= :limit+:offset + # ) where ora_rn > :offset + # Outer select and "ROWNUM as ora_rn" can be dropped if + # limit=0 + + kwargs['select_wraps_for'] = select + select = select._generate() + select._oracle_visit = True + + # Wrap the middle select and add the hint + limitselect = sql.select([c for c in select.c]) + if limit_clause is not None and \ + self.dialect.optimize_limits and \ + select._simple_int_limit: + limitselect = limitselect.prefix_with( + "/*+ FIRST_ROWS(%d) */" % + select._limit) + + limitselect._oracle_visit = True + limitselect._is_wrapper = True + + # If needed, add the limiting clause + if limit_clause is not None: + if not self.dialect.use_binds_for_limits: + # use simple int limits, will raise an exception + # if the limit isn't specified this way + max_row = select._limit + + if offset_clause is not None: + max_row += select._offset + max_row = sql.literal_column("%d" % max_row) + else: + max_row = limit_clause + if offset_clause is not None: + max_row = max_row + offset_clause + limitselect.append_whereclause( + sql.literal_column("ROWNUM") <= max_row) + + # If needed, add the ora_rn, and wrap again with offset. + if offset_clause is None: + limitselect._for_update_arg = select._for_update_arg + select = limitselect + else: + limitselect = limitselect.column( + sql.literal_column("ROWNUM").label("ora_rn")) + limitselect._oracle_visit = True + limitselect._is_wrapper = True + + offsetselect = sql.select( + [c for c in limitselect.c if c.key != 'ora_rn']) + offsetselect._oracle_visit = True + offsetselect._is_wrapper = True + + if not self.dialect.use_binds_for_limits: + offset_clause = sql.literal_column( + "%d" % select._offset) + offsetselect.append_whereclause( + sql.literal_column("ora_rn") > offset_clause) + + offsetselect._for_update_arg = select._for_update_arg + select = offsetselect + + return compiler.SQLCompiler.visit_select(self, select, **kwargs) + + def limit_clause(self, select, **kw): + return "" + + def for_update_clause(self, select, **kw): + if self.is_subquery(): + return "" + + tmp = ' FOR UPDATE' + + if select._for_update_arg.of: + tmp += ' OF ' + ', '.join( + self.process(elem, **kw) for elem in + select._for_update_arg.of + ) + + if select._for_update_arg.nowait: + tmp += " NOWAIT" + + return tmp + + +class OracleDDLCompiler(compiler.DDLCompiler): + + def define_constraint_cascades(self, constraint): + text = "" + if constraint.ondelete is not None: + text += " ON DELETE %s" % constraint.ondelete + + # oracle has no ON UPDATE CASCADE - + # its only available via triggers + # http://asktom.oracle.com/tkyte/update_cascade/index.html + if constraint.onupdate is not None: + util.warn( + "Oracle does not contain native UPDATE CASCADE " + "functionality - onupdates will not be rendered for foreign " + "keys. 
Consider using deferrable=True, initially='deferred' " + "or triggers.") + + return text + + def visit_create_index(self, create): + index = create.element + self._verify_index_table(index) + preparer = self.preparer + text = "CREATE " + if index.unique: + text += "UNIQUE " + if index.dialect_options['oracle']['bitmap']: + text += "BITMAP " + text += "INDEX %s ON %s (%s)" % ( + self._prepared_index_name(index, include_schema=True), + preparer.format_table(index.table, use_schema=True), + ', '.join( + self.sql_compiler.process( + expr, + include_table=False, literal_binds=True) + for expr in index.expressions) + ) + if index.dialect_options['oracle']['compress'] is not False: + if index.dialect_options['oracle']['compress'] is True: + text += " COMPRESS" + else: + text += " COMPRESS %d" % ( + index.dialect_options['oracle']['compress'] + ) + return text + + def post_create_table(self, table): + table_opts = [] + opts = table.dialect_options['oracle'] + + if opts['on_commit']: + on_commit_options = opts['on_commit'].replace("_", " ").upper() + table_opts.append('\n ON COMMIT %s' % on_commit_options) + + if opts['compress']: + if opts['compress'] is True: + table_opts.append("\n COMPRESS") + else: + table_opts.append("\n COMPRESS FOR %s" % ( + opts['compress'] + )) + + return ''.join(table_opts) + + +class OracleIdentifierPreparer(compiler.IdentifierPreparer): + + reserved_words = set([x.lower() for x in RESERVED_WORDS]) + illegal_initial_characters = set( + (str(dig) for dig in range(0, 10))).union(["_", "$"]) + + def _bindparam_requires_quotes(self, value): + """Return True if the given identifier requires quoting.""" + lc_value = value.lower() + return (lc_value in self.reserved_words + or value[0] in self.illegal_initial_characters + or not self.legal_characters.match(util.text_type(value)) + ) + + def format_savepoint(self, savepoint): + name = re.sub(r'^_+', '', savepoint.ident) + return super( + OracleIdentifierPreparer, self).format_savepoint(savepoint, name) + + +class OracleExecutionContext(default.DefaultExecutionContext): + def fire_sequence(self, seq, type_): + return self._execute_scalar( + "SELECT " + + self.dialect.identifier_preparer.format_sequence(seq) + + ".nextval FROM DUAL", type_) + + +class OracleDialect(default.DefaultDialect): + name = 'oracle' + supports_alter = True + supports_unicode_statements = False + supports_unicode_binds = False + max_identifier_length = 30 + supports_sane_rowcount = True + supports_sane_multi_rowcount = False + + supports_simple_order_by_label = False + + supports_sequences = True + sequences_optional = False + postfetch_lastrowid = False + + default_paramstyle = 'named' + colspecs = colspecs + ischema_names = ischema_names + requires_name_normalize = True + + supports_default_values = False + supports_empty_insert = False + + statement_compiler = OracleCompiler + ddl_compiler = OracleDDLCompiler + type_compiler = OracleTypeCompiler + preparer = OracleIdentifierPreparer + execution_ctx_cls = OracleExecutionContext + + reflection_options = ('oracle_resolve_synonyms', ) + + construct_arguments = [ + (sa_schema.Table, { + "resolve_synonyms": False, + "on_commit": None, + "compress": False + }), + (sa_schema.Index, { + "bitmap": False, + "compress": False + }) + ] + + def __init__(self, + use_ansi=True, + optimize_limits=False, + use_binds_for_limits=True, + **kwargs): + default.DefaultDialect.__init__(self, **kwargs) + self.use_ansi = use_ansi + self.optimize_limits = optimize_limits + self.use_binds_for_limits = use_binds_for_limits + + def 
initialize(self, connection): + super(OracleDialect, self).initialize(connection) + self.implicit_returning = self.__dict__.get( + 'implicit_returning', + self.server_version_info > (10, ) + ) + + if self._is_oracle_8: + self.colspecs = self.colspecs.copy() + self.colspecs.pop(sqltypes.Interval) + self.use_ansi = False + + @property + def _is_oracle_8(self): + return self.server_version_info and \ + self.server_version_info < (9, ) + + @property + def _supports_table_compression(self): + return self.server_version_info and \ + self.server_version_info >= (9, 2, ) + + @property + def _supports_table_compress_for(self): + return self.server_version_info and \ + self.server_version_info >= (11, ) + + @property + def _supports_char_length(self): + return not self._is_oracle_8 + + @property + def _supports_nchar(self): + return not self._is_oracle_8 + + def do_release_savepoint(self, connection, name): + # Oracle does not support RELEASE SAVEPOINT + pass + + def has_table(self, connection, table_name, schema=None): + if not schema: + schema = self.default_schema_name + cursor = connection.execute( + sql.text("SELECT table_name FROM all_tables " + "WHERE table_name = :name AND owner = :schema_name"), + name=self.denormalize_name(table_name), + schema_name=self.denormalize_name(schema)) + return cursor.first() is not None + + def has_sequence(self, connection, sequence_name, schema=None): + if not schema: + schema = self.default_schema_name + cursor = connection.execute( + sql.text("SELECT sequence_name FROM all_sequences " + "WHERE sequence_name = :name AND " + "sequence_owner = :schema_name"), + name=self.denormalize_name(sequence_name), + schema_name=self.denormalize_name(schema)) + return cursor.first() is not None + + def normalize_name(self, name): + if name is None: + return None + if util.py2k: + if isinstance(name, str): + name = name.decode(self.encoding) + if name.upper() == name and not \ + self.identifier_preparer._requires_quotes(name.lower()): + return name.lower() + elif name.lower() == name: + return quoted_name(name, quote=True) + else: + return name + + def denormalize_name(self, name): + if name is None: + return None + elif name.lower() == name and not \ + self.identifier_preparer._requires_quotes(name.lower()): + name = name.upper() + if util.py2k: + if not self.supports_unicode_binds: + name = name.encode(self.encoding) + else: + name = unicode(name) + return name + + def _get_default_schema_name(self, connection): + return self.normalize_name( + connection.execute('SELECT USER FROM DUAL').scalar()) + + def _resolve_synonym(self, connection, desired_owner=None, + desired_synonym=None, desired_table=None): + """search for a local synonym matching the given desired owner/name. + + if desired_owner is None, attempts to locate a distinct owner. + + returns the actual name, owner, dblink name, and synonym name if + found. 
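+
+        if nothing matches, a tuple of four Nones is returned instead.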
+ """ + + q = "SELECT owner, table_owner, table_name, db_link, "\ + "synonym_name FROM all_synonyms WHERE " + clauses = [] + params = {} + if desired_synonym: + clauses.append("synonym_name = :synonym_name") + params['synonym_name'] = desired_synonym + if desired_owner: + clauses.append("owner = :desired_owner") + params['desired_owner'] = desired_owner + if desired_table: + clauses.append("table_name = :tname") + params['tname'] = desired_table + + q += " AND ".join(clauses) + + result = connection.execute(sql.text(q), **params) + if desired_owner: + row = result.first() + if row: + return (row['table_name'], row['table_owner'], + row['db_link'], row['synonym_name']) + else: + return None, None, None, None + else: + rows = result.fetchall() + if len(rows) > 1: + raise AssertionError( + "There are multiple tables visible to the schema, you " + "must specify owner") + elif len(rows) == 1: + row = rows[0] + return (row['table_name'], row['table_owner'], + row['db_link'], row['synonym_name']) + else: + return None, None, None, None + + @reflection.cache + def _prepare_reflection_args(self, connection, table_name, schema=None, + resolve_synonyms=False, dblink='', **kw): + + if resolve_synonyms: + actual_name, owner, dblink, synonym = self._resolve_synonym( + connection, + desired_owner=self.denormalize_name(schema), + desired_synonym=self.denormalize_name(table_name) + ) + else: + actual_name, owner, dblink, synonym = None, None, None, None + if not actual_name: + actual_name = self.denormalize_name(table_name) + + if dblink: + # using user_db_links here since all_db_links appears + # to have more restricted permissions. + # http://docs.oracle.com/cd/B28359_01/server.111/b28310/ds_admin005.htm + # will need to hear from more users if we are doing + # the right thing here. 
See [ticket:2619] + owner = connection.scalar( + sql.text("SELECT username FROM user_db_links " + "WHERE db_link=:link"), link=dblink) + dblink = "@" + dblink + elif not owner: + owner = self.denormalize_name(schema or self.default_schema_name) + + return (actual_name, owner, dblink or '', synonym) + + @reflection.cache + def get_schema_names(self, connection, **kw): + s = "SELECT username FROM all_users ORDER BY username" + cursor = connection.execute(s,) + return [self.normalize_name(row[0]) for row in cursor] + + @reflection.cache + def get_table_names(self, connection, schema=None, **kw): + schema = self.denormalize_name(schema or self.default_schema_name) + + # note that table_names() isn't loading DBLINKed or synonym'ed tables + if schema is None: + schema = self.default_schema_name + s = sql.text( + "SELECT table_name FROM all_tables " + "WHERE nvl(tablespace_name, 'no tablespace') NOT IN " + "('SYSTEM', 'SYSAUX') " + "AND OWNER = :owner " + "AND IOT_NAME IS NULL " + "AND DURATION IS NULL") + cursor = connection.execute(s, owner=schema) + return [self.normalize_name(row[0]) for row in cursor] + + @reflection.cache + def get_temp_table_names(self, connection, **kw): + schema = self.denormalize_name(self.default_schema_name) + s = sql.text( + "SELECT table_name FROM all_tables " + "WHERE nvl(tablespace_name, 'no tablespace') NOT IN " + "('SYSTEM', 'SYSAUX') " + "AND OWNER = :owner " + "AND IOT_NAME IS NULL " + "AND DURATION IS NOT NULL") + cursor = connection.execute(s, owner=schema) + return [self.normalize_name(row[0]) for row in cursor] + + @reflection.cache + def get_view_names(self, connection, schema=None, **kw): + schema = self.denormalize_name(schema or self.default_schema_name) + s = sql.text("SELECT view_name FROM all_views WHERE owner = :owner") + cursor = connection.execute(s, owner=self.denormalize_name(schema)) + return [self.normalize_name(row[0]) for row in cursor] + + @reflection.cache + def get_table_options(self, connection, table_name, schema=None, **kw): + options = {} + + resolve_synonyms = kw.get('oracle_resolve_synonyms', False) + dblink = kw.get('dblink', '') + info_cache = kw.get('info_cache') + + (table_name, schema, dblink, synonym) = \ + self._prepare_reflection_args(connection, table_name, schema, + resolve_synonyms, dblink, + info_cache=info_cache) + + params = {"table_name": table_name} + + columns = ["table_name"] + if self._supports_table_compression: + columns.append("compression") + if self._supports_table_compress_for: + columns.append("compress_for") + + text = "SELECT %(columns)s "\ + "FROM ALL_TABLES%(dblink)s "\ + "WHERE table_name = :table_name" + + if schema is not None: + params['owner'] = schema + text += " AND owner = :owner " + text = text % {'dblink': dblink, 'columns': ", ".join(columns)} + + result = connection.execute(sql.text(text), **params) + + enabled = dict(DISABLED=False, ENABLED=True) + + row = result.first() + if row: + if "compression" in row and enabled.get(row.compression, False): + if "compress_for" in row: + options['oracle_compress'] = row.compress_for + else: + options['oracle_compress'] = True + + return options + + @reflection.cache + def get_columns(self, connection, table_name, schema=None, **kw): + """ + + kw arguments can be: + + oracle_resolve_synonyms + + dblink + + """ + + resolve_synonyms = kw.get('oracle_resolve_synonyms', False) + dblink = kw.get('dblink', '') + info_cache = kw.get('info_cache') + + (table_name, schema, dblink, synonym) = \ + self._prepare_reflection_args(connection, table_name, schema, + 
resolve_synonyms, dblink, + info_cache=info_cache) + columns = [] + if self._supports_char_length: + char_length_col = 'char_length' + else: + char_length_col = 'data_length' + + params = {"table_name": table_name} + text = "SELECT column_name, data_type, %(char_length_col)s, "\ + "data_precision, data_scale, "\ + "nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s "\ + "WHERE table_name = :table_name" + if schema is not None: + params['owner'] = schema + text += " AND owner = :owner " + text += " ORDER BY column_id" + text = text % {'dblink': dblink, 'char_length_col': char_length_col} + + c = connection.execute(sql.text(text), **params) + + for row in c: + (colname, orig_colname, coltype, length, precision, scale, nullable, default) = \ + (self.normalize_name(row[0]), row[0], row[1], row[ + 2], row[3], row[4], row[5] == 'Y', row[6]) + + if coltype == 'NUMBER': + coltype = NUMBER(precision, scale) + elif coltype in ('VARCHAR2', 'NVARCHAR2', 'CHAR'): + coltype = self.ischema_names.get(coltype)(length) + elif 'WITH TIME ZONE' in coltype: + coltype = TIMESTAMP(timezone=True) + else: + coltype = re.sub(r'\(\d+\)', '', coltype) + try: + coltype = self.ischema_names[coltype] + except KeyError: + util.warn("Did not recognize type '%s' of column '%s'" % + (coltype, colname)) + coltype = sqltypes.NULLTYPE + + cdict = { + 'name': colname, + 'type': coltype, + 'nullable': nullable, + 'default': default, + 'autoincrement': default is None + } + if orig_colname.lower() == orig_colname: + cdict['quote'] = True + + columns.append(cdict) + return columns + + @reflection.cache + def get_indexes(self, connection, table_name, schema=None, + resolve_synonyms=False, dblink='', **kw): + + info_cache = kw.get('info_cache') + (table_name, schema, dblink, synonym) = \ + self._prepare_reflection_args(connection, table_name, schema, + resolve_synonyms, dblink, + info_cache=info_cache) + indexes = [] + + params = {'table_name': table_name} + text = \ + "SELECT a.index_name, a.column_name, "\ + "\nb.index_type, b.uniqueness, b.compression, b.prefix_length "\ + "\nFROM ALL_IND_COLUMNS%(dblink)s a, "\ + "\nALL_INDEXES%(dblink)s b "\ + "\nWHERE "\ + "\na.index_name = b.index_name "\ + "\nAND a.table_owner = b.table_owner "\ + "\nAND a.table_name = b.table_name "\ + "\nAND a.table_name = :table_name " + + if schema is not None: + params['schema'] = schema + text += "AND a.table_owner = :schema " + + text += "ORDER BY a.index_name, a.column_position" + + text = text % {'dblink': dblink} + + q = sql.text(text) + rp = connection.execute(q, **params) + indexes = [] + last_index_name = None + pk_constraint = self.get_pk_constraint( + connection, table_name, schema, resolve_synonyms=resolve_synonyms, + dblink=dblink, info_cache=kw.get('info_cache')) + pkeys = pk_constraint['constrained_columns'] + uniqueness = dict(NONUNIQUE=False, UNIQUE=True) + enabled = dict(DISABLED=False, ENABLED=True) + + oracle_sys_col = re.compile(r'SYS_NC\d+\$', re.IGNORECASE) + + def upper_name_set(names): + return set([i.upper() for i in names]) + + pk_names = upper_name_set(pkeys) + + def remove_if_primary_key(index): + # don't include the primary key index + if index is not None and \ + upper_name_set(index['column_names']) == pk_names: + indexes.pop() + + index = None + for rset in rp: + if rset.index_name != last_index_name: + remove_if_primary_key(index) + index = dict(name=self.normalize_name(rset.index_name), + column_names=[], dialect_options={}) + indexes.append(index) + index['unique'] = uniqueness.get(rset.uniqueness, False) + + if 
rset.index_type in ('BITMAP', 'FUNCTION-BASED BITMAP'): + index['dialect_options']['oracle_bitmap'] = True + if enabled.get(rset.compression, False): + index['dialect_options']['oracle_compress'] = rset.prefix_length + + # filter out Oracle SYS_NC names. could also do an outer join + # to the all_tab_columns table and check for real col names there. + if not oracle_sys_col.match(rset.column_name): + index['column_names'].append( + self.normalize_name(rset.column_name)) + last_index_name = rset.index_name + remove_if_primary_key(index) + return indexes + + @reflection.cache + def _get_constraint_data(self, connection, table_name, schema=None, + dblink='', **kw): + + params = {'table_name': table_name} + + text = \ + "SELECT"\ + "\nac.constraint_name,"\ + "\nac.constraint_type,"\ + "\nloc.column_name AS local_column,"\ + "\nrem.table_name AS remote_table,"\ + "\nrem.column_name AS remote_column,"\ + "\nrem.owner AS remote_owner,"\ + "\nloc.position as loc_pos,"\ + "\nrem.position as rem_pos"\ + "\nFROM all_constraints%(dblink)s ac,"\ + "\nall_cons_columns%(dblink)s loc,"\ + "\nall_cons_columns%(dblink)s rem"\ + "\nWHERE ac.table_name = :table_name"\ + "\nAND ac.constraint_type IN ('R','P')" + + if schema is not None: + params['owner'] = schema + text += "\nAND ac.owner = :owner" + + text += \ + "\nAND ac.owner = loc.owner"\ + "\nAND ac.constraint_name = loc.constraint_name"\ + "\nAND ac.r_owner = rem.owner(+)"\ + "\nAND ac.r_constraint_name = rem.constraint_name(+)"\ + "\nAND (rem.position IS NULL or loc.position=rem.position)"\ + "\nORDER BY ac.constraint_name, loc.position" + + text = text % {'dblink': dblink} + rp = connection.execute(sql.text(text), **params) + constraint_data = rp.fetchall() + return constraint_data + + @reflection.cache + def get_pk_constraint(self, connection, table_name, schema=None, **kw): + resolve_synonyms = kw.get('oracle_resolve_synonyms', False) + dblink = kw.get('dblink', '') + info_cache = kw.get('info_cache') + + (table_name, schema, dblink, synonym) = \ + self._prepare_reflection_args(connection, table_name, schema, + resolve_synonyms, dblink, + info_cache=info_cache) + pkeys = [] + constraint_name = None + constraint_data = self._get_constraint_data( + connection, table_name, schema, dblink, + info_cache=kw.get('info_cache')) + + for row in constraint_data: + (cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \ + row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]]) + if cons_type == 'P': + if constraint_name is None: + constraint_name = self.normalize_name(cons_name) + pkeys.append(local_column) + return {'constrained_columns': pkeys, 'name': constraint_name} + + @reflection.cache + def get_foreign_keys(self, connection, table_name, schema=None, **kw): + """ + + kw arguments can be: + + oracle_resolve_synonyms + + dblink + + """ + + requested_schema = schema # to check later on + resolve_synonyms = kw.get('oracle_resolve_synonyms', False) + dblink = kw.get('dblink', '') + info_cache = kw.get('info_cache') + + (table_name, schema, dblink, synonym) = \ + self._prepare_reflection_args(connection, table_name, schema, + resolve_synonyms, dblink, + info_cache=info_cache) + + constraint_data = self._get_constraint_data( + connection, table_name, schema, dblink, + info_cache=kw.get('info_cache')) + + def fkey_rec(): + return { + 'name': None, + 'constrained_columns': [], + 'referred_schema': None, + 'referred_table': None, + 'referred_columns': [] + } + + fkeys = util.defaultdict(fkey_rec) + + for row in constraint_data: + 
(cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \ + row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]]) + + if cons_type == 'R': + if remote_table is None: + # ticket 363 + util.warn( + ("Got 'None' querying 'table_name' from " + "all_cons_columns%(dblink)s - does the user have " + "proper rights to the table?") % {'dblink': dblink}) + continue + + rec = fkeys[cons_name] + rec['name'] = cons_name + local_cols, remote_cols = rec[ + 'constrained_columns'], rec['referred_columns'] + + if not rec['referred_table']: + if resolve_synonyms: + ref_remote_name, ref_remote_owner, ref_dblink, ref_synonym = \ + self._resolve_synonym( + connection, + desired_owner=self.denormalize_name( + remote_owner), + desired_table=self.denormalize_name( + remote_table) + ) + if ref_synonym: + remote_table = self.normalize_name(ref_synonym) + remote_owner = self.normalize_name( + ref_remote_owner) + + rec['referred_table'] = remote_table + + if requested_schema is not None or \ + self.denormalize_name(remote_owner) != schema: + rec['referred_schema'] = remote_owner + + local_cols.append(local_column) + remote_cols.append(remote_column) + + return list(fkeys.values()) + + @reflection.cache + def get_view_definition(self, connection, view_name, schema=None, + resolve_synonyms=False, dblink='', **kw): + info_cache = kw.get('info_cache') + (view_name, schema, dblink, synonym) = \ + self._prepare_reflection_args(connection, view_name, schema, + resolve_synonyms, dblink, + info_cache=info_cache) + + params = {'view_name': view_name} + text = "SELECT text FROM all_views WHERE view_name=:view_name" + + if schema is not None: + text += " AND owner = :schema" + params['schema'] = schema + + rp = connection.execute(sql.text(text), **params).scalar() + if rp: + if util.py2k: + rp = rp.decode(self.encoding) + return rp + else: + return None + + +class _OuterJoinColumn(sql.ClauseElement): + __visit_name__ = 'outer_join_column' + + def __init__(self, column): + self.column = column diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/cx_oracle.py new file mode 100644 index 0000000..cfbb87e --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/cx_oracle.py @@ -0,0 +1,989 @@ +# oracle/cx_oracle.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" + +.. dialect:: oracle+cx_oracle + :name: cx-Oracle + :dbapi: cx_oracle + :connectstring: oracle+cx_oracle://user:pass@host:port/dbname\ +[?key=value&key=value...] + :url: http://cx-oracle.sourceforge.net/ + +Additional Connect Arguments +---------------------------- + +When connecting with ``dbname`` present, the host, port, and dbname tokens are +converted to a TNS name using +the cx_oracle ``makedsn()`` function. Otherwise, the host token is taken +directly as a TNS name. + +Additional arguments which may be specified either as query string arguments +on the URL, or as keyword arguments to :func:`.create_engine()` are: + +* ``allow_twophase`` - enable two-phase transactions. Defaults to ``True``. + +* ``arraysize`` - set the cx_oracle.arraysize value on cursors, defaulted + to 50. This setting is significant with cx_Oracle as the contents of LOB + objects are only readable within a "live" row (e.g. within a batch of + 50 rows). 
+
+* ``auto_convert_lobs`` - defaults to True; See :ref:`cx_oracle_lob`.
+
+* ``auto_setinputsizes`` - the cx_oracle.setinputsizes() call is issued for
+  all bind parameters. This is required for LOB datatypes but can be
+  disabled to reduce overhead. Defaults to ``True``. Specific types
+  can be excluded from this process using the ``exclude_setinputsizes``
+  parameter.
+
+* ``coerce_to_unicode`` - see :ref:`cx_oracle_unicode` for detail.
+
+* ``coerce_to_decimal`` - see :ref:`cx_oracle_numeric` for detail.
+
+* ``exclude_setinputsizes`` - a tuple or list of string DBAPI type names to
+  be excluded from the "auto setinputsizes" feature. The type names here
+  must match DBAPI types that are found in the "cx_Oracle" module namespace,
+  such as cx_Oracle.UNICODE, cx_Oracle.NCLOB, etc. Defaults to
+  ``(STRING, UNICODE)``.
+
+  .. versionadded:: 0.8 specific DBAPI types can be excluded from the
+     auto_setinputsizes feature via the exclude_setinputsizes attribute.
+
+* ``mode`` - This is given the string value of SYSDBA or SYSOPER, or
+  alternatively an integer value. This value is only available as a URL query
+  string argument.
+
+* ``threaded`` - enable multithreaded access to cx_oracle connections.
+  Defaults to ``True``. Note that this is the opposite default of the
+  cx_Oracle DBAPI itself.
+
+* ``service_name`` - An option to use connection string (DSN) with
+  ``SERVICE_NAME`` instead of ``SID``. It can't be passed when a ``database``
+  part is given.
+  E.g. ``oracle+cx_oracle://scott:tiger@host:1521/?service_name=hr``
+  is a valid url. This value is only available as a URL query string argument.
+
+  .. versionadded:: 1.0.0
+
+.. _cx_oracle_unicode:
+
+Unicode
+-------
+
+The cx_Oracle DBAPI as of version 5 fully supports unicode, and has the
+ability to return string results as Python unicode objects natively.
+
+When used in Python 3, cx_Oracle returns all strings as Python unicode objects
+(that is, plain ``str`` in Python 3). In Python 2, it will return as Python
+unicode those column values that are of type ``NVARCHAR`` or ``NCLOB``. For
+column values that are of type ``VARCHAR`` or other non-unicode string types,
+it will return values as Python strings (e.g. bytestrings).
+
+The cx_Oracle SQLAlchemy dialect presents two different options for the use
+case of returning ``VARCHAR`` column values as Python unicode objects under
+Python 2:
+
+* the cx_Oracle DBAPI has the ability to coerce all string results to Python
+  unicode objects unconditionally using output type handlers. This has
+  the advantage that the unicode conversion is global to all statements
+  at the cx_Oracle driver level, meaning it works with raw textual SQL
+  statements that have no typing information associated. However, this system
+  has been observed to incur significant performance overhead, not only
+  because it takes effect for all string values unconditionally, but also
+  because cx_Oracle under Python 2 seems to use a pure-Python function call in
+  order to do the decode operation, which under cPython can be orders of
+  magnitude slower than doing it using C functions alone.
+
+* SQLAlchemy has unicode-decoding services built in, and when using
+  SQLAlchemy's C extensions, these functions do not use any Python function
+  calls and are very fast. The disadvantage to this approach is that the
+  unicode conversion only takes effect for statements where the
+  :class:`.Unicode` type or :class:`.String` type with
+  ``convert_unicode=True`` is explicitly associated with the result column.
+  This is the case for any ORM or Core query or SQL expression as well as for
+  a :func:`.text` construct that specifies output column types, so in the vast
+  majority of cases this is not an issue. However, when sending a completely
+  raw string to :meth:`.Connection.execute`, this typing information isn't
+  present, unless the string is handled within a :func:`.text` construct that
+  adds typing information.
+
+As of version 0.9.2 of SQLAlchemy, the default approach is to use SQLAlchemy's
+typing system. This keeps cx_Oracle's expensive Python 2 approach
+disabled unless the user explicitly wants it. Under Python 3, SQLAlchemy
+detects that cx_Oracle is returning unicode objects natively and cx_Oracle's
+system is used.
+
+To re-enable cx_Oracle's output type handler under Python 2, the
+``coerce_to_unicode=True`` flag (new in 0.9.4) can be passed to
+:func:`.create_engine`::
+
+    engine = create_engine("oracle+cx_oracle://dsn", coerce_to_unicode=True)
+
+Alternatively, to run a pure string SQL statement and get ``VARCHAR`` results
+as Python unicode under Python 2 without using cx_Oracle's native handlers,
+the :func:`.text` feature can be used::
+
+    from sqlalchemy import text, Unicode
+    result = conn.execute(
+        text("select username from user").columns(username=Unicode))
+
+.. versionchanged:: 0.9.2 cx_Oracle's outputtypehandlers are no longer used
+   for unicode results of non-unicode datatypes in Python 2, after they were
+   identified as a major performance bottleneck. SQLAlchemy's own unicode
+   facilities are used instead.
+
+.. versionadded:: 0.9.4 Added the ``coerce_to_unicode`` flag, to re-enable
+   cx_Oracle's outputtypehandler and revert to pre-0.9.2 behavior.
+
+.. _cx_oracle_returning:
+
+RETURNING Support
+-----------------
+
+The cx_oracle DBAPI supports a limited subset of Oracle's already limited
+RETURNING support. Typically, results can only be guaranteed for at most one
+column being returned; this is the typical case when SQLAlchemy uses RETURNING
+to get just the value of a primary-key-associated sequence value.
+Additional column expressions will cause problems in a non-deterministic way,
+due to cx_oracle's lack of support for the OCI_DATA_AT_EXEC API which is
+required for more complex RETURNING scenarios.
+
+For this reason, stability may be enhanced by disabling RETURNING support
+completely; SQLAlchemy otherwise will use RETURNING to fetch newly
+sequence-generated primary keys. As illustrated in :ref:`oracle_returning`::
+
+    engine = create_engine("oracle://scott:tiger@dsn",
+                           implicit_returning=False)
+
+.. seealso::
+
+    http://docs.oracle.com/cd/B10501_01/appdev.920/a96584/oci05bnd.htm#420693
+    - OCI documentation for RETURNING
+
+    http://sourceforge.net/mailarchive/message.php?msg_id=31338136
+    - cx_oracle developer commentary
+
+.. _cx_oracle_lob:
+
+LOB Objects
+-----------
+
+cx_oracle returns oracle LOBs using the cx_oracle.LOB object. SQLAlchemy
+converts these to strings so that the interface of the Binary type is
+consistent with that of other backends, and so that the linkage to a live
+cursor is not needed in scenarios like result.fetchmany() and
+result.fetchall(). This means that by default, LOB objects are fully fetched
+unconditionally by SQLAlchemy, and the linkage to a live cursor is broken.
+
+To disable this processing, pass ``auto_convert_lobs=False`` to
+:func:`.create_engine()`.
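+
+As a minimal sketch of the above (the connect string here is purely
+illustrative), an engine that leaves LOB values as raw ``cx_oracle.LOB``
+objects would look like::
+
+    from sqlalchemy import create_engine
+
+    # result rows will now contain cx_oracle.LOB objects; call .read()
+    # on them while the originating cursor/row is still live.
+    engine = create_engine(
+        "oracle+cx_oracle://scott:tiger@dsn",
+        auto_convert_lobs=False)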
+
+Two Phase Transaction Support
+-----------------------------
+
+Two Phase transactions are implemented using XA transactions, and are known
+to work in a rudimentary fashion with recent versions of cx_Oracle
+as of SQLAlchemy 0.8.0b2, 0.7.10. However, the mechanism is not yet
+considered to be robust and should still be regarded as experimental.
+
+In particular, the cx_Oracle DBAPI as recently as 5.1.2 has a bug regarding
+two phase which prevents
+a particular DBAPI connection from being consistently usable in both
+prepared transactions as well as traditional DBAPI usage patterns; therefore
+once a particular connection is used via :meth:`.Connection.begin_prepared`,
+all subsequent usages of the underlying DBAPI connection must be within
+the context of prepared transactions.
+
+The default behavior of :class:`.Engine` is to maintain a pool of DBAPI
+connections. Therefore, due to the above glitch, a DBAPI connection that has
+been used in a two-phase operation, and is then returned to the pool, will
+not be usable in a non-two-phase context. To avoid this situation,
+the application can make one of several choices:
+
+* Disable connection pooling using :class:`.NullPool`
+
+* Ensure that the particular :class:`.Engine` in use is only used
+  for two-phase operations. A :class:`.Engine` bound to an ORM
+  :class:`.Session` which includes ``twophase=True`` will consistently
+  use the two-phase transaction style.
+
+* For ad-hoc two-phase operations without disabling pooling, the DBAPI
+  connection in use can be evicted from the connection pool using the
+  :meth:`.Connection.detach` method.
+
+.. versionchanged:: 0.8.0b2,0.7.10
+    Support for cx_oracle prepared transactions has been implemented
+    and tested.
+
+.. _cx_oracle_numeric:
+
+Precision Numerics
+------------------
+
+The SQLAlchemy dialect goes through a lot of steps to ensure
+that decimal numbers are sent and received with full accuracy.
+An "outputtypehandler" callable is associated with each
+cx_oracle connection object which detects numeric types and
+receives them as string values, instead of receiving a Python
+``float`` directly; the string value is then passed to the Python
+``Decimal`` constructor. The :class:`.Numeric` and
+:class:`.Float` types under the cx_oracle dialect are aware of
+this behavior, and will coerce the ``Decimal`` to ``float`` if
+the ``asdecimal`` flag is ``False`` (default on :class:`.Float`,
+optional on :class:`.Numeric`).
+
+Because the handler coerces to ``Decimal`` in all cases first,
+the feature can detract significantly from performance.
+If precision numerics aren't required, the decimal handling
+can be disabled by passing the flag ``coerce_to_decimal=False``
+to :func:`.create_engine`::
+
+    engine = create_engine("oracle+cx_oracle://dsn", coerce_to_decimal=False)
+
+.. versionadded:: 0.7.6
+    Add the ``coerce_to_decimal`` flag.
+
+Another alternative for improving performance is to use the
+`cdecimal `_ library;
+see :class:`.Numeric` for additional notes.
+
+The handler attempts to use the "precision" and "scale"
+attributes of the result set column to best determine if
+subsequent incoming values should be received as ``Decimal`` as
+opposed to int (in which case no processing is added). There are
+several scenarios where OCI_ does not provide unambiguous data
+as to the numeric type, including some situations where
+individual rows may return a combination of floating point and
+integer values. Certain values for "precision" and "scale" have
+been observed to determine this scenario.
+When it occurs, the
+outputtypehandler receives the value as a string and then passes it off to a
+processing function which detects, for each returned value, if a
+decimal point is present, and if so converts to ``Decimal``,
+otherwise to int. The intention is that simple int-based
+statements like "SELECT my_seq.nextval() FROM DUAL" continue to
+return ints and not ``Decimal`` objects, and that any kind of
+floating point value is received as a string so that there is no
+floating point loss of precision.
+
+The "decimal point is present" logic itself is also sensitive to
+locale. Under OCI_, this is controlled by the NLS_LANG
+environment variable. Upon first connection, the dialect runs a
+test to determine the current "decimal" character, which can be
+a comma "," for European locales. From that point forward the
+outputtypehandler uses that character to represent a decimal
+point. Note that cx_oracle 5.0.3 or greater is required
+when dealing with numerics with locale settings that don't use
+a period "." as the decimal character.
+
+.. versionchanged:: 0.6.6
+    The outputtypehandler supports the case where the locale uses a
+    comma "," character to represent a decimal point.
+
+.. _OCI: http://www.oracle.com/technetwork/database/features/oci/index.html
+
+"""
+
+from __future__ import absolute_import
+
+from .base import OracleCompiler, OracleDialect, OracleExecutionContext
+from . import base as oracle
+from ...engine import result as _result
+from sqlalchemy import types as sqltypes, util, exc, processors
+import random
+import collections
+import decimal
+import re
+
+
+class _OracleNumeric(sqltypes.Numeric):
+    def bind_processor(self, dialect):
+        # cx_oracle accepts Decimal objects and floats
+        return None
+
+    def result_processor(self, dialect, coltype):
+        # we apply a cx_oracle type handler to all connections
+        # that converts floating point strings to Decimal().
+        # However, in some subquery situations, Oracle doesn't
+        # give us enough information to determine int or Decimal.
+        # It could even be int/Decimal differently on each row,
+        # regardless of the scale given for the originating type.
+        # So we still need an old school isinstance() handler
+        # here for decimals.
+
+        if dialect.supports_native_decimal:
+            if self.asdecimal:
+                fstring = "%%.%df" % self._effective_decimal_return_scale
+
+                def to_decimal(value):
+                    if value is None:
+                        return None
+                    elif isinstance(value, decimal.Decimal):
+                        return value
+                    else:
+                        return decimal.Decimal(fstring % value)
+
+                return to_decimal
+            else:
+                if self.precision is None and self.scale is None:
+                    return processors.to_float
+                elif not getattr(self, '_is_oracle_number', False) \
+                        and self.scale is not None:
+                    return processors.to_float
+                else:
+                    return None
+        else:
+            # cx_oracle 4 behavior, will assume
+            # floats
+            return super(_OracleNumeric, self).\
+                result_processor(dialect, coltype)
+
+
+class _OracleDate(sqltypes.Date):
+    def bind_processor(self, dialect):
+        return None
+
+    def result_processor(self, dialect, coltype):
+        def process(value):
+            if value is not None:
+                return value.date()
+            else:
+                return value
+        return process
+
+
+class _LOBMixin(object):
+    def result_processor(self, dialect, coltype):
+        if not dialect.auto_convert_lobs:
+            # return the cx_oracle.LOB directly.
+ return None + + def process(value): + if value is not None: + return value.read() + else: + return value + return process + + +class _NativeUnicodeMixin(object): + if util.py2k: + def bind_processor(self, dialect): + if dialect._cx_oracle_with_unicode: + def process(value): + if value is None: + return value + else: + return unicode(value) + return process + else: + return super( + _NativeUnicodeMixin, self).bind_processor(dialect) + + # we apply a connection output handler that returns + # unicode in all cases, so the "native_unicode" flag + # will be set for the default String.result_processor. + + +class _OracleChar(_NativeUnicodeMixin, sqltypes.CHAR): + def get_dbapi_type(self, dbapi): + return dbapi.FIXED_CHAR + + +class _OracleNVarChar(_NativeUnicodeMixin, sqltypes.NVARCHAR): + def get_dbapi_type(self, dbapi): + return getattr(dbapi, 'UNICODE', dbapi.STRING) + + +class _OracleText(_LOBMixin, sqltypes.Text): + def get_dbapi_type(self, dbapi): + return dbapi.CLOB + + +class _OracleLong(oracle.LONG): + # a raw LONG is a text type, but does *not* + # get the LobMixin with cx_oracle. + + def get_dbapi_type(self, dbapi): + return dbapi.LONG_STRING + + +class _OracleString(_NativeUnicodeMixin, sqltypes.String): + pass + + +class _OracleUnicodeText( + _LOBMixin, _NativeUnicodeMixin, sqltypes.UnicodeText): + def get_dbapi_type(self, dbapi): + return dbapi.NCLOB + + def result_processor(self, dialect, coltype): + lob_processor = _LOBMixin.result_processor(self, dialect, coltype) + if lob_processor is None: + return None + + string_processor = sqltypes.UnicodeText.result_processor( + self, dialect, coltype) + + if string_processor is None: + return lob_processor + else: + def process(value): + return string_processor(lob_processor(value)) + return process + + +class _OracleInteger(sqltypes.Integer): + def result_processor(self, dialect, coltype): + def to_int(val): + if val is not None: + val = int(val) + return val + return to_int + + +class _OracleBinary(_LOBMixin, sqltypes.LargeBinary): + def get_dbapi_type(self, dbapi): + return dbapi.BLOB + + def bind_processor(self, dialect): + return None + + +class _OracleInterval(oracle.INTERVAL): + def get_dbapi_type(self, dbapi): + return dbapi.INTERVAL + + +class _OracleRaw(oracle.RAW): + pass + + +class _OracleRowid(oracle.ROWID): + def get_dbapi_type(self, dbapi): + return dbapi.ROWID + + +class OracleCompiler_cx_oracle(OracleCompiler): + def bindparam_string(self, name, **kw): + quote = getattr(name, 'quote', None) + if quote is True or quote is not False and \ + self.preparer._bindparam_requires_quotes(name): + quoted_name = '"%s"' % name + self._quoted_bind_names[name] = quoted_name + return OracleCompiler.bindparam_string(self, quoted_name, **kw) + else: + return OracleCompiler.bindparam_string(self, name, **kw) + + +class OracleExecutionContext_cx_oracle(OracleExecutionContext): + + def pre_exec(self): + quoted_bind_names = \ + getattr(self.compiled, '_quoted_bind_names', None) + if quoted_bind_names: + if not self.dialect.supports_unicode_statements: + # if DBAPI doesn't accept unicode statements, + # keys in self.parameters would have been encoded + # here. so convert names in quoted_bind_names + # to encoded as well. 
+ quoted_bind_names = \ + dict( + (fromname.encode(self.dialect.encoding), + toname.encode(self.dialect.encoding)) + for fromname, toname in + quoted_bind_names.items() + ) + for param in self.parameters: + for fromname, toname in quoted_bind_names.items(): + param[toname] = param[fromname] + del param[fromname] + + if self.dialect.auto_setinputsizes: + # cx_oracle really has issues when you setinputsizes + # on String, including that outparams/RETURNING + # breaks for varchars + self.set_input_sizes( + quoted_bind_names, + exclude_types=self.dialect.exclude_setinputsizes + ) + + # if a single execute, check for outparams + if len(self.compiled_parameters) == 1: + for bindparam in self.compiled.binds.values(): + if bindparam.isoutparam: + dbtype = bindparam.type.dialect_impl(self.dialect).\ + get_dbapi_type(self.dialect.dbapi) + if not hasattr(self, 'out_parameters'): + self.out_parameters = {} + if dbtype is None: + raise exc.InvalidRequestError( + "Cannot create out parameter for parameter " + "%r - its type %r is not supported by" + " cx_oracle" % + (bindparam.key, bindparam.type) + ) + name = self.compiled.bind_names[bindparam] + self.out_parameters[name] = self.cursor.var(dbtype) + self.parameters[0][quoted_bind_names.get(name, name)] = \ + self.out_parameters[name] + + def create_cursor(self): + c = self._dbapi_connection.cursor() + if self.dialect.arraysize: + c.arraysize = self.dialect.arraysize + + return c + + def get_result_proxy(self): + if hasattr(self, 'out_parameters') and self.compiled.returning: + returning_params = dict( + (k, v.getvalue()) + for k, v in self.out_parameters.items() + ) + return ReturningResultProxy(self, returning_params) + + result = None + if self.cursor.description is not None: + for column in self.cursor.description: + type_code = column[1] + if type_code in self.dialect._cx_oracle_binary_types: + result = _result.BufferedColumnResultProxy(self) + + if result is None: + result = _result.ResultProxy(self) + + if hasattr(self, 'out_parameters'): + if self.compiled_parameters is not None and \ + len(self.compiled_parameters) == 1: + result.out_parameters = out_parameters = {} + + for bind, name in self.compiled.bind_names.items(): + if name in self.out_parameters: + type = bind.type + impl_type = type.dialect_impl(self.dialect) + dbapi_type = impl_type.get_dbapi_type( + self.dialect.dbapi) + result_processor = impl_type.\ + result_processor(self.dialect, + dbapi_type) + if result_processor is not None: + out_parameters[name] = \ + result_processor( + self.out_parameters[name].getvalue()) + else: + out_parameters[name] = self.out_parameters[ + name].getvalue() + else: + result.out_parameters = dict( + (k, v.getvalue()) + for k, v in self.out_parameters.items() + ) + + return result + + +class OracleExecutionContext_cx_oracle_with_unicode( + OracleExecutionContext_cx_oracle): + """Support WITH_UNICODE in Python 2.xx. + + WITH_UNICODE allows cx_Oracle's Python 3 unicode handling + behavior under Python 2.x. This mode in some cases disallows + and in other cases silently passes corrupted data when + non-Python-unicode strings (a.k.a. plain old Python strings) + are passed as arguments to connect(), the statement sent to execute(), + or any of the bind parameter keys or values sent to execute(). + This optional context therefore ensures that all statements are + passed as Python unicode objects. 
+ + """ + + def __init__(self, *arg, **kw): + OracleExecutionContext_cx_oracle.__init__(self, *arg, **kw) + self.statement = util.text_type(self.statement) + + def _execute_scalar(self, stmt): + return super(OracleExecutionContext_cx_oracle_with_unicode, self).\ + _execute_scalar(util.text_type(stmt)) + + +class ReturningResultProxy(_result.FullyBufferedResultProxy): + """Result proxy which stuffs the _returning clause + outparams + into the fetch.""" + + def __init__(self, context, returning_params): + self._returning_params = returning_params + super(ReturningResultProxy, self).__init__(context) + + def _cursor_description(self): + returning = self.context.compiled.returning + return [ + ("ret_%d" % i, None) + for i, col in enumerate(returning) + ] + + def _buffer_rows(self): + return collections.deque( + [tuple(self._returning_params["ret_%d" % i] + for i, c in enumerate(self._returning_params))] + ) + + +class OracleDialect_cx_oracle(OracleDialect): + execution_ctx_cls = OracleExecutionContext_cx_oracle + statement_compiler = OracleCompiler_cx_oracle + + driver = "cx_oracle" + + colspecs = colspecs = { + sqltypes.Numeric: _OracleNumeric, + # generic type, assume datetime.date is desired + sqltypes.Date: _OracleDate, + sqltypes.LargeBinary: _OracleBinary, + sqltypes.Boolean: oracle._OracleBoolean, + sqltypes.Interval: _OracleInterval, + oracle.INTERVAL: _OracleInterval, + sqltypes.Text: _OracleText, + sqltypes.String: _OracleString, + sqltypes.UnicodeText: _OracleUnicodeText, + sqltypes.CHAR: _OracleChar, + + # a raw LONG is a text type, but does *not* + # get the LobMixin with cx_oracle. + oracle.LONG: _OracleLong, + + # this is only needed for OUT parameters. + # it would be nice if we could not use it otherwise. + sqltypes.Integer: _OracleInteger, + + oracle.RAW: _OracleRaw, + sqltypes.Unicode: _OracleNVarChar, + sqltypes.NVARCHAR: _OracleNVarChar, + oracle.ROWID: _OracleRowid, + } + + execute_sequence_format = list + + def __init__(self, + auto_setinputsizes=True, + exclude_setinputsizes=("STRING", "UNICODE"), + auto_convert_lobs=True, + threaded=True, + allow_twophase=True, + coerce_to_decimal=True, + coerce_to_unicode=False, + arraysize=50, **kwargs): + OracleDialect.__init__(self, **kwargs) + self.threaded = threaded + self.arraysize = arraysize + self.allow_twophase = allow_twophase + self.supports_timestamp = self.dbapi is None or \ + hasattr(self.dbapi, 'TIMESTAMP') + self.auto_setinputsizes = auto_setinputsizes + self.auto_convert_lobs = auto_convert_lobs + + if hasattr(self.dbapi, 'version'): + self.cx_oracle_ver = tuple([int(x) for x in + self.dbapi.version.split('.')]) + else: + self.cx_oracle_ver = (0, 0, 0) + + def types(*names): + return set( + getattr(self.dbapi, name, None) for name in names + ).difference([None]) + + self.exclude_setinputsizes = types(*(exclude_setinputsizes or ())) + self._cx_oracle_string_types = types("STRING", "UNICODE", + "NCLOB", "CLOB") + self._cx_oracle_unicode_types = types("UNICODE", "NCLOB") + self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB") + self.supports_unicode_binds = self.cx_oracle_ver >= (5, 0) + + self.coerce_to_unicode = ( + self.cx_oracle_ver >= (5, 0) and + coerce_to_unicode + ) + + self.supports_native_decimal = ( + self.cx_oracle_ver >= (5, 0) and + coerce_to_decimal + ) + + self._cx_oracle_native_nvarchar = self.cx_oracle_ver >= (5, 0) + + if self.cx_oracle_ver is None: + # this occurs in tests with mock DBAPIs + self._cx_oracle_string_types = set() + self._cx_oracle_with_unicode = False + elif util.py3k 
or (
+                self.cx_oracle_ver >= (5,) and not \
+                hasattr(self.dbapi, 'UNICODE')
+        ):
+            # cx_Oracle WITH_UNICODE mode. *only* python
+            # unicode objects accepted for anything
+            self.supports_unicode_statements = True
+            self.supports_unicode_binds = True
+            self._cx_oracle_with_unicode = True
+
+            if util.py2k:
+                # There's really no reason to run with WITH_UNICODE under
+                # Python 2.x. Give the user a hint.
+                util.warn(
+                    "cx_Oracle is compiled under Python 2.xx using the "
+                    "WITH_UNICODE flag. Consider recompiling cx_Oracle "
+                    "without this flag, which is in no way necessary for "
+                    "full support of Unicode. Otherwise, all string-holding "
+                    "bind parameters must be explicitly typed using "
+                    "SQLAlchemy's String type or one of its subtypes, "
+                    "or otherwise be passed as Python unicode. "
+                    "Plain Python strings passed as bind parameters will be "
+                    "silently corrupted by cx_Oracle."
+                )
+                self.execution_ctx_cls = \
+                    OracleExecutionContext_cx_oracle_with_unicode
+        else:
+            self._cx_oracle_with_unicode = False
+
+        if self.cx_oracle_ver is None or \
+                not self.auto_convert_lobs or \
+                not hasattr(self.dbapi, 'CLOB'):
+            self.dbapi_type_map = {}
+        else:
+            # only use this for LOB objects. using it for strings, dates
+            # etc. leads to a little too much magic, reflection doesn't know
+            # if it should expect encoded strings or unicodes, etc.
+            self.dbapi_type_map = {
+                self.dbapi.CLOB: oracle.CLOB(),
+                self.dbapi.NCLOB: oracle.NCLOB(),
+                self.dbapi.BLOB: oracle.BLOB(),
+                self.dbapi.BINARY: oracle.RAW(),
+            }
+
+    @classmethod
+    def dbapi(cls):
+        import cx_Oracle
+        return cx_Oracle
+
+    def initialize(self, connection):
+        super(OracleDialect_cx_oracle, self).initialize(connection)
+        if self._is_oracle_8:
+            self.supports_unicode_binds = False
+        self._detect_decimal_char(connection)
+
+    def _detect_decimal_char(self, connection):
+        """detect if the decimal separator character is not '.', as
+        is the case with European locale settings for NLS_LANG.
+
+        cx_oracle itself uses similar logic when it formats Python
+        Decimal objects to strings on the bind side (as of 5.0.3),
+        as Oracle sends/receives string numerics only in the
+        current locale.
+
+        """
+        if self.cx_oracle_ver < (5,):
+            # no output type handlers before version 5
+            return
+
+        cx_Oracle = self.dbapi
+        conn = connection.connection
+
+        # override the output_type_handler that's
+        # on the cx_oracle connection with a plain
+        # one on the cursor
+
+        def output_type_handler(cursor, name, defaultType,
+                                size, precision, scale):
+            return cursor.var(
+                cx_Oracle.STRING,
+                255, arraysize=cursor.arraysize)
+
+        cursor = conn.cursor()
+        cursor.outputtypehandler = output_type_handler
+        cursor.execute("SELECT 0.1 FROM DUAL")
+        val = cursor.fetchone()[0]
+        cursor.close()
+        char = re.match(r"([\.,])", val).group(1)
+        if char != '.':
+            _detect_decimal = self._detect_decimal
+            self._detect_decimal = \
+                lambda value: _detect_decimal(value.replace(char, '.'))
+            self._to_decimal = \
+                lambda value: decimal.Decimal(value.replace(char, '.'))
+
+    def _detect_decimal(self, value):
+        if "." in value:
+            return decimal.Decimal(value)
+        else:
+            return int(value)
+
+    _to_decimal = decimal.Decimal
+
+    def on_connect(self):
+        if self.cx_oracle_ver < (5,):
+            # no output type handlers before version 5
+            return
+
+        cx_Oracle = self.dbapi
+
+        def output_type_handler(cursor, name, defaultType,
+                                size, precision, scale):
+            # convert all NUMBER with precision + positive scale to Decimal
+            # this almost allows "native decimal" mode.
+ if self.supports_native_decimal and \ + defaultType == cx_Oracle.NUMBER and \ + precision and scale > 0: + return cursor.var( + cx_Oracle.STRING, + 255, + outconverter=self._to_decimal, + arraysize=cursor.arraysize) + # if NUMBER with zero precision and 0 or neg scale, this appears + # to indicate "ambiguous". Use a slower converter that will + # make a decision based on each value received - the type + # may change from row to row (!). This kills + # off "native decimal" mode, handlers still needed. + elif self.supports_native_decimal and \ + defaultType == cx_Oracle.NUMBER \ + and not precision and scale <= 0: + return cursor.var( + cx_Oracle.STRING, + 255, + outconverter=self._detect_decimal, + arraysize=cursor.arraysize) + # allow all strings to come back natively as Unicode + elif self.coerce_to_unicode and \ + defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR): + return cursor.var(util.text_type, size, cursor.arraysize) + + def on_connect(conn): + conn.outputtypehandler = output_type_handler + + return on_connect + + def create_connect_args(self, url): + dialect_opts = dict(url.query) + for opt in ('use_ansi', 'auto_setinputsizes', 'auto_convert_lobs', + 'threaded', 'allow_twophase'): + if opt in dialect_opts: + util.coerce_kw_type(dialect_opts, opt, bool) + setattr(self, opt, dialect_opts[opt]) + + database = url.database + service_name = dialect_opts.get('service_name', None) + if database or service_name: + # if we have a database, then we have a remote host + port = url.port + if port: + port = int(port) + else: + port = 1521 + + if database and service_name: + raise exc.InvalidRequestError( + '"service_name" option shouldn\'t ' + 'be used with a "database" part of the url') + if database: + makedsn_kwargs = {'sid': database} + if service_name: + makedsn_kwargs = {'service_name': service_name} + + dsn = self.dbapi.makedsn(url.host, port, **makedsn_kwargs) + else: + # we have a local tnsname + dsn = url.host + + opts = dict( + user=url.username, + password=url.password, + dsn=dsn, + threaded=self.threaded, + twophase=self.allow_twophase, + ) + + if util.py2k: + if self._cx_oracle_with_unicode: + for k, v in opts.items(): + if isinstance(v, str): + opts[k] = unicode(v) + else: + for k, v in opts.items(): + if isinstance(v, unicode): + opts[k] = str(v) + + if 'mode' in url.query: + opts['mode'] = url.query['mode'] + if isinstance(opts['mode'], util.string_types): + mode = opts['mode'].upper() + if mode == 'SYSDBA': + opts['mode'] = self.dbapi.SYSDBA + elif mode == 'SYSOPER': + opts['mode'] = self.dbapi.SYSOPER + else: + util.coerce_kw_type(opts, 'mode', int) + return ([], opts) + + def _get_server_version_info(self, connection): + return tuple( + int(x) + for x in connection.connection.version.split('.') + ) + + def is_disconnect(self, e, connection, cursor): + error, = e.args + if isinstance(e, self.dbapi.InterfaceError): + return "not connected" in str(e) + elif hasattr(error, 'code'): + # ORA-00028: your session has been killed + # ORA-03114: not connected to ORACLE + # ORA-03113: end-of-file on communication channel + # ORA-03135: connection lost contact + # ORA-01033: ORACLE initialization or shutdown in progress + # ORA-02396: exceeded maximum idle time, please connect again + # TODO: Others ? + return error.code in (28, 3114, 3113, 3135, 1033, 2396) + else: + return False + + def create_xid(self): + """create a two-phase transaction ID. + + this id will be passed to do_begin_twophase(), do_rollback_twophase(), + do_commit_twophase(). 
its format is unspecified.""" + + id = random.randint(0, 2 ** 128) + return (0x1234, "%032x" % id, "%032x" % 9) + + def do_executemany(self, cursor, statement, parameters, context=None): + if isinstance(parameters, tuple): + parameters = list(parameters) + cursor.executemany(statement, parameters) + + def do_begin_twophase(self, connection, xid): + connection.connection.begin(*xid) + + def do_prepare_twophase(self, connection, xid): + result = connection.connection.prepare() + connection.info['cx_oracle_prepared'] = result + + def do_rollback_twophase(self, connection, xid, is_prepared=True, + recover=False): + self.do_rollback(connection.connection) + + def do_commit_twophase(self, connection, xid, is_prepared=True, + recover=False): + if not is_prepared: + self.do_commit(connection.connection) + else: + oci_prepared = connection.info['cx_oracle_prepared'] + if oci_prepared: + self.do_commit(connection.connection) + + def do_recover_twophase(self, connection): + connection.info.pop('cx_oracle_prepared', None) + +dialect = OracleDialect_cx_oracle diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/zxjdbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/zxjdbc.py new file mode 100644 index 0000000..c3259fe --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/zxjdbc.py @@ -0,0 +1,235 @@ +# oracle/zxjdbc.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: oracle+zxjdbc + :name: zxJDBC for Jython + :dbapi: zxjdbc + :connectstring: oracle+zxjdbc://user:pass@host/dbname + :driverurl: http://www.oracle.com/technetwork/database/features/jdbc/index-091264.html + + .. note:: Jython is not supported by current versions of SQLAlchemy. The + zxjdbc dialect should be considered as experimental. + +""" +import decimal +import re + +from sqlalchemy import sql, types as sqltypes, util +from sqlalchemy.connectors.zxJDBC import ZxJDBCConnector +from sqlalchemy.dialects.oracle.base import (OracleCompiler, + OracleDialect, + OracleExecutionContext) +from sqlalchemy.engine import result as _result +from sqlalchemy.sql import expression +import collections + +SQLException = zxJDBC = None + + +class _ZxJDBCDate(sqltypes.Date): + + def result_processor(self, dialect, coltype): + def process(value): + if value is None: + return None + else: + return value.date() + return process + + +class _ZxJDBCNumeric(sqltypes.Numeric): + + def result_processor(self, dialect, coltype): + # XXX: does the dialect return Decimal or not??? 
+ # if it does (in all cases), we could use a None processor as well as + # the to_float generic processor + if self.asdecimal: + def process(value): + if isinstance(value, decimal.Decimal): + return value + else: + return decimal.Decimal(str(value)) + else: + def process(value): + if isinstance(value, decimal.Decimal): + return float(value) + else: + return value + return process + + +class OracleCompiler_zxjdbc(OracleCompiler): + + def returning_clause(self, stmt, returning_cols): + self.returning_cols = list( + expression._select_iterables(returning_cols)) + + # within_columns_clause=False so that labels (foo AS bar) don't render + columns = [self.process(c, within_columns_clause=False) + for c in self.returning_cols] + + if not hasattr(self, 'returning_parameters'): + self.returning_parameters = [] + + binds = [] + for i, col in enumerate(self.returning_cols): + dbtype = col.type.dialect_impl( + self.dialect).get_dbapi_type(self.dialect.dbapi) + self.returning_parameters.append((i + 1, dbtype)) + + bindparam = sql.bindparam( + "ret_%d" % i, value=ReturningParam(dbtype)) + self.binds[bindparam.key] = bindparam + binds.append( + self.bindparam_string(self._truncate_bindparam(bindparam))) + + return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds) + + +class OracleExecutionContext_zxjdbc(OracleExecutionContext): + + def pre_exec(self): + if hasattr(self.compiled, 'returning_parameters'): + # prepare a zxJDBC statement so we can grab its underlying + # OraclePreparedStatement's getReturnResultSet later + self.statement = self.cursor.prepare(self.statement) + + def get_result_proxy(self): + if hasattr(self.compiled, 'returning_parameters'): + rrs = None + try: + try: + rrs = self.statement.__statement__.getReturnResultSet() + next(rrs) + except SQLException as sqle: + msg = '%s [SQLCode: %d]' % ( + sqle.getMessage(), sqle.getErrorCode()) + if sqle.getSQLState() is not None: + msg += ' [SQLState: %s]' % sqle.getSQLState() + raise zxJDBC.Error(msg) + else: + row = tuple( + self.cursor.datahandler.getPyObject( + rrs, index, dbtype) + for index, dbtype in + self.compiled.returning_parameters) + return ReturningResultProxy(self, row) + finally: + if rrs is not None: + try: + rrs.close() + except SQLException: + pass + self.statement.close() + + return _result.ResultProxy(self) + + def create_cursor(self): + cursor = self._dbapi_connection.cursor() + cursor.datahandler = self.dialect.DataHandler(cursor.datahandler) + return cursor + + +class ReturningResultProxy(_result.FullyBufferedResultProxy): + + """ResultProxy backed by the RETURNING ResultSet results.""" + + def __init__(self, context, returning_row): + self._returning_row = returning_row + super(ReturningResultProxy, self).__init__(context) + + def _cursor_description(self): + ret = [] + for c in self.context.compiled.returning_cols: + if hasattr(c, 'name'): + ret.append((c.name, c.type)) + else: + ret.append((c.anon_label, c.type)) + return ret + + def _buffer_rows(self): + return collections.deque([self._returning_row]) + + +class ReturningParam(object): + + """A bindparam value representing a RETURNING parameter. + + Specially handled by OracleReturningDataHandler. 
+ """ + + def __init__(self, type): + self.type = type + + def __eq__(self, other): + if isinstance(other, ReturningParam): + return self.type == other.type + return NotImplemented + + def __ne__(self, other): + if isinstance(other, ReturningParam): + return self.type != other.type + return NotImplemented + + def __repr__(self): + kls = self.__class__ + return '<%s.%s object at 0x%x type=%s>' % ( + kls.__module__, kls.__name__, id(self), self.type) + + +class OracleDialect_zxjdbc(ZxJDBCConnector, OracleDialect): + jdbc_db_name = 'oracle' + jdbc_driver_name = 'oracle.jdbc.OracleDriver' + + statement_compiler = OracleCompiler_zxjdbc + execution_ctx_cls = OracleExecutionContext_zxjdbc + + colspecs = util.update_copy( + OracleDialect.colspecs, + { + sqltypes.Date: _ZxJDBCDate, + sqltypes.Numeric: _ZxJDBCNumeric + } + ) + + def __init__(self, *args, **kwargs): + super(OracleDialect_zxjdbc, self).__init__(*args, **kwargs) + global SQLException, zxJDBC + from java.sql import SQLException + from com.ziclix.python.sql import zxJDBC + from com.ziclix.python.sql.handler import OracleDataHandler + + class OracleReturningDataHandler(OracleDataHandler): + """zxJDBC DataHandler that specially handles ReturningParam.""" + + def setJDBCObject(self, statement, index, object, dbtype=None): + if type(object) is ReturningParam: + statement.registerReturnParameter(index, object.type) + elif dbtype is None: + OracleDataHandler.setJDBCObject( + self, statement, index, object) + else: + OracleDataHandler.setJDBCObject( + self, statement, index, object, dbtype) + self.DataHandler = OracleReturningDataHandler + + def initialize(self, connection): + super(OracleDialect_zxjdbc, self).initialize(connection) + self.implicit_returning = \ + connection.connection.driverversion >= '10.2' + + def _create_jdbc_url(self, url): + return 'jdbc:oracle:thin:@%s:%s:%s' % ( + url.host, url.port or 1521, url.database) + + def _get_server_version_info(self, connection): + version = re.search( + r'Release ([\d\.]+)', connection.connection.dbversion).group(1) + return tuple(int(x) for x in version.split('.')) + +dialect = OracleDialect_zxjdbc diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgres.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgres.py new file mode 100644 index 0000000..04d37a2 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgres.py @@ -0,0 +1,18 @@ +# dialects/postgres.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +# backwards compat with the old name +from sqlalchemy.util import warn_deprecated + +warn_deprecated( + "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to " + "'postgresql'. The new URL format is " + "postgresql[+driver]://:@/" +) + +from sqlalchemy.dialects.postgresql import * +from sqlalchemy.dialects.postgresql import base diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/__init__.py new file mode 100644 index 0000000..006afbd --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/__init__.py @@ -0,0 +1,31 @@ +# postgresql/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from . 
+
+base.dialect = psycopg2.dialect
+
+from .base import \
+    INTEGER, BIGINT, SMALLINT, VARCHAR, CHAR, TEXT, NUMERIC, FLOAT, REAL, \
+    INET, CIDR, UUID, BIT, MACADDR, OID, DOUBLE_PRECISION, TIMESTAMP, TIME, \
+    DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All, \
+    TSVECTOR, DropEnumType
+from .constraints import ExcludeConstraint
+from .hstore import HSTORE, hstore
+from .json import JSON, JSONElement, JSONB
+from .ranges import INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, \
+    TSTZRANGE
+
+__all__ = (
+    'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC',
+    'FLOAT', 'REAL', 'INET', 'CIDR', 'UUID', 'BIT', 'MACADDR', 'OID',
+    'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA', 'BOOLEAN',
+    'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'Any', 'All', 'array', 'HSTORE',
+    'hstore', 'INT4RANGE', 'INT8RANGE', 'NUMRANGE', 'DATERANGE',
+    'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONB', 'JSONElement',
+    'DropEnumType'
+)
diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/base.py
new file mode 100644
index 0000000..6a60c22
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/base.py
@@ -0,0 +1,2940 @@
+# postgresql/base.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+.. dialect:: postgresql
+    :name: PostgreSQL
+
+
+Sequences/SERIAL
+----------------
+
+PostgreSQL supports sequences, and SQLAlchemy uses these as the default means
+of creating new primary key values for integer-based primary key columns. When
+creating tables, SQLAlchemy will issue the ``SERIAL`` datatype for
+integer-based primary key columns, which generates a sequence and server side
+default corresponding to the column.
+
+To specify a specific named sequence to be used for primary key generation,
+use the :func:`~sqlalchemy.schema.Sequence` construct::
+
+    Table('sometable', metadata,
+          Column('id', Integer, Sequence('some_id_seq'), primary_key=True)
+    )
+
+When SQLAlchemy issues a single INSERT statement, to fulfill the contract of
+having the "last insert identifier" available, a RETURNING clause is added to
+the INSERT statement which specifies the primary key columns should be
+returned after the statement completes. The RETURNING functionality only takes
+place if Postgresql 8.2 or later is in use. As a fallback approach, the
+sequence, whether specified explicitly or implicitly via ``SERIAL``, is
+executed independently beforehand, with the returned value used in the
+subsequent insert. Note that when an
+:func:`~sqlalchemy.sql.expression.insert()` construct is executed using
+"executemany" semantics, the "last inserted identifier" functionality does not
+apply; no RETURNING clause is emitted nor is the sequence pre-executed in this
+case.
+
+To disable the use of RETURNING by default, specify the flag
+``implicit_returning=False`` to :func:`.create_engine`.
+
+.. 
_postgresql_isolation_level: + +Transaction Isolation Level +--------------------------- + +All Postgresql dialects support setting of transaction isolation level +both via a dialect-specific parameter :paramref:`.create_engine.isolation_level` +accepted by :func:`.create_engine`, +as well as the ``isolation_level`` argument as passed to +:meth:`.Connection.execution_options`. When using a non-psycopg2 dialect, +this feature works by issuing the command +``SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL <level>`` for +each new connection. + +To set isolation level using :func:`.create_engine`:: + + engine = create_engine( + "postgresql+pg8000://scott:tiger@localhost/test", + isolation_level="READ UNCOMMITTED" + ) + +To set using per-connection execution options:: + + connection = engine.connect() + connection = connection.execution_options( + isolation_level="READ COMMITTED" + ) + +Valid values for ``isolation_level`` include: + +* ``READ COMMITTED`` +* ``READ UNCOMMITTED`` +* ``REPEATABLE READ`` +* ``SERIALIZABLE`` + +The :mod:`~sqlalchemy.dialects.postgresql.psycopg2` and +:mod:`~sqlalchemy.dialects.postgresql.pg8000` dialects also offer the +special level ``AUTOCOMMIT``. + +.. seealso:: + + :ref:`psycopg2_isolation_level` + + :ref:`pg8000_isolation_level` + +.. _postgresql_schema_reflection: + +Remote-Schema Table Introspection and Postgresql search_path +------------------------------------------------------------ + +The Postgresql dialect can reflect tables from any schema. The +:paramref:`.Table.schema` argument, or alternatively the +:paramref:`.MetaData.reflect.schema` argument determines which schema will +be searched for the table or tables. The reflected :class:`.Table` objects +will in all cases retain this ``.schema`` attribute as was specified. +However, with regards to tables which these :class:`.Table` objects refer to +via foreign key constraint, a decision must be made as to how the ``.schema`` +is represented in those remote tables, in the case where that remote +schema name is also a member of the current +`Postgresql search path +`_. + +By default, the Postgresql dialect mimics the behavior encouraged by +Postgresql's own ``pg_get_constraintdef()`` builtin procedure. This function +returns a sample definition for a particular foreign key constraint, +omitting the referenced schema name from that definition when the name is +also in the Postgresql schema search path. The interaction below +illustrates this behavior:: + + test=> CREATE TABLE test_schema.referred(id INTEGER PRIMARY KEY); + CREATE TABLE + test=> CREATE TABLE referring( + test(> id INTEGER PRIMARY KEY, + test(> referred_id INTEGER REFERENCES test_schema.referred(id)); + CREATE TABLE + test=> SET search_path TO public, test_schema; + test=> SELECT pg_catalog.pg_get_constraintdef(r.oid, true) FROM + test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n + test-> ON n.oid = c.relnamespace + test-> JOIN pg_catalog.pg_constraint r ON c.oid = r.conrelid + test-> WHERE c.relname='referring' AND r.contype = 'f' + test-> ; + pg_get_constraintdef + --------------------------------------------------- + FOREIGN KEY (referred_id) REFERENCES referred(id) + (1 row) + +Above, we created a table ``referred`` as a member of the remote schema +``test_schema``; however, when we added ``test_schema`` to the +PG ``search_path`` and then asked ``pg_get_constraintdef()`` for the +``FOREIGN KEY`` syntax, ``test_schema`` was not included in the output of +the function.
+ +On the other hand, if we set the search path back to the typical default +of ``public``:: + + test=> SET search_path TO public; + SET + +The same query against ``pg_get_constraintdef()`` now returns the fully +schema-qualified name for us:: + + test=> SELECT pg_catalog.pg_get_constraintdef(r.oid, true) FROM + test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n + test-> ON n.oid = c.relnamespace + test-> JOIN pg_catalog.pg_constraint r ON c.oid = r.conrelid + test-> WHERE c.relname='referring' AND r.contype = 'f'; + pg_get_constraintdef + --------------------------------------------------------------- + FOREIGN KEY (referred_id) REFERENCES test_schema.referred(id) + (1 row) + +SQLAlchemy will by default use the return value of ``pg_get_constraintdef()`` +in order to determine the remote schema name. That is, if our ``search_path`` +were set to include ``test_schema``, and we invoked a table +reflection process as follows:: + + >>> from sqlalchemy import Table, MetaData, create_engine + >>> engine = create_engine("postgresql://scott:tiger@localhost/test") + >>> with engine.connect() as conn: + ... conn.execute("SET search_path TO test_schema, public") + ... meta = MetaData() + ... referring = Table('referring', meta, + ... autoload=True, autoload_with=conn) + ... + + +The above process would deliver to the :attr:`.MetaData.tables` collection +``referred`` table named **without** the schema:: + + >>> meta.tables['referred'].schema is None + True + +To alter the behavior of reflection such that the referred schema is +maintained regardless of the ``search_path`` setting, use the +``postgresql_ignore_search_path`` option, which can be specified as a +dialect-specific argument to both :class:`.Table` as well as +:meth:`.MetaData.reflect`:: + + >>> with engine.connect() as conn: + ... conn.execute("SET search_path TO test_schema, public") + ... meta = MetaData() + ... referring = Table('referring', meta, autoload=True, + ... autoload_with=conn, + ... postgresql_ignore_search_path=True) + ... + + +We will now have ``test_schema.referred`` stored as schema-qualified:: + + >>> meta.tables['test_schema.referred'].schema + 'test_schema' + +.. sidebar:: Best Practices for Postgresql Schema reflection + + The description of Postgresql schema reflection behavior is complex, and + is the product of many years of dealing with widely varied use cases and + user preferences. But in fact, there's no need to understand any of it if + you just stick to the simplest use pattern: leave the ``search_path`` set + to its default of ``public`` only, never refer to the name ``public`` as + an explicit schema name otherwise, and refer to all other schema names + explicitly when building up a :class:`.Table` object. The options + described here are only for those users who can't, or prefer not to, stay + within these guidelines. + +Note that **in all cases**, the "default" schema is always reflected as +``None``. The "default" schema on Postgresql is that which is returned by the +Postgresql ``current_schema()`` function. On a typical Postgresql +installation, this is the name ``public``. So a table that refers to another +which is in the ``public`` (i.e. default) schema will always have the +``.schema`` attribute set to ``None``. + +.. versionadded:: 0.9.2 Added the ``postgresql_ignore_search_path`` + dialect-level option accepted by :class:`.Table` and + :meth:`.MetaData.reflect`. + + +.. seealso:: + + `The Schema Search Path + `_ + - on the Postgresql website. 
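+As a cross-check of the behavior described above, the reflected foreign
+key targets can also be examined through the :class:`.Inspector`
+interface. The below is a minimal sketch, reusing the hypothetical
+``referring`` / ``test_schema.referred`` tables from the examples above;
+:meth:`.Inspector.get_foreign_keys` reports a ``referred_schema`` of
+``None`` whenever the schema name was stripped by the ``search_path``
+logic::
+
+    >>> from sqlalchemy import inspect
+    >>> insp = inspect(engine)  # engine as created in the examples above
+    >>> # each entry is a dict; 'referred_schema' is None when the
+    >>> # referred table was located via the search path
+    >>> [fk['referred_schema'] for fk in insp.get_foreign_keys('referring')]
+    [None]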
+ +INSERT/UPDATE...RETURNING +------------------------- + +The dialect supports PG 8.2's ``INSERT..RETURNING``, ``UPDATE..RETURNING`` and +``DELETE..RETURNING`` syntaxes. ``INSERT..RETURNING`` is used by default +for single-row INSERT statements in order to fetch newly generated +primary key identifiers. To specify an explicit ``RETURNING`` clause, +use the :meth:`._UpdateBase.returning` method on a per-statement basis:: + + # INSERT..RETURNING + result = table.insert().returning(table.c.col1, table.c.col2).\\ + values(name='foo') + print result.fetchall() + + # UPDATE..RETURNING + result = table.update().returning(table.c.col1, table.c.col2).\\ + where(table.c.name=='foo').values(name='bar') + print result.fetchall() + + # DELETE..RETURNING + result = table.delete().returning(table.c.col1, table.c.col2).\\ + where(table.c.name=='foo') + print result.fetchall() + +.. _postgresql_match: + +Full Text Search +---------------- + +SQLAlchemy makes available the Postgresql ``@@`` operator via the +:meth:`.ColumnElement.match` method on any textual column expression. +On a Postgresql dialect, an expression like the following:: + + select([sometable.c.text.match("search string")]) + +will emit to the database:: + + SELECT text @@ to_tsquery('search string') FROM table + +The Postgresql text search functions such as ``to_tsquery()`` +and ``to_tsvector()`` are available +explicitly using the standard :data:`.func` construct. For example:: + + select([ + func.to_tsvector('fat cats ate rats').match('cat & rat') + ]) + +Emits the equivalent of:: + + SELECT to_tsvector('fat cats ate rats') @@ to_tsquery('cat & rat') + +The :class:`.postgresql.TSVECTOR` type can provide for explicit CAST:: + + from sqlalchemy.dialects.postgresql import TSVECTOR + from sqlalchemy import select, cast + select([cast("some text", TSVECTOR)]) + +produces a statement equivalent to:: + + SELECT CAST('some text' AS TSVECTOR) AS anon_1 + +Full Text Searches in Postgresql are influenced by a combination of: the +PostgreSQL setting of ``default_text_search_config``, the ``regconfig`` used +to build the GIN/GiST indexes, and the ``regconfig`` optionally passed in +during a query. + +When performing a Full Text Search against a column that has a GIN or +GiST index that is already pre-computed (which is common on full text +searches) one may need to explicitly pass in a particular PostgreSQL +``regconfig`` value to ensure the query-planner utilizes the index and does +not re-compute the column on demand.
+ +In order to provide for this explicit query planning, or to use different +search strategies, the ``match`` method accepts a ``postgresql_regconfig`` +keyword argument:: + + select([mytable.c.id]).where( + mytable.c.title.match('somestring', postgresql_regconfig='english') + ) + +Emits the equivalent of:: + + SELECT mytable.id FROM mytable + WHERE mytable.title @@ to_tsquery('english', 'somestring') + +One can also specifically pass in a ``'regconfig'`` value to the +``to_tsvector()`` command as the initial argument:: + + select([mytable.c.id]).where( + func.to_tsvector('english', mytable.c.title )\ + .match('somestring', postgresql_regconfig='english') + ) + +produces a statement equivalent to:: + + SELECT mytable.id FROM mytable + WHERE to_tsvector('english', mytable.title) @@ + to_tsquery('english', 'somestring') + +It is recommended that you use the ``EXPLAIN ANALYZE...`` tool from +PostgreSQL to ensure that you are generating queries with SQLAlchemy that +take full advantage of any indexes you may have created for full text search. + +FROM ONLY ... +------------------------ + +The dialect supports PostgreSQL's ONLY keyword for targeting only a particular +table in an inheritance hierarchy. This can be used to produce the +``SELECT ... FROM ONLY``, ``UPDATE ONLY ...``, and ``DELETE FROM ONLY ...`` +syntaxes. It uses SQLAlchemy's hints mechanism:: + + # SELECT ... FROM ONLY ... + result = table.select().with_hint(table, 'ONLY', 'postgresql') + print result.fetchall() + + # UPDATE ONLY ... + table.update(values=dict(foo='bar')).with_hint('ONLY', + dialect_name='postgresql') + + # DELETE FROM ONLY ... + table.delete().with_hint('ONLY', dialect_name='postgresql') + +.. _postgresql_indexes: + +Postgresql-Specific Index Options +--------------------------------- + +Several extensions to the :class:`.Index` construct are available, specific +to the PostgreSQL dialect. + +Partial Indexes +^^^^^^^^^^^^^^^^ + +Partial indexes add criterion to the index definition so that the index is +applied to a subset of rows. These can be specified on :class:`.Index` +using the ``postgresql_where`` keyword argument:: + + Index('my_index', my_table.c.id, postgresql_where=my_table.c.value > 10) + +Operator Classes +^^^^^^^^^^^^^^^^^ + +PostgreSQL allows the specification of an *operator class* for each column of +an index (see +http://www.postgresql.org/docs/8.3/interactive/indexes-opclass.html). +The :class:`.Index` construct allows these to be specified via the +``postgresql_ops`` keyword argument:: + + Index('my_index', my_table.c.id, my_table.c.data, + postgresql_ops={ + 'data': 'text_pattern_ops', + 'id': 'int4_ops' + }) + +.. versionadded:: 0.7.2 + ``postgresql_ops`` keyword argument to :class:`.Index` construct. + +Note that the keys in the ``postgresql_ops`` dictionary are the "key" name of +the :class:`.Column`, i.e. the name used to access it from the ``.c`` +collection of :class:`.Table`, which can be configured to be different than +the actual name of the column as expressed in the database. + +Index Types +^^^^^^^^^^^^ + +PostgreSQL provides several index types: B-Tree, Hash, GiST, and GIN, as well +as the ability for users to create their own (see +http://www.postgresql.org/docs/8.3/static/indexes-types.html).
These can be +specified on :class:`.Index` using the ``postgresql_using`` keyword argument:: + + Index('my_index', my_table.c.data, postgresql_using='gin') + +The value passed to the keyword argument will be simply passed through to the +underlying CREATE INDEX command, so it *must* be a valid index type for your +version of PostgreSQL. + +.. _postgresql_index_storage: + +Index Storage Parameters +^^^^^^^^^^^^^^^^^^^^^^^^ + +PostgreSQL allows storage parameters to be set on indexes. The storage +parameters available depend on the index method used by the index. Storage +parameters can be specified on :class:`.Index` using the ``postgresql_with`` +keyword argument:: + + Index('my_index', my_table.c.data, postgresql_with={"fillfactor": 50}) + +.. versionadded:: 1.0.6 + +.. _postgresql_index_concurrently: + +Indexes with CONCURRENTLY +^^^^^^^^^^^^^^^^^^^^^^^^^ + +The Postgresql index option CONCURRENTLY is supported by passing the +flag ``postgresql_concurrently`` to the :class:`.Index` construct:: + + tbl = Table('testtbl', m, Column('data', Integer)) + + idx1 = Index('test_idx1', tbl.c.data, postgresql_concurrently=True) + +The above index construct will render SQL as:: + + CREATE INDEX CONCURRENTLY test_idx1 ON testtbl (data) + +.. versionadded:: 0.9.9 + +.. _postgresql_index_reflection: + +Postgresql Index Reflection +--------------------------- + +The Postgresql database creates a UNIQUE INDEX implicitly whenever the +UNIQUE CONSTRAINT construct is used. When inspecting a table using +:class:`.Inspector`, the :meth:`.Inspector.get_indexes` +and the :meth:`.Inspector.get_unique_constraints` will report on these +two constructs distinctly; in the case of the index, the key +``duplicates_constraint`` will be present in the index entry if it is +detected as mirroring a constraint. When performing reflection using +``Table(..., autoload=True)``, the UNIQUE INDEX is **not** returned +in :attr:`.Table.indexes` when it is detected as mirroring a +:class:`.UniqueConstraint` in the :attr:`.Table.constraints` collection. + +.. versionchanged:: 1.0.0 - :class:`.Table` reflection now includes + :class:`.UniqueConstraint` objects present in the :attr:`.Table.constraints` + collection; the Postgresql backend will no longer include a "mirrored" + :class:`.Index` construct in :attr:`.Table.indexes` if it is detected + as corresponding to a unique constraint. + +Special Reflection Options +-------------------------- + +The :class:`.Inspector` used for the Postgresql backend is an instance +of :class:`.PGInspector`, which offers additional methods:: + + from sqlalchemy import create_engine, inspect + + engine = create_engine("postgresql+psycopg2://localhost/test") + insp = inspect(engine) # will be a PGInspector + + print(insp.get_enums()) + +.. autoclass:: PGInspector + :members: + +.. 
_postgresql_table_options: + +PostgreSQL Table Options +------------------------- + +Several options for CREATE TABLE are supported directly by the PostgreSQL +dialect in conjunction with the :class:`.Table` construct: + +* ``TABLESPACE``:: + + Table("some_table", metadata, ..., postgresql_tablespace='some_tablespace') + +* ``ON COMMIT``:: + + Table("some_table", metadata, ..., postgresql_on_commit='PRESERVE ROWS') + +* ``WITH OIDS``:: + + Table("some_table", metadata, ..., postgresql_with_oids=True) + +* ``WITHOUT OIDS``:: + + Table("some_table", metadata, ..., postgresql_with_oids=False) + +* ``INHERITS``:: + + Table("some_table", metadata, ..., postgresql_inherits="some_supertable") + + Table("some_table", metadata, ..., postgresql_inherits=("t1", "t2", ...)) + +.. versionadded:: 1.0.0 + +.. seealso:: + + `Postgresql CREATE TABLE options + `_ + +ENUM Types +---------- + +Postgresql has an independently creatable TYPE structure which is used +to implement an enumerated type. This approach introduces significant +complexity on the SQLAlchemy side in terms of when this type should be +CREATED and DROPPED. The type object is also an independently reflectable +entity. The following sections should be consulted: + +* :class:`.postgresql.ENUM` - DDL and typing support for ENUM. + +* :meth:`.PGInspector.get_enums` - retrieve a listing of current ENUM types + +* :meth:`.postgresql.ENUM.create` , :meth:`.postgresql.ENUM.drop` - individual + CREATE and DROP commands for ENUM. + +.. _postgresql_array_of_enum: + +Using ENUM with ARRAY +^^^^^^^^^^^^^^^^^^^^^ + +The combination of ENUM and ARRAY is not directly supported by backend +DBAPIs at this time. In order to send and receive an ARRAY of ENUM, +use the following workaround type:: + + class ArrayOfEnum(ARRAY): + + def bind_expression(self, bindvalue): + return sa.cast(bindvalue, self) + + def result_processor(self, dialect, coltype): + super_rp = super(ArrayOfEnum, self).result_processor( + dialect, coltype) + + def handle_raw_string(value): + inner = re.match(r"^{(.*)}$", value).group(1) + return inner.split(",") if inner else [] + + def process(value): + if value is None: + return None + return super_rp(handle_raw_string(value)) + return process + +E.g.:: + + Table( + 'mydata', metadata, + Column('id', Integer, primary_key=True), + Column('data', ArrayOfEnum(ENUM('a', 'b', 'c', name='myenum'))) + ) + +This type is not included as a built-in type as it would be incompatible +with a DBAPI that suddenly decides to support ARRAY of ENUM directly in +a new version. + +""" +from collections import defaultdict +import re +import datetime as dt + + +from ... import sql, schema, exc, util +from ...engine import default, reflection +from ...sql import compiler, expression, operators, default_comparator +from ...
import types as sqltypes + +try: + from uuid import UUID as _python_UUID +except ImportError: + _python_UUID = None + +from sqlalchemy.types import INTEGER, BIGINT, SMALLINT, VARCHAR, \ + CHAR, TEXT, FLOAT, NUMERIC, \ + DATE, BOOLEAN, REAL + +RESERVED_WORDS = set( + ["all", "analyse", "analyze", "and", "any", "array", "as", "asc", + "asymmetric", "both", "case", "cast", "check", "collate", "column", + "constraint", "create", "current_catalog", "current_date", + "current_role", "current_time", "current_timestamp", "current_user", + "default", "deferrable", "desc", "distinct", "do", "else", "end", + "except", "false", "fetch", "for", "foreign", "from", "grant", "group", + "having", "in", "initially", "intersect", "into", "leading", "limit", + "localtime", "localtimestamp", "new", "not", "null", "of", "off", + "offset", "old", "on", "only", "or", "order", "placing", "primary", + "references", "returning", "select", "session_user", "some", "symmetric", + "table", "then", "to", "trailing", "true", "union", "unique", "user", + "using", "variadic", "when", "where", "window", "with", "authorization", + "between", "binary", "cross", "current_schema", "freeze", "full", + "ilike", "inner", "is", "isnull", "join", "left", "like", "natural", + "notnull", "outer", "over", "overlaps", "right", "similar", "verbose" + ]) + +_DECIMAL_TYPES = (1231, 1700) +_FLOAT_TYPES = (700, 701, 1021, 1022) +_INT_TYPES = (20, 21, 23, 26, 1005, 1007, 1016) + + +class BYTEA(sqltypes.LargeBinary): + __visit_name__ = 'BYTEA' + + +class DOUBLE_PRECISION(sqltypes.Float): + __visit_name__ = 'DOUBLE_PRECISION' + + +class INET(sqltypes.TypeEngine): + __visit_name__ = "INET" +PGInet = INET + + +class CIDR(sqltypes.TypeEngine): + __visit_name__ = "CIDR" +PGCidr = CIDR + + +class MACADDR(sqltypes.TypeEngine): + __visit_name__ = "MACADDR" +PGMacAddr = MACADDR + + +class OID(sqltypes.TypeEngine): + + """Provide the Postgresql OID type. + + .. versionadded:: 0.9.5 + + """ + __visit_name__ = "OID" + + +class TIMESTAMP(sqltypes.TIMESTAMP): + + def __init__(self, timezone=False, precision=None): + super(TIMESTAMP, self).__init__(timezone=timezone) + self.precision = precision + + +class TIME(sqltypes.TIME): + + def __init__(self, timezone=False, precision=None): + super(TIME, self).__init__(timezone=timezone) + self.precision = precision + + +class INTERVAL(sqltypes.TypeEngine): + + """Postgresql INTERVAL type. + + The INTERVAL type may not be supported on all DBAPIs. + It is known to work on psycopg2 and not pg8000 or zxjdbc. + + """ + __visit_name__ = 'INTERVAL' + + def __init__(self, precision=None): + self.precision = precision + + @classmethod + def _adapt_from_generic_interval(cls, interval): + return INTERVAL(precision=interval.second_precision) + + @property + def _type_affinity(self): + return sqltypes.Interval + + @property + def python_type(self): + return dt.timedelta + +PGInterval = INTERVAL + + +class BIT(sqltypes.TypeEngine): + __visit_name__ = 'BIT' + + def __init__(self, length=None, varying=False): + if not varying: + # BIT without VARYING defaults to length 1 + self.length = length or 1 + else: + # but BIT VARYING can be unlimited-length, so no default + self.length = length + self.varying = varying + +PGBit = BIT + + +class UUID(sqltypes.TypeEngine): + + """Postgresql UUID type. + + Represents the UUID column type, interpreting + data either as natively returned by the DBAPI + or as Python uuid objects. + + The UUID type may not be supported on all DBAPIs. + It is known to work on psycopg2 and not pg8000. 
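+    As a minimal usage sketch (the table and column names here are
+    illustrative, and an ``engine`` bound to a PostgreSQL database is
+    assumed), passing ``as_uuid=True`` makes values round-trip as
+    ``uuid.UUID`` objects rather than strings::
+
+        import uuid
+        from sqlalchemy import Table, Column, MetaData
+        from sqlalchemy.dialects.postgresql import UUID
+
+        metadata = MetaData()
+        tokens = Table('tokens', metadata,
+                       Column('id', UUID(as_uuid=True), primary_key=True))
+        metadata.create_all(engine)
+
+        # the bind processor defined below stringifies the uuid.UUID on
+        # the way in; the result processor converts it back on the way out
+        engine.execute(tokens.insert(), id=uuid.uuid4())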
+ + """ + __visit_name__ = 'UUID' + + def __init__(self, as_uuid=False): + """Construct a UUID type. + + + :param as_uuid=False: if True, values will be interpreted + as Python uuid objects, converting to/from string via the + DBAPI. + + """ + if as_uuid and _python_UUID is None: + raise NotImplementedError( + "This version of Python does not support " + "the native UUID type." + ) + self.as_uuid = as_uuid + + def bind_processor(self, dialect): + if self.as_uuid: + def process(value): + if value is not None: + value = util.text_type(value) + return value + return process + else: + return None + + def result_processor(self, dialect, coltype): + if self.as_uuid: + def process(value): + if value is not None: + value = _python_UUID(value) + return value + return process + else: + return None + +PGUuid = UUID + + +class TSVECTOR(sqltypes.TypeEngine): + + """The :class:`.postgresql.TSVECTOR` type implements the Postgresql + text search type TSVECTOR. + + It can be used to do full text queries on natural language + documents. + + .. versionadded:: 0.9.0 + + .. seealso:: + + :ref:`postgresql_match` + + """ + __visit_name__ = 'TSVECTOR' + + +class _Slice(expression.ColumnElement): + __visit_name__ = 'slice' + type = sqltypes.NULLTYPE + + def __init__(self, slice_, source_comparator): + self.start = default_comparator._check_literal( + source_comparator.expr, + operators.getitem, slice_.start) + self.stop = default_comparator._check_literal( + source_comparator.expr, + operators.getitem, slice_.stop) + + +class Any(expression.ColumnElement): + + """Represent the clause ``left operator ANY (right)``. ``right`` must be + an array expression. + + .. seealso:: + + :class:`.postgresql.ARRAY` + + :meth:`.postgresql.ARRAY.Comparator.any` - ARRAY-bound method + + """ + __visit_name__ = 'any' + + def __init__(self, left, right, operator=operators.eq): + self.type = sqltypes.Boolean() + self.left = expression._literal_as_binds(left) + self.right = right + self.operator = operator + + +class All(expression.ColumnElement): + + """Represent the clause ``left operator ALL (right)``. ``right`` must be + an array expression. + + .. seealso:: + + :class:`.postgresql.ARRAY` + + :meth:`.postgresql.ARRAY.Comparator.all` - ARRAY-bound method + + """ + __visit_name__ = 'all' + + def __init__(self, left, right, operator=operators.eq): + self.type = sqltypes.Boolean() + self.left = expression._literal_as_binds(left) + self.right = right + self.operator = operator + + +class array(expression.Tuple): + + """A Postgresql ARRAY literal. + + This is used to produce ARRAY literals in SQL expressions, e.g.:: + + from sqlalchemy.dialects.postgresql import array + from sqlalchemy.dialects import postgresql + from sqlalchemy import select, func + + stmt = select([ + array([1,2]) + array([3,4,5]) + ]) + + print stmt.compile(dialect=postgresql.dialect()) + + Produces the SQL:: + + SELECT ARRAY[%(param_1)s, %(param_2)s] || + ARRAY[%(param_3)s, %(param_4)s, %(param_5)s] AS anon_1 + + An instance of :class:`.array` will always have the datatype + :class:`.ARRAY`. The "inner" type of the array is inferred from + the values present, unless the ``type_`` keyword argument is passed:: + + array(['foo', 'bar'], type_=CHAR) + + .. versionadded:: 0.8 Added the :class:`~.postgresql.array` literal type.
+ + See also: + + :class:`.postgresql.ARRAY` + + """ + __visit_name__ = 'array' + + def __init__(self, clauses, **kw): + super(array, self).__init__(*clauses, **kw) + self.type = ARRAY(self.type) + + def _bind_param(self, operator, obj): + return array([ + expression.BindParameter(None, o, _compared_to_operator=operator, + _compared_to_type=self.type, unique=True) + for o in obj + ]) + + def self_group(self, against=None): + return self + + +class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine): + + """Postgresql ARRAY type. + + Represents values as Python lists. + + An :class:`.ARRAY` type is constructed given the "type" + of element:: + + mytable = Table("mytable", metadata, + Column("data", ARRAY(Integer)) + ) + + The above type represents an N-dimensional array, + meaning Postgresql will interpret values with any number + of dimensions automatically. To produce an INSERT + construct that passes in a 1-dimensional array of integers:: + + connection.execute( + mytable.insert(), + data=[1,2,3] + ) + + The :class:`.ARRAY` type can be constructed given a fixed number + of dimensions:: + + mytable = Table("mytable", metadata, + Column("data", ARRAY(Integer, dimensions=2)) + ) + + This has the effect of the :class:`.ARRAY` type + specifying that number of bracketed blocks when a :class:`.Table` + is used in a CREATE TABLE statement, or when the type is used + within a :func:`.expression.cast` construct; it also causes + the bind parameter and result set processing of the type + to optimize itself to expect exactly that number of dimensions. + Note that Postgresql itself still allows N dimensions with such a type. + + SQL expressions of type :class:`.ARRAY` have support for "index" and + "slice" behavior. The Python ``[]`` operator works normally here, given + integer indexes or slices. Note that Postgresql arrays default + to 1-based indexing. The operator produces binary expression + constructs which will produce the appropriate SQL, both for + SELECT statements:: + + select([mytable.c.data[5], mytable.c.data[2:7]]) + + as well as UPDATE statements when the :meth:`.Update.values` method + is used:: + + mytable.update().values({ + mytable.c.data[5]: 7, + mytable.c.data[2:7]: [1, 2, 3] + }) + + .. note:: + + Multi-dimensional support for the ``[]`` operator is not supported + in SQLAlchemy 1.0. Please use the :func:`.type_coerce` function + to cast an intermediary expression to ARRAY again as a workaround:: + + expr = type_coerce(my_array_column[5], ARRAY(Integer))[6] + + Multi-dimensional support will be provided in a future release. + + :class:`.ARRAY` provides special methods for containment operations, + e.g.:: + + mytable.c.data.contains([1, 2]) + + For a full list of special methods see :class:`.ARRAY.Comparator`. + + .. versionadded:: 0.8 Added support for index and slice operations + to the :class:`.ARRAY` type, including support for UPDATE + statements, and special array containment operations. + + The :class:`.ARRAY` type may not be supported on all DBAPIs. + It is known to work on psycopg2 and not pg8000. + + Additionally, the :class:`.ARRAY` type does not work directly in + conjunction with the :class:`.ENUM` type. For a workaround, see the + special type at :ref:`postgresql_array_of_enum`. + + See also: + + :class:`.postgresql.array` - produce a literal array value. 
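+    As a brief combined sketch (reusing the hypothetical ``mytable``
+    from the examples above) of the containment and index operators
+    described in this section::
+
+        from sqlalchemy import select
+
+        # rows whose array contains both 1 and 2
+        stmt = select([mytable.c.data]).where(
+            mytable.c.data.contains([1, 2]))
+
+        # the second element; PG arrays are 1-based by default
+        second = select([mytable.c.data[2]])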
+ + """ + __visit_name__ = 'ARRAY' + + class Comparator(sqltypes.Concatenable.Comparator): + + """Define comparison operations for :class:`.ARRAY`.""" + + def __getitem__(self, index): + shift_indexes = 1 if self.expr.type.zero_indexes else 0 + if isinstance(index, slice): + if shift_indexes: + index = slice( + index.start + shift_indexes, + index.stop + shift_indexes, + index.step + ) + index = _Slice(index, self) + return_type = self.type + else: + index += shift_indexes + return_type = self.type.item_type + + return default_comparator._binary_operate( + self.expr, operators.getitem, index, + result_type=return_type) + + def any(self, other, operator=operators.eq): + """Return ``other operator ANY (array)`` clause. + + Argument places are switched, because ANY requires array + expression to be on the right hand-side. + + E.g.:: + + from sqlalchemy.sql import operators + + conn.execute( + select([table.c.data]).where( + table.c.data.any(7, operator=operators.lt) + ) + ) + + :param other: expression to be compared + :param operator: an operator object from the + :mod:`sqlalchemy.sql.operators` + package, defaults to :func:`.operators.eq`. + + .. seealso:: + + :class:`.postgresql.Any` + + :meth:`.postgresql.ARRAY.Comparator.all` + + """ + return Any(other, self.expr, operator=operator) + + def all(self, other, operator=operators.eq): + """Return ``other operator ALL (array)`` clause. + + Argument places are switched, because ALL requires array + expression to be on the right hand-side. + + E.g.:: + + from sqlalchemy.sql import operators + + conn.execute( + select([table.c.data]).where( + table.c.data.all(7, operator=operators.lt) + ) + ) + + :param other: expression to be compared + :param operator: an operator object from the + :mod:`sqlalchemy.sql.operators` + package, defaults to :func:`.operators.eq`. + + .. seealso:: + + :class:`.postgresql.All` + + :meth:`.postgresql.ARRAY.Comparator.any` + + """ + return All(other, self.expr, operator=operator) + + def contains(self, other, **kwargs): + """Boolean expression. Test if elements are a superset of the + elements of the argument array expression. + """ + return self.expr.op('@>')(other) + + def contained_by(self, other): + """Boolean expression. Test if elements are a proper subset of the + elements of the argument array expression. + """ + return self.expr.op('<@')(other) + + def overlap(self, other): + """Boolean expression. Test if array has elements in common with + an argument array expression. + """ + return self.expr.op('&&')(other) + + def _adapt_expression(self, op, other_comparator): + if isinstance(op, operators.custom_op): + if op.opstring in ['@>', '<@', '&&']: + return op, sqltypes.Boolean + return sqltypes.Concatenable.Comparator.\ + _adapt_expression(self, op, other_comparator) + + comparator_factory = Comparator + + def __init__(self, item_type, as_tuple=False, dimensions=None, + zero_indexes=False): + """Construct an ARRAY. + + E.g.:: + + Column('myarray', ARRAY(Integer)) + + Arguments are: + + :param item_type: The data type of items of this array. Note that + dimensionality is irrelevant here, so multi-dimensional arrays like + ``INTEGER[][]``, are constructed as ``ARRAY(Integer)``, not as + ``ARRAY(ARRAY(Integer))`` or such. + + :param as_tuple=False: Specify whether return results + should be converted to tuples from lists. DBAPIs such + as psycopg2 return lists by default. When tuples are + returned, the results are hashable. + + :param dimensions: if non-None, the ARRAY will assume a fixed + number of dimensions. 
This will cause the DDL emitted for this + ARRAY to include the exact number of bracket clauses ``[]``, + and will also optimize the performance of the type overall. + Note that PG arrays are always implicitly "non-dimensioned", + meaning they can store any number of dimensions no matter how + they were declared. + + :param zero_indexes=False: when True, index values will be converted + between Python zero-based and Postgresql one-based indexes, e.g. + a value of one will be added to all index values before passing + to the database. + + .. versionadded:: 0.9.5 + + """ + if isinstance(item_type, ARRAY): + raise ValueError("Do not nest ARRAY types; ARRAY(basetype) " + "handles multi-dimensional arrays of basetype") + if isinstance(item_type, type): + item_type = item_type() + self.item_type = item_type + self.as_tuple = as_tuple + self.dimensions = dimensions + self.zero_indexes = zero_indexes + + @property + def python_type(self): + return list + + def compare_values(self, x, y): + return x == y + + def _proc_array(self, arr, itemproc, dim, collection): + if dim is None: + arr = list(arr) + if dim == 1 or dim is None and ( + # this has to be (list, tuple), or at least + # not hasattr('__iter__'), since Py3K strings + # etc. have __iter__ + not arr or not isinstance(arr[0], (list, tuple))): + if itemproc: + return collection(itemproc(x) for x in arr) + else: + return collection(arr) + else: + return collection( + self._proc_array( + x, itemproc, + dim - 1 if dim is not None else None, + collection) + for x in arr + ) + + def bind_processor(self, dialect): + item_proc = self.item_type.\ + dialect_impl(dialect).\ + bind_processor(dialect) + + def process(value): + if value is None: + return value + else: + return self._proc_array( + value, + item_proc, + self.dimensions, + list) + return process + + def result_processor(self, dialect, coltype): + item_proc = self.item_type.\ + dialect_impl(dialect).\ + result_processor(dialect, coltype) + + def process(value): + if value is None: + return value + else: + return self._proc_array( + value, + item_proc, + self.dimensions, + tuple if self.as_tuple else list) + return process + +PGArray = ARRAY + + +class ENUM(sqltypes.Enum): + + """Postgresql ENUM type. + + This is a subclass of :class:`.types.Enum` which includes + support for PG's ``CREATE TYPE`` and ``DROP TYPE``. + + When the builtin type :class:`.types.Enum` is used and the + :paramref:`.Enum.native_enum` flag is left at its default of + True, the Postgresql backend will use a :class:`.postgresql.ENUM` + type as the implementation, so the special create/drop rules + will be used. + + The create/drop behavior of ENUM is necessarily intricate, due to the + awkward relationship the ENUM type has in relationship to the + parent table, in that it may be "owned" by just a single table, or + may be shared among many tables. 
+ + When using :class:`.types.Enum` or :class:`.postgresql.ENUM` + in an "inline" fashion, the ``CREATE TYPE`` and ``DROP TYPE`` are emitted + corresponding to when the :meth:`.Table.create` and :meth:`.Table.drop` + methods are called:: + + table = Table('sometable', metadata, + Column('some_enum', ENUM('a', 'b', 'c', name='myenum')) + ) + + table.create(engine) # will emit CREATE TYPE and CREATE TABLE + table.drop(engine) # will emit DROP TABLE and DROP TYPE + + To use a common enumerated type between multiple tables, the best + practice is to declare the :class:`.types.Enum` or + :class:`.postgresql.ENUM` independently, and associate it with the + :class:`.MetaData` object itself:: + + my_enum = ENUM('a', 'b', 'c', name='myenum', metadata=metadata) + + t1 = Table('sometable_one', metadata, + Column('some_enum', my_enum) + ) + + t2 = Table('sometable_two', metadata, + Column('some_enum', my_enum) + ) + + When this pattern is used, care must still be taken at the level + of individual table creates. Emitting CREATE TABLE without also + specifying ``checkfirst=True`` will still cause issues:: + + t1.create(engine) # will fail: no such type 'myenum' + + If we specify ``checkfirst=True``, the individual table-level create + operation will check for the ``ENUM`` and create if not exists:: + + # will check if enum exists, and emit CREATE TYPE if not + t1.create(engine, checkfirst=True) + + When using a metadata-level ENUM type, the type will always be created + and dropped whenever the metadata-wide create/drop is called:: + + metadata.create_all(engine) # will emit CREATE TYPE + metadata.drop_all(engine) # will emit DROP TYPE + + The type can also be created and dropped directly:: + + my_enum.create(engine) + my_enum.drop(engine) + + .. versionchanged:: 1.0.0 The Postgresql :class:`.postgresql.ENUM` type + now behaves more strictly with regards to CREATE/DROP. A metadata-level + ENUM type will only be created and dropped at the metadata level, + not the table level, with the exception of + ``table.create(checkfirst=True)``. + The ``table.drop()`` call will now emit a DROP TYPE for a table-level + enumerated type. + + """ + + def __init__(self, *enums, **kw): + """Construct an :class:`~.postgresql.ENUM`. + + Arguments are the same as that of + :class:`.types.Enum`, but also including + the following parameters. + + :param create_type: Defaults to True. + Indicates that ``CREATE TYPE`` should be + emitted, after optionally checking for the + presence of the type, when the parent + table is being created; and additionally + that ``DROP TYPE`` is called when the table + is dropped. When ``False``, no check + will be performed and no ``CREATE TYPE`` + or ``DROP TYPE`` is emitted, unless + :meth:`~.postgresql.ENUM.create` + or :meth:`~.postgresql.ENUM.drop` + are called directly. + Setting to ``False`` is helpful + when invoking a creation scheme to a SQL file + without access to the actual database - + the :meth:`~.postgresql.ENUM.create` and + :meth:`~.postgresql.ENUM.drop` methods can + be used to emit SQL to a target bind. + + .. versionadded:: 0.7.4 + + """ + self.create_type = kw.pop("create_type", True) + super(ENUM, self).__init__(*enums, **kw) + + def create(self, bind=None, checkfirst=True): + """Emit ``CREATE TYPE`` for this + :class:`~.postgresql.ENUM`. + + If the underlying dialect does not support + Postgresql CREATE TYPE, no action is taken. + + :param bind: a connectable :class:`.Engine`, + :class:`.Connection`, or similar object to emit + SQL.
+ :param checkfirst: if ``True``, a query against + the PG catalog will be first performed to see + if the type does not exist already before + creating. + + """ + if not bind.dialect.supports_native_enum: + return + + if not checkfirst or \ + not bind.dialect.has_type( + bind, self.name, schema=self.schema): + bind.execute(CreateEnumType(self)) + + def drop(self, bind=None, checkfirst=True): + """Emit ``DROP TYPE`` for this + :class:`~.postgresql.ENUM`. + + If the underlying dialect does not support + Postgresql DROP TYPE, no action is taken. + + :param bind: a connectable :class:`.Engine`, + :class:`.Connection`, or similar object to emit + SQL. + :param checkfirst: if ``True``, a query against + the PG catalog will be first performed to see + if the type actually exists before dropping. + + """ + if not bind.dialect.supports_native_enum: + return + + if not checkfirst or \ + bind.dialect.has_type(bind, self.name, schema=self.schema): + bind.execute(DropEnumType(self)) + + def _check_for_name_in_memos(self, checkfirst, kw): + """Look in the 'ddl runner' for 'memos', then + note our name in that collection. + + This to ensure a particular named enum is operated + upon only once within any kind of create/drop + sequence without relying upon "checkfirst". + + """ + if not self.create_type: + return True + if '_ddl_runner' in kw: + ddl_runner = kw['_ddl_runner'] + if '_pg_enums' in ddl_runner.memo: + pg_enums = ddl_runner.memo['_pg_enums'] + else: + pg_enums = ddl_runner.memo['_pg_enums'] = set() + present = self.name in pg_enums + pg_enums.add(self.name) + return present + else: + return False + + def _on_table_create(self, target, bind, checkfirst, **kw): + if checkfirst or ( + not self.metadata and + not kw.get('_is_metadata_operation', False)) and \ + not self._check_for_name_in_memos(checkfirst, kw): + self.create(bind=bind, checkfirst=checkfirst) + + def _on_table_drop(self, target, bind, checkfirst, **kw): + if not self.metadata and \ + not kw.get('_is_metadata_operation', False) and \ + not self._check_for_name_in_memos(checkfirst, kw): + self.drop(bind=bind, checkfirst=checkfirst) + + def _on_metadata_create(self, target, bind, checkfirst, **kw): + if not self._check_for_name_in_memos(checkfirst, kw): + self.create(bind=bind, checkfirst=checkfirst) + + def _on_metadata_drop(self, target, bind, checkfirst, **kw): + if not self._check_for_name_in_memos(checkfirst, kw): + self.drop(bind=bind, checkfirst=checkfirst) + +colspecs = { + sqltypes.Interval: INTERVAL, + sqltypes.Enum: ENUM, +} + +ischema_names = { + 'integer': INTEGER, + 'bigint': BIGINT, + 'smallint': SMALLINT, + 'character varying': VARCHAR, + 'character': CHAR, + '"char"': sqltypes.String, + 'name': sqltypes.String, + 'text': TEXT, + 'numeric': NUMERIC, + 'float': FLOAT, + 'real': REAL, + 'inet': INET, + 'cidr': CIDR, + 'uuid': UUID, + 'bit': BIT, + 'bit varying': BIT, + 'macaddr': MACADDR, + 'oid': OID, + 'double precision': DOUBLE_PRECISION, + 'timestamp': TIMESTAMP, + 'timestamp with time zone': TIMESTAMP, + 'timestamp without time zone': TIMESTAMP, + 'time with time zone': TIME, + 'time without time zone': TIME, + 'date': DATE, + 'time': TIME, + 'bytea': BYTEA, + 'boolean': BOOLEAN, + 'interval': INTERVAL, + 'interval year to month': INTERVAL, + 'interval day to second': INTERVAL, + 'tsvector': TSVECTOR +} + + +class PGCompiler(compiler.SQLCompiler): + + def visit_array(self, element, **kw): + return "ARRAY[%s]" % self.visit_clauselist(element, **kw) + + def visit_slice(self, element, **kw): + return "%s:%s" % ( + 
self.process(element.start, **kw), + self.process(element.stop, **kw), + ) + + def visit_any(self, element, **kw): + return "%s%sANY (%s)" % ( + self.process(element.left, **kw), + compiler.OPERATORS[element.operator], + self.process(element.right, **kw) + ) + + def visit_all(self, element, **kw): + return "%s%sALL (%s)" % ( + self.process(element.left, **kw), + compiler.OPERATORS[element.operator], + self.process(element.right, **kw) + ) + + def visit_getitem_binary(self, binary, operator, **kw): + return "%s[%s]" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw) + ) + + def visit_match_op_binary(self, binary, operator, **kw): + if "postgresql_regconfig" in binary.modifiers: + regconfig = self.render_literal_value( + binary.modifiers['postgresql_regconfig'], + sqltypes.STRINGTYPE) + if regconfig: + return "%s @@ to_tsquery(%s, %s)" % ( + self.process(binary.left, **kw), + regconfig, + self.process(binary.right, **kw) + ) + return "%s @@ to_tsquery(%s)" % ( + self.process(binary.left, **kw), + self.process(binary.right, **kw) + ) + + def visit_ilike_op_binary(self, binary, operator, **kw): + escape = binary.modifiers.get("escape", None) + + return '%s ILIKE %s' % \ + (self.process(binary.left, **kw), + self.process(binary.right, **kw)) \ + + ( + ' ESCAPE ' + + self.render_literal_value(escape, sqltypes.STRINGTYPE) + if escape else '' + ) + + def visit_notilike_op_binary(self, binary, operator, **kw): + escape = binary.modifiers.get("escape", None) + return '%s NOT ILIKE %s' % \ + (self.process(binary.left, **kw), + self.process(binary.right, **kw)) \ + + ( + ' ESCAPE ' + + self.render_literal_value(escape, sqltypes.STRINGTYPE) + if escape else '' + ) + + def render_literal_value(self, value, type_): + value = super(PGCompiler, self).render_literal_value(value, type_) + + if self.dialect._backslash_escapes: + value = value.replace('\\', '\\\\') + return value + + def visit_sequence(self, seq): + return "nextval('%s')" % self.preparer.format_sequence(seq) + + def limit_clause(self, select, **kw): + text = "" + if select._limit_clause is not None: + text += " \n LIMIT " + self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + if select._limit_clause is None: + text += " \n LIMIT ALL" + text += " OFFSET " + self.process(select._offset_clause, **kw) + return text + + def format_from_hint_text(self, sqltext, table, hint, iscrud): + if hint.upper() != 'ONLY': + raise exc.CompileError("Unrecognized hint: %r" % hint) + return "ONLY " + sqltext + + def get_select_precolumns(self, select, **kw): + if select._distinct is not False: + if select._distinct is True: + return "DISTINCT " + elif isinstance(select._distinct, (list, tuple)): + return "DISTINCT ON (" + ', '.join( + [self.process(col) for col in select._distinct] + ) + ") " + else: + return "DISTINCT ON (" + \ + self.process(select._distinct, **kw) + ") " + else: + return "" + + def for_update_clause(self, select, **kw): + + if select._for_update_arg.read: + tmp = " FOR SHARE" + else: + tmp = " FOR UPDATE" + + if select._for_update_arg.of: + tables = util.OrderedSet( + c.table if isinstance(c, expression.ColumnClause) + else c for c in select._for_update_arg.of) + tmp += " OF " + ", ".join( + self.process(table, ashint=True, use_schema=False, **kw) + for table in tables + ) + + if select._for_update_arg.nowait: + tmp += " NOWAIT" + + return tmp + + def returning_clause(self, stmt, returning_cols): + + columns = [ + self._label_select_column(None, c, True, False, {}) + for c in 
expression._select_iterables(returning_cols) + ] + + return 'RETURNING ' + ', '.join(columns) + + def visit_substring_func(self, func, **kw): + s = self.process(func.clauses.clauses[0], **kw) + start = self.process(func.clauses.clauses[1], **kw) + if len(func.clauses.clauses) > 2: + length = self.process(func.clauses.clauses[2], **kw) + return "SUBSTRING(%s FROM %s FOR %s)" % (s, start, length) + else: + return "SUBSTRING(%s FROM %s)" % (s, start) + + +class PGDDLCompiler(compiler.DDLCompiler): + + def get_column_specification(self, column, **kwargs): + + colspec = self.preparer.format_column(column) + impl_type = column.type.dialect_impl(self.dialect) + if column.primary_key and \ + column is column.table._autoincrement_column and \ + ( + self.dialect.supports_smallserial or + not isinstance(impl_type, sqltypes.SmallInteger) + ) and ( + column.default is None or + ( + isinstance(column.default, schema.Sequence) and + column.default.optional + )): + if isinstance(impl_type, sqltypes.BigInteger): + colspec += " BIGSERIAL" + elif isinstance(impl_type, sqltypes.SmallInteger): + colspec += " SMALLSERIAL" + else: + colspec += " SERIAL" + else: + colspec += " " + self.dialect.type_compiler.process(column.type, + type_expression=column) + default = self.get_column_default_string(column) + if default is not None: + colspec += " DEFAULT " + default + + if not column.nullable: + colspec += " NOT NULL" + return colspec + + def visit_create_enum_type(self, create): + type_ = create.element + + return "CREATE TYPE %s AS ENUM (%s)" % ( + self.preparer.format_type(type_), + ", ".join( + self.sql_compiler.process(sql.literal(e), literal_binds=True) + for e in type_.enums) + ) + + def visit_drop_enum_type(self, drop): + type_ = drop.element + + return "DROP TYPE %s" % ( + self.preparer.format_type(type_) + ) + + def visit_create_index(self, create): + preparer = self.preparer + index = create.element + self._verify_index_table(index) + text = "CREATE " + if index.unique: + text += "UNIQUE " + text += "INDEX " + + concurrently = index.dialect_options['postgresql']['concurrently'] + if concurrently: + text += "CONCURRENTLY " + + text += "%s ON %s " % ( + self._prepared_index_name(index, + include_schema=False), + preparer.format_table(index.table) + ) + + using = index.dialect_options['postgresql']['using'] + if using: + text += "USING %s " % preparer.quote(using) + + ops = index.dialect_options["postgresql"]["ops"] + text += "(%s)" \ + % ( + ', '.join([ + self.sql_compiler.process( + expr.self_group() + if not isinstance(expr, expression.ColumnClause) + else expr, + include_table=False, literal_binds=True) + + ( + (' ' + ops[expr.key]) + if hasattr(expr, 'key') + and expr.key in ops else '' + ) + for expr in index.expressions + ]) + ) + + withclause = index.dialect_options['postgresql']['with'] + + if withclause: + text += " WITH (%s)" % (', '.join( + ['%s = %s' % storage_parameter + for storage_parameter in withclause.items()])) + + whereclause = index.dialect_options["postgresql"]["where"] + + if whereclause is not None: + where_compiled = self.sql_compiler.process( + whereclause, include_table=False, + literal_binds=True) + text += " WHERE " + where_compiled + return text + + def visit_exclude_constraint(self, constraint, **kw): + text = "" + if constraint.name is not None: + text += "CONSTRAINT %s " % \ + self.preparer.format_constraint(constraint) + elements = [] + for expr, name, op in constraint._render_exprs: + kw['include_table'] = False + elements.append( + "%s WITH %s" % 
(self.sql_compiler.process(expr, **kw), op) + ) + text += "EXCLUDE USING %s (%s)" % (constraint.using, + ', '.join(elements)) + if constraint.where is not None: + text += ' WHERE (%s)' % self.sql_compiler.process( + constraint.where, + literal_binds=True) + text += self.define_constraint_deferrability(constraint) + return text + + def post_create_table(self, table): + table_opts = [] + pg_opts = table.dialect_options['postgresql'] + + inherits = pg_opts.get('inherits') + if inherits is not None: + if not isinstance(inherits, (list, tuple)): + inherits = (inherits, ) + table_opts.append( + '\n INHERITS ( ' + + ', '.join(self.preparer.quote(name) for name in inherits) + + ' )') + + if pg_opts['with_oids'] is True: + table_opts.append('\n WITH OIDS') + elif pg_opts['with_oids'] is False: + table_opts.append('\n WITHOUT OIDS') + + if pg_opts['on_commit']: + on_commit_options = pg_opts['on_commit'].replace("_", " ").upper() + table_opts.append('\n ON COMMIT %s' % on_commit_options) + + if pg_opts['tablespace']: + tablespace_name = pg_opts['tablespace'] + table_opts.append( + '\n TABLESPACE %s' % self.preparer.quote(tablespace_name) + ) + + return ''.join(table_opts) + + +class PGTypeCompiler(compiler.GenericTypeCompiler): + def visit_TSVECTOR(self, type, **kw): + return "TSVECTOR" + + def visit_INET(self, type_, **kw): + return "INET" + + def visit_CIDR(self, type_, **kw): + return "CIDR" + + def visit_MACADDR(self, type_, **kw): + return "MACADDR" + + def visit_OID(self, type_, **kw): + return "OID" + + def visit_FLOAT(self, type_, **kw): + if not type_.precision: + return "FLOAT" + else: + return "FLOAT(%(precision)s)" % {'precision': type_.precision} + + def visit_DOUBLE_PRECISION(self, type_, **kw): + return "DOUBLE PRECISION" + + def visit_BIGINT(self, type_, **kw): + return "BIGINT" + + def visit_HSTORE(self, type_, **kw): + return "HSTORE" + + def visit_JSON(self, type_, **kw): + return "JSON" + + def visit_JSONB(self, type_, **kw): + return "JSONB" + + def visit_INT4RANGE(self, type_, **kw): + return "INT4RANGE" + + def visit_INT8RANGE(self, type_, **kw): + return "INT8RANGE" + + def visit_NUMRANGE(self, type_, **kw): + return "NUMRANGE" + + def visit_DATERANGE(self, type_, **kw): + return "DATERANGE" + + def visit_TSRANGE(self, type_, **kw): + return "TSRANGE" + + def visit_TSTZRANGE(self, type_, **kw): + return "TSTZRANGE" + + def visit_datetime(self, type_, **kw): + return self.visit_TIMESTAMP(type_, **kw) + + def visit_enum(self, type_, **kw): + if not type_.native_enum or not self.dialect.supports_native_enum: + return super(PGTypeCompiler, self).visit_enum(type_, **kw) + else: + return self.visit_ENUM(type_, **kw) + + def visit_ENUM(self, type_, **kw): + return self.dialect.identifier_preparer.format_type(type_) + + def visit_TIMESTAMP(self, type_, **kw): + return "TIMESTAMP%s %s" % ( + getattr(type_, 'precision', None) and "(%d)" % + type_.precision or "", + (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE" + ) + + def visit_TIME(self, type_, **kw): + return "TIME%s %s" % ( + getattr(type_, 'precision', None) and "(%d)" % + type_.precision or "", + (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE" + ) + + def visit_INTERVAL(self, type_, **kw): + if type_.precision is not None: + return "INTERVAL(%d)" % type_.precision + else: + return "INTERVAL" + + def visit_BIT(self, type_, **kw): + if type_.varying: + compiled = "BIT VARYING" + if type_.length is not None: + compiled += "(%d)" % type_.length + else: + compiled = "BIT(%d)" % type_.length + return compiled + + def 
visit_UUID(self, type_, **kw): + return "UUID" + + def visit_large_binary(self, type_, **kw): + return self.visit_BYTEA(type_, **kw) + + def visit_BYTEA(self, type_, **kw): + return "BYTEA" + + def visit_ARRAY(self, type_, **kw): + return self.process(type_.item_type) + ('[]' * (type_.dimensions + if type_.dimensions + is not None else 1)) + + +class PGIdentifierPreparer(compiler.IdentifierPreparer): + + reserved_words = RESERVED_WORDS + + def _unquote_identifier(self, value): + if value[0] == self.initial_quote: + value = value[1:-1].\ + replace(self.escape_to_quote, self.escape_quote) + return value + + def format_type(self, type_, use_schema=True): + if not type_.name: + raise exc.CompileError("Postgresql ENUM type requires a name.") + + name = self.quote(type_.name) + if not self.omit_schema and use_schema and type_.schema is not None: + name = self.quote_schema(type_.schema) + "." + name + return name + + +class PGInspector(reflection.Inspector): + + def __init__(self, conn): + reflection.Inspector.__init__(self, conn) + + def get_table_oid(self, table_name, schema=None): + """Return the OID for the given table name.""" + + return self.dialect.get_table_oid(self.bind, table_name, schema, + info_cache=self.info_cache) + + def get_enums(self, schema=None): + """Return a list of ENUM objects. + + Each member is a dictionary containing these fields: + + * name - name of the enum + * schema - the schema name for the enum. + * visible - boolean, whether or not this enum is visible + in the default search path. + * labels - a list of string labels that apply to the enum. + + :param schema: schema name. If None, the default schema + (typically 'public') is used. May also be set to '*' to + indicate that enums for all schemas should be loaded. + + .. versionadded:: 1.0.0 + + """ + schema = schema or self.default_schema_name + return self.dialect._load_enums(self.bind, schema) + + def get_foreign_table_names(self, schema=None): + """Return a list of FOREIGN TABLE names. + + Behavior is similar to that of :meth:`.Inspector.get_table_names`, + except that the list is limited to those tables that report a + ``relkind`` value of ``f``. + + .. versionadded:: 1.0.0 + + """ + schema = schema or self.default_schema_name + return self.dialect._get_foreign_table_names(self.bind, schema) + + +class CreateEnumType(schema._CreateDropBase): + __visit_name__ = "create_enum_type" + + +class DropEnumType(schema._CreateDropBase): + __visit_name__ = "drop_enum_type" + + +class PGExecutionContext(default.DefaultExecutionContext): + + def fire_sequence(self, seq, type_): + return self._execute_scalar(( + "select nextval('%s')" % + self.dialect.identifier_preparer.format_sequence(seq)), type_) + + def get_insert_default(self, column): + if column.primary_key and \ + column is column.table._autoincrement_column: + if column.server_default and column.server_default.has_argument: + + # pre-execute passive defaults on primary key columns + return self._execute_scalar("select %s" % + column.server_default.arg, + column.type) + + elif (column.default is None or + (column.default.is_sequence and + column.default.optional)): + + # execute the sequence associated with a SERIAL primary + # key column. for non-primary-key SERIAL, the ID just + # generates server side.
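+                # regarding the fallback name computed below: PG names
+                # the implicit SERIAL sequence "<table>_<column>_seq" and
+                # truncates identifiers to 63 characters (NAMEDATALEN - 1);
+                # the slicing budgets at most 58 characters across the two
+                # name parts so that "<tab>_<col>_seq" fits within 63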
+ + try: + seq_name = column._postgresql_seq_name + except AttributeError: + tab = column.table.name + col = column.name + tab = tab[0:29 + max(0, (29 - len(col)))] + col = col[0:29 + max(0, (29 - len(tab)))] + name = "%s_%s_seq" % (tab, col) + column._postgresql_seq_name = seq_name = name + + sch = column.table.schema + if sch is not None: + exc = "select nextval('\"%s\".\"%s\"')" % \ + (sch, seq_name) + else: + exc = "select nextval('\"%s\"')" % \ + (seq_name, ) + + return self._execute_scalar(exc, column.type) + + return super(PGExecutionContext, self).get_insert_default(column) + + +class PGDialect(default.DefaultDialect): + name = 'postgresql' + supports_alter = True + max_identifier_length = 63 + supports_sane_rowcount = True + + supports_native_enum = True + supports_native_boolean = True + supports_smallserial = True + + supports_sequences = True + sequences_optional = True + preexecute_autoincrement_sequences = True + postfetch_lastrowid = False + + supports_default_values = True + supports_empty_insert = False + supports_multivalues_insert = True + default_paramstyle = 'pyformat' + ischema_names = ischema_names + colspecs = colspecs + + statement_compiler = PGCompiler + ddl_compiler = PGDDLCompiler + type_compiler = PGTypeCompiler + preparer = PGIdentifierPreparer + execution_ctx_cls = PGExecutionContext + inspector = PGInspector + isolation_level = None + + construct_arguments = [ + (schema.Index, { + "using": False, + "where": None, + "ops": {}, + "concurrently": False, + "with": {} + }), + (schema.Table, { + "ignore_search_path": False, + "tablespace": None, + "with_oids": None, + "on_commit": None, + "inherits": None + }) + ] + + reflection_options = ('postgresql_ignore_search_path', ) + + _backslash_escapes = True + + def __init__(self, isolation_level=None, json_serializer=None, + json_deserializer=None, **kwargs): + default.DefaultDialect.__init__(self, **kwargs) + self.isolation_level = isolation_level + self._json_deserializer = json_deserializer + self._json_serializer = json_serializer + + def initialize(self, connection): + super(PGDialect, self).initialize(connection) + self.implicit_returning = self.server_version_info > (8, 2) and \ + self.__dict__.get('implicit_returning', True) + self.supports_native_enum = self.server_version_info >= (8, 3) + if not self.supports_native_enum: + self.colspecs = self.colspecs.copy() + # pop base Enum type + self.colspecs.pop(sqltypes.Enum, None) + # psycopg2, others may have placed ENUM here as well + self.colspecs.pop(ENUM, None) + + # http://www.postgresql.org/docs/9.3/static/release-9-2.html#AEN116689 + self.supports_smallserial = self.server_version_info >= (9, 2) + + self._backslash_escapes = self.server_version_info < (8, 2) or \ + connection.scalar( + "show standard_conforming_strings" + ) == 'off' + + def on_connect(self): + if self.isolation_level is not None: + def connect(conn): + self.set_isolation_level(conn, self.isolation_level) + return connect + else: + return None + + _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED', + 'READ COMMITTED', 'REPEATABLE READ']) + + def set_isolation_level(self, connection, level): + level = level.replace('_', ' ') + if level not in self._isolation_lookup: + raise exc.ArgumentError( + "Invalid value '%s' for isolation_level. 
" + "Valid isolation levels for %s are %s" % + (level, self.name, ", ".join(self._isolation_lookup)) + ) + cursor = connection.cursor() + cursor.execute( + "SET SESSION CHARACTERISTICS AS TRANSACTION " + "ISOLATION LEVEL %s" % level) + cursor.execute("COMMIT") + cursor.close() + + def get_isolation_level(self, connection): + cursor = connection.cursor() + cursor.execute('show transaction isolation level') + val = cursor.fetchone()[0] + cursor.close() + return val.upper() + + def do_begin_twophase(self, connection, xid): + self.do_begin(connection.connection) + + def do_prepare_twophase(self, connection, xid): + connection.execute("PREPARE TRANSACTION '%s'" % xid) + + def do_rollback_twophase(self, connection, xid, + is_prepared=True, recover=False): + if is_prepared: + if recover: + # FIXME: ugly hack to get out of transaction + # context when committing recoverable transactions + # Must find out a way how to make the dbapi not + # open a transaction. + connection.execute("ROLLBACK") + connection.execute("ROLLBACK PREPARED '%s'" % xid) + connection.execute("BEGIN") + self.do_rollback(connection.connection) + else: + self.do_rollback(connection.connection) + + def do_commit_twophase(self, connection, xid, + is_prepared=True, recover=False): + if is_prepared: + if recover: + connection.execute("ROLLBACK") + connection.execute("COMMIT PREPARED '%s'" % xid) + connection.execute("BEGIN") + self.do_rollback(connection.connection) + else: + self.do_commit(connection.connection) + + def do_recover_twophase(self, connection): + resultset = connection.execute( + sql.text("SELECT gid FROM pg_prepared_xacts")) + return [row[0] for row in resultset] + + def _get_default_schema_name(self, connection): + return connection.scalar("select current_schema()") + + def has_schema(self, connection, schema): + query = ("select nspname from pg_namespace " + "where lower(nspname)=:schema") + cursor = connection.execute( + sql.text( + query, + bindparams=[ + sql.bindparam( + 'schema', util.text_type(schema.lower()), + type_=sqltypes.Unicode)] + ) + ) + + return bool(cursor.first()) + + def has_table(self, connection, table_name, schema=None): + # seems like case gets folded in pg_class... 
+ if schema is None: + cursor = connection.execute( + sql.text( + "select relname from pg_class c join pg_namespace n on " + "n.oid=c.relnamespace where " + "pg_catalog.pg_table_is_visible(c.oid) " + "and relname=:name", + bindparams=[ + sql.bindparam('name', util.text_type(table_name), + type_=sqltypes.Unicode)] + ) + ) + else: + cursor = connection.execute( + sql.text( + "select relname from pg_class c join pg_namespace n on " + "n.oid=c.relnamespace where n.nspname=:schema and " + "relname=:name", + bindparams=[ + sql.bindparam('name', + util.text_type(table_name), + type_=sqltypes.Unicode), + sql.bindparam('schema', + util.text_type(schema), + type_=sqltypes.Unicode)] + ) + ) + return bool(cursor.first()) + + def has_sequence(self, connection, sequence_name, schema=None): + if schema is None: + cursor = connection.execute( + sql.text( + "SELECT relname FROM pg_class c join pg_namespace n on " + "n.oid=c.relnamespace where relkind='S' and " + "n.nspname=current_schema() " + "and relname=:name", + bindparams=[ + sql.bindparam('name', util.text_type(sequence_name), + type_=sqltypes.Unicode) + ] + ) + ) + else: + cursor = connection.execute( + sql.text( + "SELECT relname FROM pg_class c join pg_namespace n on " + "n.oid=c.relnamespace where relkind='S' and " + "n.nspname=:schema and relname=:name", + bindparams=[ + sql.bindparam('name', util.text_type(sequence_name), + type_=sqltypes.Unicode), + sql.bindparam('schema', + util.text_type(schema), + type_=sqltypes.Unicode) + ] + ) + ) + + return bool(cursor.first()) + + def has_type(self, connection, type_name, schema=None): + if schema is not None: + query = """ + SELECT EXISTS ( + SELECT * FROM pg_catalog.pg_type t, pg_catalog.pg_namespace n + WHERE t.typnamespace = n.oid + AND t.typname = :typname + AND n.nspname = :nspname + ) + """ + query = sql.text(query) + else: + query = """ + SELECT EXISTS ( + SELECT * FROM pg_catalog.pg_type t + WHERE t.typname = :typname + AND pg_type_is_visible(t.oid) + ) + """ + query = sql.text(query) + query = query.bindparams( + sql.bindparam('typname', + util.text_type(type_name), type_=sqltypes.Unicode), + ) + if schema is not None: + query = query.bindparams( + sql.bindparam('nspname', + util.text_type(schema), type_=sqltypes.Unicode), + ) + cursor = connection.execute(query) + return bool(cursor.scalar()) + + def _get_server_version_info(self, connection): + v = connection.execute("select version()").scalar() + m = re.match( + '.*(?:PostgreSQL|EnterpriseDB) ' + '(\d+)\.(\d+)(?:\.(\d+))?(?:\.\d+)?(?:devel)?', + v) + if not m: + raise AssertionError( + "Could not determine version from string '%s'" % v) + return tuple([int(x) for x in m.group(1, 2, 3) if x is not None]) + + @reflection.cache + def get_table_oid(self, connection, table_name, schema=None, **kw): + """Fetch the oid for schema.table_name. + + Several reflection methods require the table oid. The idea for using + this method is that it can be fetched one time and cached for + subsequent calls. + + """ + table_oid = None + if schema is not None: + schema_where_clause = "n.nspname = :schema" + else: + schema_where_clause = "pg_catalog.pg_table_is_visible(c.oid)" + query = """ + SELECT c.oid + FROM pg_catalog.pg_class c + LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace + WHERE (%s) + AND c.relname = :table_name AND c.relkind in ('r', 'v', 'm', 'f') + """ % schema_where_clause + # Since we're binding to unicode, table_name and schema_name must be + # unicode. 
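+        # Bind the name parameters as Unicode and type the selected oid
+        # as Integer; a miss is raised as NoSuchTableError below.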
+ table_name = util.text_type(table_name) + if schema is not None: + schema = util.text_type(schema) + s = sql.text(query).bindparams(table_name=sqltypes.Unicode) + s = s.columns(oid=sqltypes.Integer) + if schema: + s = s.bindparams(sql.bindparam('schema', type_=sqltypes.Unicode)) + c = connection.execute(s, table_name=table_name, schema=schema) + table_oid = c.scalar() + if table_oid is None: + raise exc.NoSuchTableError(table_name) + return table_oid + + @reflection.cache + def get_schema_names(self, connection, **kw): + s = """ + SELECT nspname + FROM pg_namespace + ORDER BY nspname + """ + rp = connection.execute(s) + # what about system tables? + + if util.py2k: + schema_names = [row[0].decode(self.encoding) for row in rp + if not row[0].startswith('pg_')] + else: + schema_names = [row[0] for row in rp + if not row[0].startswith('pg_')] + return schema_names + + @reflection.cache + def get_table_names(self, connection, schema=None, **kw): + if schema is not None: + current_schema = schema + else: + current_schema = self.default_schema_name + + result = connection.execute( + sql.text("SELECT relname FROM pg_class c " + "WHERE relkind = 'r' " + "AND '%s' = (select nspname from pg_namespace n " + "where n.oid = c.relnamespace) " % + current_schema, + typemap={'relname': sqltypes.Unicode} + ) + ) + return [row[0] for row in result] + + @reflection.cache + def _get_foreign_table_names(self, connection, schema=None, **kw): + if schema is not None: + current_schema = schema + else: + current_schema = self.default_schema_name + + result = connection.execute( + sql.text("SELECT relname FROM pg_class c " + "WHERE relkind = 'f' " + "AND '%s' = (select nspname from pg_namespace n " + "where n.oid = c.relnamespace) " % + current_schema, + typemap={'relname': sqltypes.Unicode} + ) + ) + return [row[0] for row in result] + + @reflection.cache + def get_view_names(self, connection, schema=None, **kw): + if schema is not None: + current_schema = schema + else: + current_schema = self.default_schema_name + s = """ + SELECT relname + FROM pg_class c + WHERE relkind IN ('m', 'v') + AND '%(schema)s' = (select nspname from pg_namespace n + where n.oid = c.relnamespace) + """ % dict(schema=current_schema) + + if util.py2k: + view_names = [row[0].decode(self.encoding) + for row in connection.execute(s)] + else: + view_names = [row[0] for row in connection.execute(s)] + return view_names + + @reflection.cache + def get_view_definition(self, connection, view_name, schema=None, **kw): + if schema is not None: + current_schema = schema + else: + current_schema = self.default_schema_name + s = """ + SELECT definition FROM pg_views + WHERE schemaname = :schema + AND viewname = :view_name + """ + rp = connection.execute(sql.text(s), + view_name=view_name, schema=current_schema) + if rp: + if util.py2k: + view_def = rp.scalar().decode(self.encoding) + else: + view_def = rp.scalar() + return view_def + + @reflection.cache + def get_columns(self, connection, table_name, schema=None, **kw): + + table_oid = self.get_table_oid(connection, table_name, schema, + info_cache=kw.get('info_cache')) + SQL_COLS = """ + SELECT a.attname, + pg_catalog.format_type(a.atttypid, a.atttypmod), + (SELECT pg_catalog.pg_get_expr(d.adbin, d.adrelid) + FROM pg_catalog.pg_attrdef d + WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum + AND a.atthasdef) + AS DEFAULT, + a.attnotnull, a.attnum, a.attrelid as table_oid + FROM pg_catalog.pg_attribute a + WHERE a.attrelid = :table_oid + AND a.attnum > 0 AND NOT a.attisdropped + ORDER BY a.attnum 
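+        -- attnum > 0 excludes system columns; NOT attisdropped excludes
+        -- columns that were dropped but not yet physically removed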
+ """ + s = sql.text(SQL_COLS, + bindparams=[ + sql.bindparam('table_oid', type_=sqltypes.Integer)], + typemap={ + 'attname': sqltypes.Unicode, + 'default': sqltypes.Unicode} + ) + c = connection.execute(s, table_oid=table_oid) + rows = c.fetchall() + domains = self._load_domains(connection) + enums = dict( + ( + "%s.%s" % (rec['schema'], rec['name']) + if not rec['visible'] else rec['name'], rec) for rec in + self._load_enums(connection, schema='*') + ) + + # format columns + columns = [] + for name, format_type, default, notnull, attnum, table_oid in rows: + column_info = self._get_column_info( + name, format_type, default, notnull, domains, enums, schema) + columns.append(column_info) + return columns + + def _get_column_info(self, name, format_type, default, + notnull, domains, enums, schema): + # strip (*) from character varying(5), timestamp(5) + # with time zone, geometry(POLYGON), etc. + attype = re.sub(r'\(.*\)', '', format_type) + + # strip '[]' from integer[], etc. + attype = re.sub(r'\[\]', '', attype) + + nullable = not notnull + is_array = format_type.endswith('[]') + charlen = re.search('\(([\d,]+)\)', format_type) + if charlen: + charlen = charlen.group(1) + args = re.search('\((.*)\)', format_type) + if args and args.group(1): + args = tuple(re.split('\s*,\s*', args.group(1))) + else: + args = () + kwargs = {} + + if attype == 'numeric': + if charlen: + prec, scale = charlen.split(',') + args = (int(prec), int(scale)) + else: + args = () + elif attype == 'double precision': + args = (53, ) + elif attype == 'integer': + args = () + elif attype in ('timestamp with time zone', + 'time with time zone'): + kwargs['timezone'] = True + if charlen: + kwargs['precision'] = int(charlen) + args = () + elif attype in ('timestamp without time zone', + 'time without time zone', 'time'): + kwargs['timezone'] = False + if charlen: + kwargs['precision'] = int(charlen) + args = () + elif attype == 'bit varying': + kwargs['varying'] = True + if charlen: + args = (int(charlen),) + else: + args = () + elif attype in ('interval', 'interval year to month', + 'interval day to second'): + if charlen: + kwargs['precision'] = int(charlen) + args = () + elif charlen: + args = (int(charlen),) + + while True: + if attype in self.ischema_names: + coltype = self.ischema_names[attype] + break + elif attype in enums: + enum = enums[attype] + coltype = ENUM + kwargs['name'] = enum['name'] + if not enum['visible']: + kwargs['schema'] = enum['schema'] + args = tuple(enum['labels']) + break + elif attype in domains: + domain = domains[attype] + attype = domain['attype'] + # A table can't override whether the domain is nullable. + nullable = domain['nullable'] + if domain['default'] and not default: + # It can, however, override the default + # value, but can't set it to null. + default = domain['default'] + continue + else: + coltype = None + break + + if coltype: + coltype = coltype(*args, **kwargs) + if is_array: + coltype = ARRAY(coltype) + else: + util.warn("Did not recognize type '%s' of column '%s'" % + (attype, name)) + coltype = sqltypes.NULLTYPE + # adjust the default value + autoincrement = False + if default is not None: + match = re.search(r"""(nextval\(')([^']+)('.*$)""", default) + if match is not None: + autoincrement = True + # the default is related to a Sequence + sch = schema + if '.' not in match.group(2) and sch is not None: + # unconditionally quote the schema name. 
this could + # later be enhanced to obey quoting rules / + # "quote schema" + default = match.group(1) + \ + ('"%s"' % sch) + '.' + \ + match.group(2) + match.group(3) + + column_info = dict(name=name, type=coltype, nullable=nullable, + default=default, autoincrement=autoincrement) + return column_info + + @reflection.cache + def get_pk_constraint(self, connection, table_name, schema=None, **kw): + table_oid = self.get_table_oid(connection, table_name, schema, + info_cache=kw.get('info_cache')) + + if self.server_version_info < (8, 4): + PK_SQL = """ + SELECT a.attname + FROM + pg_class t + join pg_index ix on t.oid = ix.indrelid + join pg_attribute a + on t.oid=a.attrelid AND %s + WHERE + t.oid = :table_oid and ix.indisprimary = 't' + ORDER BY a.attnum + """ % self._pg_index_any("a.attnum", "ix.indkey") + + else: + # unnest() and generate_subscripts() both introduced in + # version 8.4 + PK_SQL = """ + SELECT a.attname + FROM pg_attribute a JOIN ( + SELECT unnest(ix.indkey) attnum, + generate_subscripts(ix.indkey, 1) ord + FROM pg_index ix + WHERE ix.indrelid = :table_oid AND ix.indisprimary + ) k ON a.attnum=k.attnum + WHERE a.attrelid = :table_oid + ORDER BY k.ord + """ + t = sql.text(PK_SQL, typemap={'attname': sqltypes.Unicode}) + c = connection.execute(t, table_oid=table_oid) + cols = [r[0] for r in c.fetchall()] + + PK_CONS_SQL = """ + SELECT conname + FROM pg_catalog.pg_constraint r + WHERE r.conrelid = :table_oid AND r.contype = 'p' + ORDER BY 1 + """ + t = sql.text(PK_CONS_SQL, typemap={'conname': sqltypes.Unicode}) + c = connection.execute(t, table_oid=table_oid) + name = c.scalar() + + return {'constrained_columns': cols, 'name': name} + + @reflection.cache + def get_foreign_keys(self, connection, table_name, schema=None, + postgresql_ignore_search_path=False, **kw): + preparer = self.identifier_preparer + table_oid = self.get_table_oid(connection, table_name, schema, + info_cache=kw.get('info_cache')) + + FK_SQL = """ + SELECT r.conname, + pg_catalog.pg_get_constraintdef(r.oid, true) as condef, + n.nspname as conschema + FROM pg_catalog.pg_constraint r, + pg_namespace n, + pg_class c + + WHERE r.conrelid = :table AND + r.contype = 'f' AND + c.oid = confrelid AND + n.oid = c.relnamespace + ORDER BY 1 + """ + # http://www.postgresql.org/docs/9.0/static/sql-createtable.html + FK_REGEX = re.compile( + r'FOREIGN KEY \((.*?)\) REFERENCES (?:(.*?)\.)?(.*?)\((.*?)\)' + r'[\s]?(MATCH (FULL|PARTIAL|SIMPLE)+)?' + r'[\s]?(ON UPDATE ' + r'(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?' + r'[\s]?(ON DELETE ' + r'(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?' + r'[\s]?(DEFERRABLE|NOT DEFERRABLE)?' + r'[\s]?(INITIALLY (DEFERRED|IMMEDIATE)+)?' 
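+        # match groups, in order: constrained columns, optional referred
+        # schema, referred table, referred columns, then the MATCH,
+        # ON UPDATE, ON DELETE, DEFERRABLE and INITIALLY qualifiers
+        # unpacked below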
+ ) + + t = sql.text(FK_SQL, typemap={ + 'conname': sqltypes.Unicode, + 'condef': sqltypes.Unicode}) + c = connection.execute(t, table=table_oid) + fkeys = [] + for conname, condef, conschema in c.fetchall(): + m = re.search(FK_REGEX, condef).groups() + + constrained_columns, referred_schema, \ + referred_table, referred_columns, \ + _, match, _, onupdate, _, ondelete, \ + deferrable, _, initially = m + + if deferrable is not None: + deferrable = True if deferrable == 'DEFERRABLE' else False + constrained_columns = [preparer._unquote_identifier(x) + for x in re.split( + r'\s*,\s*', constrained_columns)] + + if postgresql_ignore_search_path: + # when ignoring search path, we use the actual schema + # provided it isn't the "default" schema + if conschema != self.default_schema_name: + referred_schema = conschema + else: + referred_schema = schema + elif referred_schema: + # referred_schema is the schema that we regexp'ed from + # pg_get_constraintdef(). If the schema is in the search + # path, pg_get_constraintdef() will give us None. + referred_schema = \ + preparer._unquote_identifier(referred_schema) + elif schema is not None and schema == conschema: + # If the actual schema matches the schema of the table + # we're reflecting, then we will use that. + referred_schema = schema + + referred_table = preparer._unquote_identifier(referred_table) + referred_columns = [preparer._unquote_identifier(x) + for x in + re.split(r'\s*,\s', referred_columns)] + fkey_d = { + 'name': conname, + 'constrained_columns': constrained_columns, + 'referred_schema': referred_schema, + 'referred_table': referred_table, + 'referred_columns': referred_columns, + 'options': { + 'onupdate': onupdate, + 'ondelete': ondelete, + 'deferrable': deferrable, + 'initially': initially, + 'match': match + } + } + fkeys.append(fkey_d) + return fkeys + + def _pg_index_any(self, col, compare_to): + if self.server_version_info < (8, 1): + # http://www.postgresql.org/message-id/10279.1124395722@sss.pgh.pa.us + # "In CVS tip you could replace this with "attnum = ANY (indkey)". + # Unfortunately, most array support doesn't work on int2vector in + # pre-8.1 releases, so I think you're kinda stuck with the above + # for now. + # regards, tom lane" + return "(%s)" % " OR ".join( + "%s[%d] = %s" % (compare_to, ind, col) + for ind in range(0, 10) + ) + else: + return "%s = ANY(%s)" % (col, compare_to) + + @reflection.cache + def get_indexes(self, connection, table_name, schema, **kw): + table_oid = self.get_table_oid(connection, table_name, schema, + info_cache=kw.get('info_cache')) + + # cast indkey as varchar since it's an int2vector, + # returned as a list by some drivers such as pypostgresql + + if self.server_version_info < (8, 5): + IDX_SQL = """ + SELECT + i.relname as relname, + ix.indisunique, ix.indexprs, ix.indpred, + a.attname, a.attnum, NULL, ix.indkey%s, + %s, am.amname + FROM + pg_class t + join pg_index ix on t.oid = ix.indrelid + join pg_class i on i.oid = ix.indexrelid + left outer join + pg_attribute a + on t.oid = a.attrelid and %s + left outer join + pg_am am + on i.relam = am.oid + WHERE + t.relkind IN ('r', 'v', 'f', 'm') + and t.oid = :table_oid + and ix.indisprimary = 'f' + ORDER BY + t.relname, + i.relname + """ % ( + # version 8.3 here was based on observing the + # cast does not work in PG 8.2.4, does work in 8.3.0. + # nothing in PG changelogs regarding this. 
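+                # the three substitutions below: an optional ::varchar
+                # cast on indkey, reloptions on 8.2+ (else NULL), and the
+                # version-appropriate ANY() test from _pg_index_any()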
+ "::varchar" if self.server_version_info >= (8, 3) else "", + "i.reloptions" if self.server_version_info >= (8, 2) + else "NULL", + self._pg_index_any("a.attnum", "ix.indkey") + ) + else: + IDX_SQL = """ + SELECT + i.relname as relname, + ix.indisunique, ix.indexprs, ix.indpred, + a.attname, a.attnum, c.conrelid, ix.indkey::varchar, + i.reloptions, am.amname + FROM + pg_class t + join pg_index ix on t.oid = ix.indrelid + join pg_class i on i.oid = ix.indexrelid + left outer join + pg_attribute a + on t.oid = a.attrelid and a.attnum = ANY(ix.indkey) + left outer join + pg_constraint c + on (ix.indrelid = c.conrelid and + ix.indexrelid = c.conindid and + c.contype in ('p', 'u', 'x')) + left outer join + pg_am am + on i.relam = am.oid + WHERE + t.relkind IN ('r', 'v', 'f', 'm') + and t.oid = :table_oid + and ix.indisprimary = 'f' + ORDER BY + t.relname, + i.relname + """ + + t = sql.text(IDX_SQL, typemap={'attname': sqltypes.Unicode}) + c = connection.execute(t, table_oid=table_oid) + + indexes = defaultdict(lambda: defaultdict(dict)) + + sv_idx_name = None + for row in c.fetchall(): + (idx_name, unique, expr, prd, col, + col_num, conrelid, idx_key, options, amname) = row + + if expr: + if idx_name != sv_idx_name: + util.warn( + "Skipped unsupported reflection of " + "expression-based index %s" + % idx_name) + sv_idx_name = idx_name + continue + + if prd and not idx_name == sv_idx_name: + util.warn( + "Predicate of partial index %s ignored during reflection" + % idx_name) + sv_idx_name = idx_name + + has_idx = idx_name in indexes + index = indexes[idx_name] + if col is not None: + index['cols'][col_num] = col + if not has_idx: + index['key'] = [int(k.strip()) for k in idx_key.split()] + index['unique'] = unique + if conrelid is not None: + index['duplicates_constraint'] = idx_name + if options: + index['options'] = dict( + [option.split("=") for option in options]) + + # it *might* be nice to include that this is 'btree' in the + # reflection info. But we don't want an Index object + # to have a ``postgresql_using`` in it that is just the + # default, so for the moment leaving this out. 
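+                # only a non-default access method is recorded; it is
+                # surfaced as the "postgresql_using" dialect option below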
+ if amname and amname != 'btree': + index['amname'] = amname + + result = [] + for name, idx in indexes.items(): + entry = { + 'name': name, + 'unique': idx['unique'], + 'column_names': [idx['cols'][i] for i in idx['key']] + } + if 'duplicates_constraint' in idx: + entry['duplicates_constraint'] = idx['duplicates_constraint'] + if 'options' in idx: + entry.setdefault( + 'dialect_options', {})["postgresql_with"] = idx['options'] + if 'amname' in idx: + entry.setdefault( + 'dialect_options', {})["postgresql_using"] = idx['amname'] + result.append(entry) + return result + + @reflection.cache + def get_unique_constraints(self, connection, table_name, + schema=None, **kw): + table_oid = self.get_table_oid(connection, table_name, schema, + info_cache=kw.get('info_cache')) + + UNIQUE_SQL = """ + SELECT + cons.conname as name, + cons.conkey as key, + a.attnum as col_num, + a.attname as col_name + FROM + pg_catalog.pg_constraint cons + join pg_attribute a + on cons.conrelid = a.attrelid AND + a.attnum = ANY(cons.conkey) + WHERE + cons.conrelid = :table_oid AND + cons.contype = 'u' + """ + + t = sql.text(UNIQUE_SQL, typemap={'col_name': sqltypes.Unicode}) + c = connection.execute(t, table_oid=table_oid) + + uniques = defaultdict(lambda: defaultdict(dict)) + for row in c.fetchall(): + uc = uniques[row.name] + uc["key"] = row.key + uc["cols"][row.col_num] = row.col_name + + return [ + {'name': name, + 'column_names': [uc["cols"][i] for i in uc["key"]]} + for name, uc in uniques.items() + ] + + def _load_enums(self, connection, schema=None): + schema = schema or self.default_schema_name + if not self.supports_native_enum: + return {} + + # Load data types for enums: + SQL_ENUMS = """ + SELECT t.typname as "name", + -- no enum defaults in 8.4 at least + -- t.typdefault as "default", + pg_catalog.pg_type_is_visible(t.oid) as "visible", + n.nspname as "schema", + e.enumlabel as "label" + FROM pg_catalog.pg_type t + LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace + LEFT JOIN pg_catalog.pg_enum e ON t.oid = e.enumtypid + WHERE t.typtype = 'e' + """ + + if schema != '*': + SQL_ENUMS += "AND n.nspname = :schema " + + # e.oid gives us label order within an enum + SQL_ENUMS += 'ORDER BY "schema", "name", e.oid' + + s = sql.text(SQL_ENUMS, typemap={ + 'attname': sqltypes.Unicode, + 'label': sqltypes.Unicode}) + + if schema != '*': + s = s.bindparams(schema=schema) + + c = connection.execute(s) + + enums = [] + enum_by_name = {} + for enum in c.fetchall(): + key = (enum['schema'], enum['name']) + if key in enum_by_name: + enum_by_name[key]['labels'].append(enum['label']) + else: + enum_by_name[key] = enum_rec = { + 'name': enum['name'], + 'schema': enum['schema'], + 'visible': enum['visible'], + 'labels': [enum['label']], + } + enums.append(enum_rec) + + return enums + + def _load_domains(self, connection): + # Load data types for domains: + SQL_DOMAINS = """ + SELECT t.typname as "name", + pg_catalog.format_type(t.typbasetype, t.typtypmod) as "attype", + not t.typnotnull as "nullable", + t.typdefault as "default", + pg_catalog.pg_type_is_visible(t.oid) as "visible", + n.nspname as "schema" + FROM pg_catalog.pg_type t + LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace + WHERE t.typtype = 'd' + """ + + s = sql.text(SQL_DOMAINS, typemap={'attname': sqltypes.Unicode}) + c = connection.execute(s) + + domains = {} + for domain in c.fetchall(): + # strip (30) from character varying(30) + attype = re.search('([^\(]+)', domain['attype']).group(1) + if domain['visible']: + # 'visible' just 
means whether or not the domain is in a + # schema that's on the search path -- or not overridden by + # a schema with higher precedence. If it's not visible, + # it will be prefixed with the schema-name when it's used. + name = domain['name'] + else: + name = "%s.%s" % (domain['schema'], domain['name']) + + domains[name] = { + 'attype': attype, + 'nullable': domain['nullable'], + 'default': domain['default'] + } + + return domains diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/constraints.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/constraints.py new file mode 100644 index 0000000..c6bb890 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/constraints.py @@ -0,0 +1,98 @@ +# Copyright (C) 2013-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +from ...sql.schema import ColumnCollectionConstraint +from ...sql import expression +from ... import util + + +class ExcludeConstraint(ColumnCollectionConstraint): + """A table-level EXCLUDE constraint. + + Defines an EXCLUDE constraint as described in the `postgres + documentation`__. + + __ http://www.postgresql.org/docs/9.0/\ +static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE + """ + + __visit_name__ = 'exclude_constraint' + + where = None + + def __init__(self, *elements, **kw): + """ + :param \*elements: + A sequence of two tuples of the form ``(column, operator)`` where + column must be a column name or Column object and operator must + be a string containing the operator to use. + + :param name: + Optional, the in-database name of this constraint. + + :param deferrable: + Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when + issuing DDL for this constraint. + + :param initially: + Optional string. If set, emit INITIALLY when issuing DDL + for this constraint. + + :param using: + Optional string. If set, emit USING when issuing DDL + for this constraint. Defaults to 'gist'. + + :param where: + Optional string. If set, emit WHERE when issuing DDL + for this constraint. 
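+
+        For example, a constraint forbidding overlapping bookings
+        (an illustrative sketch; assumes the :class:`.TSRANGE` range
+        type is available) might look like::
+
+            from sqlalchemy.dialects.postgresql import ExcludeConstraint, TSRANGE
+
+            room_booking = Table(
+                'room_booking', metadata,
+                Column('room', Integer, primary_key=True),
+                Column('during', TSRANGE),
+                ExcludeConstraint(('room', '='), ('during', '&&'))
+            )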
+
+        """
+        columns = []
+        render_exprs = []
+        self.operators = {}
+
+        expressions, operators = zip(*elements)
+
+        for (expr, column, strname, add_element), operator in zip(
+                self._extract_col_expression_collection(expressions),
+                operators
+        ):
+            if add_element is not None:
+                columns.append(add_element)
+
+            name = column.name if column is not None else strname
+
+            if name is not None:
+                # backwards compat
+                self.operators[name] = operator
+
+            expr = expression._literal_as_text(expr)
+
+            render_exprs.append(
+                (expr, name, operator)
+            )
+
+        self._render_exprs = render_exprs
+        ColumnCollectionConstraint.__init__(
+            self,
+            *columns,
+            name=kw.get('name'),
+            deferrable=kw.get('deferrable'),
+            initially=kw.get('initially')
+        )
+        self.using = kw.get('using', 'gist')
+        where = kw.get('where')
+        if where is not None:
+            self.where = expression._literal_as_text(where)
+
+    def copy(self, **kw):
+        elements = [(col, self.operators[col])
+                    for col in self.columns.keys()]
+        c = self.__class__(*elements,
+                           name=self.name,
+                           deferrable=self.deferrable,
+                           initially=self.initially)
+        c.dispatch._update(self.dispatch)
+        return c
diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/hstore.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/hstore.py
new file mode 100644
index 0000000..a4ff461
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/hstore.py
@@ -0,0 +1,376 @@
+# postgresql/hstore.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import re
+
+from .base import ARRAY, ischema_names
+from ... import types as sqltypes
+from ...sql import functions as sqlfunc
+from ...sql.operators import custom_op
+from ... import util
+
+__all__ = ('HSTORE', 'hstore')
+
+# My best guess at the parsing rules of hstore literals, since no formal
+# grammar is given.  This is mostly reverse engineered from PG's input parser
+# behavior.
+HSTORE_PAIR_RE = re.compile(r"""
+(
+  "(?P<key> (\\ . | [^"])* )"       # Quoted key
+)
+[ ]* => [ ]*    # Pair operator, optional adjoining whitespace
+(
+    (?P<value_null> NULL )          # NULL value
+  | "(?P<value> (\\ . | [^"])* )"   # Quoted value
+)
+""", re.VERBOSE)
+
+HSTORE_DELIMITER_RE = re.compile(r"""
+[ ]* , [ ]*
+""", re.VERBOSE)
+
+
+def _parse_error(hstore_str, pos):
+    """format an unmarshalling error."""
+
+    ctx = 20
+    hslen = len(hstore_str)
+
+    parsed_tail = hstore_str[max(pos - ctx - 1, 0):min(pos, hslen)]
+    residual = hstore_str[min(pos, hslen):min(pos + ctx + 1, hslen)]
+
+    if len(parsed_tail) > ctx:
+        parsed_tail = '[...]' + parsed_tail[1:]
+    if len(residual) > ctx:
+        residual = residual[:-1] + '[...]'
+
+    return "After %r, could not parse residual at position %d: %r" % (
+        parsed_tail, pos, residual)
+
+
+def _parse_hstore(hstore_str):
+    """Parse an hstore from its literal string representation.
+
+    Attempts to approximate PG's hstore input parsing rules as closely as
+    possible. Although currently this is not strictly necessary, since the
+    current implementation of hstore's output syntax is stricter than what it
+    accepts as input, the documentation makes no guarantees that will always
+    be the case.
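+
+    A doctest-style sketch of the expected behavior::
+
+        >>> _parse_hstore('"key"=>"value"')
+        {'key': 'value'}
+        >>> _parse_hstore('"n"=>NULL')
+        {'n': None}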
+ + + + """ + result = {} + pos = 0 + pair_match = HSTORE_PAIR_RE.match(hstore_str) + + while pair_match is not None: + key = pair_match.group('key').replace(r'\"', '"').replace( + "\\\\", "\\") + if pair_match.group('value_null'): + value = None + else: + value = pair_match.group('value').replace( + r'\"', '"').replace("\\\\", "\\") + result[key] = value + + pos += pair_match.end() + + delim_match = HSTORE_DELIMITER_RE.match(hstore_str[pos:]) + if delim_match is not None: + pos += delim_match.end() + + pair_match = HSTORE_PAIR_RE.match(hstore_str[pos:]) + + if pos != len(hstore_str): + raise ValueError(_parse_error(hstore_str, pos)) + + return result + + +def _serialize_hstore(val): + """Serialize a dictionary into an hstore literal. Keys and values must + both be strings (except None for values). + + """ + def esc(s, position): + if position == 'value' and s is None: + return 'NULL' + elif isinstance(s, util.string_types): + return '"%s"' % s.replace("\\", "\\\\").replace('"', r'\"') + else: + raise ValueError("%r in %s position is not a string." % + (s, position)) + + return ', '.join('%s=>%s' % (esc(k, 'key'), esc(v, 'value')) + for k, v in val.items()) + + +class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine): + """Represent the Postgresql HSTORE type. + + The :class:`.HSTORE` type stores dictionaries containing strings, e.g.:: + + data_table = Table('data_table', metadata, + Column('id', Integer, primary_key=True), + Column('data', HSTORE) + ) + + with engine.connect() as conn: + conn.execute( + data_table.insert(), + data = {"key1": "value1", "key2": "value2"} + ) + + :class:`.HSTORE` provides for a wide range of operations, including: + + * Index operations:: + + data_table.c.data['some key'] == 'some value' + + * Containment operations:: + + data_table.c.data.has_key('some key') + + data_table.c.data.has_all(['one', 'two', 'three']) + + * Concatenation:: + + data_table.c.data + {"k1": "v1"} + + For a full list of special methods see + :class:`.HSTORE.comparator_factory`. + + For usage with the SQLAlchemy ORM, it may be desirable to combine + the usage of :class:`.HSTORE` with :class:`.MutableDict` dictionary + now part of the :mod:`sqlalchemy.ext.mutable` + extension. This extension will allow "in-place" changes to the + dictionary, e.g. addition of new keys or replacement/removal of existing + keys to/from the current dictionary, to produce events which will be + detected by the unit of work:: + + from sqlalchemy.ext.mutable import MutableDict + + class MyClass(Base): + __tablename__ = 'data_table' + + id = Column(Integer, primary_key=True) + data = Column(MutableDict.as_mutable(HSTORE)) + + my_object = session.query(MyClass).one() + + # in-place mutation, requires Mutable extension + # in order for the ORM to detect + my_object.data['some_key'] = 'some value' + + session.commit() + + When the :mod:`sqlalchemy.ext.mutable` extension is not used, the ORM + will not be alerted to any changes to the contents of an existing + dictionary, unless that dictionary value is re-assigned to the + HSTORE-attribute itself, thus generating a change event. + + .. versionadded:: 0.8 + + .. seealso:: + + :class:`.hstore` - render the Postgresql ``hstore()`` function. + + + """ + + __visit_name__ = 'HSTORE' + hashable = False + + class comparator_factory(sqltypes.Concatenable.Comparator): + """Define comparison operations for :class:`.HSTORE`.""" + + def has_key(self, other): + """Boolean expression. Test for presence of a key. Note that the + key may be a SQLA expression. 
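+            This renders the Postgresql ``?`` operator, with the key
+            passed as a bound parameter.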
+ """ + return self.expr.op('?')(other) + + def has_all(self, other): + """Boolean expression. Test for presence of all keys in the PG + array. + """ + return self.expr.op('?&')(other) + + def has_any(self, other): + """Boolean expression. Test for presence of any key in the PG + array. + """ + return self.expr.op('?|')(other) + + def defined(self, key): + """Boolean expression. Test for presence of a non-NULL value for + the key. Note that the key may be a SQLA expression. + """ + return _HStoreDefinedFunction(self.expr, key) + + def contains(self, other, **kwargs): + """Boolean expression. Test if keys are a superset of the keys of + the argument hstore expression. + """ + return self.expr.op('@>')(other) + + def contained_by(self, other): + """Boolean expression. Test if keys are a proper subset of the + keys of the argument hstore expression. + """ + return self.expr.op('<@')(other) + + def __getitem__(self, other): + """Text expression. Get the value at a given key. Note that the + key may be a SQLA expression. + """ + return self.expr.op('->', precedence=5)(other) + + def delete(self, key): + """HStore expression. Returns the contents of this hstore with the + given key deleted. Note that the key may be a SQLA expression. + """ + if isinstance(key, dict): + key = _serialize_hstore(key) + return _HStoreDeleteFunction(self.expr, key) + + def slice(self, array): + """HStore expression. Returns a subset of an hstore defined by + array of keys. + """ + return _HStoreSliceFunction(self.expr, array) + + def keys(self): + """Text array expression. Returns array of keys.""" + return _HStoreKeysFunction(self.expr) + + def vals(self): + """Text array expression. Returns array of values.""" + return _HStoreValsFunction(self.expr) + + def array(self): + """Text array expression. Returns array of alternating keys and + values. + """ + return _HStoreArrayFunction(self.expr) + + def matrix(self): + """Text array expression. Returns array of [key, value] pairs.""" + return _HStoreMatrixFunction(self.expr) + + def _adapt_expression(self, op, other_comparator): + if isinstance(op, custom_op): + if op.opstring in ['?', '?&', '?|', '@>', '<@']: + return op, sqltypes.Boolean + elif op.opstring == '->': + return op, sqltypes.Text + return sqltypes.Concatenable.Comparator.\ + _adapt_expression(self, op, other_comparator) + + def bind_processor(self, dialect): + if util.py2k: + encoding = dialect.encoding + + def process(value): + if isinstance(value, dict): + return _serialize_hstore(value).encode(encoding) + else: + return value + else: + def process(value): + if isinstance(value, dict): + return _serialize_hstore(value) + else: + return value + return process + + def result_processor(self, dialect, coltype): + if util.py2k: + encoding = dialect.encoding + + def process(value): + if value is not None: + return _parse_hstore(value.decode(encoding)) + else: + return value + else: + def process(value): + if value is not None: + return _parse_hstore(value) + else: + return value + return process + + +ischema_names['hstore'] = HSTORE + + +class hstore(sqlfunc.GenericFunction): + """Construct an hstore value within a SQL expression using the + Postgresql ``hstore()`` function. + + The :class:`.hstore` function accepts one or two arguments as described + in the Postgresql documentation. + + E.g.:: + + from sqlalchemy.dialects.postgresql import array, hstore + + select([hstore('key1', 'value1')]) + + select([ + hstore( + array(['key1', 'key2', 'key3']), + array(['value1', 'value2', 'value3']) + ) + ]) + + .. 
versionadded:: 0.8 + + .. seealso:: + + :class:`.HSTORE` - the Postgresql ``HSTORE`` datatype. + + """ + type = HSTORE + name = 'hstore' + + +class _HStoreDefinedFunction(sqlfunc.GenericFunction): + type = sqltypes.Boolean + name = 'defined' + + +class _HStoreDeleteFunction(sqlfunc.GenericFunction): + type = HSTORE + name = 'delete' + + +class _HStoreSliceFunction(sqlfunc.GenericFunction): + type = HSTORE + name = 'slice' + + +class _HStoreKeysFunction(sqlfunc.GenericFunction): + type = ARRAY(sqltypes.Text) + name = 'akeys' + + +class _HStoreValsFunction(sqlfunc.GenericFunction): + type = ARRAY(sqltypes.Text) + name = 'avals' + + +class _HStoreArrayFunction(sqlfunc.GenericFunction): + type = ARRAY(sqltypes.Text) + name = 'hstore_to_array' + + +class _HStoreMatrixFunction(sqlfunc.GenericFunction): + type = ARRAY(sqltypes.Text) + name = 'hstore_to_matrix' diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/json.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/json.py new file mode 100644 index 0000000..f7ede85 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/json.py @@ -0,0 +1,358 @@ +# postgresql/json.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +from __future__ import absolute_import + +import json + +from .base import ischema_names +from ... import types as sqltypes +from ...sql.operators import custom_op +from ... import sql +from ...sql import elements, default_comparator +from ... import util + +__all__ = ('JSON', 'JSONElement', 'JSONB') + + +class JSONElement(elements.BinaryExpression): + """Represents accessing an element of a :class:`.JSON` value. + + The :class:`.JSONElement` is produced whenever using the Python index + operator on an expression that has the type :class:`.JSON`:: + + expr = mytable.c.json_data['some_key'] + + The expression typically compiles to a JSON access such as ``col -> key``. + Modifiers are then available for typing behavior, including + :meth:`.JSONElement.cast` and :attr:`.JSONElement.astext`. + + """ + + def __init__(self, left, right, astext=False, + opstring=None, result_type=None): + self._astext = astext + if opstring is None: + if hasattr(right, '__iter__') and \ + not isinstance(right, util.string_types): + opstring = "#>" + right = "{%s}" % ( + ", ".join(util.text_type(elem) for elem in right)) + else: + opstring = "->" + + self._json_opstring = opstring + operator = custom_op(opstring, precedence=5) + right = default_comparator._check_literal( + left, operator, right) + super(JSONElement, self).__init__( + left, right, operator, type_=result_type) + + @property + def astext(self): + """Convert this :class:`.JSONElement` to use the 'astext' operator + when evaluated. + + E.g.:: + + select([data_table.c.data['some key'].astext]) + + .. seealso:: + + :meth:`.JSONElement.cast` + + """ + if self._astext: + return self + else: + return JSONElement( + self.left, + self.right, + astext=True, + opstring=self._json_opstring + ">", + result_type=sqltypes.String(convert_unicode=True) + ) + + def cast(self, type_): + """Convert this :class:`.JSONElement` to apply both the 'astext' operator + as well as an explicit type cast when evaluated. + + E.g.:: + + select([data_table.c.data['some key'].cast(Integer)]) + + .. 
seealso:: + + :attr:`.JSONElement.astext` + + """ + if not self._astext: + return self.astext.cast(type_) + else: + return sql.cast(self, type_) + + +class JSON(sqltypes.TypeEngine): + """Represent the Postgresql JSON type. + + The :class:`.JSON` type stores arbitrary JSON format data, e.g.:: + + data_table = Table('data_table', metadata, + Column('id', Integer, primary_key=True), + Column('data', JSON) + ) + + with engine.connect() as conn: + conn.execute( + data_table.insert(), + data = {"key1": "value1", "key2": "value2"} + ) + + :class:`.JSON` provides several operations: + + * Index operations:: + + data_table.c.data['some key'] + + * Index operations returning text (required for text comparison):: + + data_table.c.data['some key'].astext == 'some value' + + * Index operations with a built-in CAST call:: + + data_table.c.data['some key'].cast(Integer) == 5 + + * Path index operations:: + + data_table.c.data[('key_1', 'key_2', ..., 'key_n')] + + * Path index operations returning text (required for text comparison):: + + data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == \\ + 'some value' + + Index operations return an instance of :class:`.JSONElement`, which + represents an expression such as ``column -> index``. This element then + defines methods such as :attr:`.JSONElement.astext` and + :meth:`.JSONElement.cast` for setting up type behavior. + + The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not + detect in-place mutations to the structure. In order to detect these, the + :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will + allow "in-place" changes to the datastructure to produce events which + will be detected by the unit of work. See the example at :class:`.HSTORE` + for a simple example involving a dictionary. + + Custom serializers and deserializers are specified at the dialect level, + that is using :func:`.create_engine`. The reason for this is that when + using psycopg2, the DBAPI only allows serializers at the per-cursor + or per-connection level. E.g.:: + + engine = create_engine("postgresql://scott:tiger@localhost/test", + json_serializer=my_serialize_fn, + json_deserializer=my_deserialize_fn + ) + + When using the psycopg2 dialect, the json_deserializer is registered + against the database using ``psycopg2.extras.register_default_json``. + + .. versionadded:: 0.9 + + """ + + __visit_name__ = 'JSON' + + def __init__(self, none_as_null=False): + """Construct a :class:`.JSON` type. + + :param none_as_null: if True, persist the value ``None`` as a + SQL NULL value, not the JSON encoding of ``null``. Note that + when this flag is False, the :func:`.null` construct can still + be used to persist a NULL value:: + + from sqlalchemy import null + conn.execute(table.insert(), data=null()) + + .. versionchanged:: 0.9.8 - Added ``none_as_null``, and :func:`.null` + is now supported in order to persist a NULL value. 
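+
+        For example, with the flag enabled (a short sketch)::
+
+            t = Table('t', metadata,
+                      Column('id', Integer, primary_key=True),
+                      Column('data', JSON(none_as_null=True)))
+
+            # None is now persisted as SQL NULL, not the JSON 'null'
+            conn.execute(t.insert(), data=None)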
+ + """ + self.none_as_null = none_as_null + + class comparator_factory(sqltypes.Concatenable.Comparator): + """Define comparison operations for :class:`.JSON`.""" + + def __getitem__(self, other): + """Get the value at a given key.""" + + return JSONElement(self.expr, other) + + def _adapt_expression(self, op, other_comparator): + if isinstance(op, custom_op): + if op.opstring == '->': + return op, sqltypes.Text + return sqltypes.Concatenable.Comparator.\ + _adapt_expression(self, op, other_comparator) + + def bind_processor(self, dialect): + json_serializer = dialect._json_serializer or json.dumps + if util.py2k: + encoding = dialect.encoding + + def process(value): + if isinstance(value, elements.Null) or ( + value is None and self.none_as_null + ): + return None + return json_serializer(value).encode(encoding) + else: + def process(value): + if isinstance(value, elements.Null) or ( + value is None and self.none_as_null + ): + return None + return json_serializer(value) + return process + + def result_processor(self, dialect, coltype): + json_deserializer = dialect._json_deserializer or json.loads + if util.py2k: + encoding = dialect.encoding + + def process(value): + if value is None: + return None + return json_deserializer(value.decode(encoding)) + else: + def process(value): + if value is None: + return None + return json_deserializer(value) + return process + + +ischema_names['json'] = JSON + + +class JSONB(JSON): + """Represent the Postgresql JSONB type. + + The :class:`.JSONB` type stores arbitrary JSONB format data, e.g.:: + + data_table = Table('data_table', metadata, + Column('id', Integer, primary_key=True), + Column('data', JSONB) + ) + + with engine.connect() as conn: + conn.execute( + data_table.insert(), + data = {"key1": "value1", "key2": "value2"} + ) + + :class:`.JSONB` provides several operations: + + * Index operations:: + + data_table.c.data['some key'] + + * Index operations returning text (required for text comparison):: + + data_table.c.data['some key'].astext == 'some value' + + * Index operations with a built-in CAST call:: + + data_table.c.data['some key'].cast(Integer) == 5 + + * Path index operations:: + + data_table.c.data[('key_1', 'key_2', ..., 'key_n')] + + * Path index operations returning text (required for text comparison):: + + data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == \\ + 'some value' + + Index operations return an instance of :class:`.JSONElement`, which + represents an expression such as ``column -> index``. This element then + defines methods such as :attr:`.JSONElement.astext` and + :meth:`.JSONElement.cast` for setting up type behavior. + + The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not + detect in-place mutations to the structure. In order to detect these, the + :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will + allow "in-place" changes to the datastructure to produce events which + will be detected by the unit of work. See the example at :class:`.HSTORE` + for a simple example involving a dictionary. + + Custom serializers and deserializers are specified at the dialect level, + that is using :func:`.create_engine`. The reason for this is that when + using psycopg2, the DBAPI only allows serializers at the per-cursor + or per-connection level. 
E.g.:: + + engine = create_engine("postgresql://scott:tiger@localhost/test", + json_serializer=my_serialize_fn, + json_deserializer=my_deserialize_fn + ) + + When using the psycopg2 dialect, the json_deserializer is registered + against the database using ``psycopg2.extras.register_default_json``. + + .. versionadded:: 0.9.7 + + """ + + __visit_name__ = 'JSONB' + hashable = False + + class comparator_factory(sqltypes.Concatenable.Comparator): + """Define comparison operations for :class:`.JSON`.""" + + def __getitem__(self, other): + """Get the value at a given key.""" + + return JSONElement(self.expr, other) + + def _adapt_expression(self, op, other_comparator): + # How does one do equality?? jsonb also has "=" eg. + # '[1,2,3]'::jsonb = '[1,2,3]'::jsonb + if isinstance(op, custom_op): + if op.opstring in ['?', '?&', '?|', '@>', '<@']: + return op, sqltypes.Boolean + if op.opstring == '->': + return op, sqltypes.Text + return sqltypes.Concatenable.Comparator.\ + _adapt_expression(self, op, other_comparator) + + def has_key(self, other): + """Boolean expression. Test for presence of a key. Note that the + key may be a SQLA expression. + """ + return self.expr.op('?')(other) + + def has_all(self, other): + """Boolean expression. Test for presence of all keys in jsonb + """ + return self.expr.op('?&')(other) + + def has_any(self, other): + """Boolean expression. Test for presence of any key in jsonb + """ + return self.expr.op('?|')(other) + + def contains(self, other, **kwargs): + """Boolean expression. Test if keys (or array) are a superset of/contained + the keys of the argument jsonb expression. + """ + return self.expr.op('@>')(other) + + def contained_by(self, other): + """Boolean expression. Test if keys are a proper subset of the + keys of the argument jsonb expression. + """ + return self.expr.op('<@')(other) + +ischema_names['jsonb'] = JSONB diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pg8000.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pg8000.py new file mode 100644 index 0000000..68e8e02 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pg8000.py @@ -0,0 +1,264 @@ +# postgresql/pg8000.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: postgresql+pg8000 + :name: pg8000 + :dbapi: pg8000 + :connectstring: \ +postgresql+pg8000://user:password@host:port/dbname[?key=value&key=value...] + :url: https://pythonhosted.org/pg8000/ + + +.. _pg8000_unicode: + +Unicode +------- + +pg8000 will encode / decode string values between it and the server using the +PostgreSQL ``client_encoding`` parameter; by default this is the value in +the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``. +Typically, this can be changed to ``utf-8``, as a more useful default:: + + #client_encoding = sql_ascii # actually, defaults to database + # encoding + client_encoding = utf8 + +The ``client_encoding`` can be overriden for a session by executing the SQL: + +SET CLIENT_ENCODING TO 'utf8'; + +SQLAlchemy will execute this SQL on all new connections based on the value +passed to :func:`.create_engine` using the ``client_encoding`` parameter:: + + engine = create_engine( + "postgresql+pg8000://user:pass@host/dbname", client_encoding='utf8') + + +.. 
_pg8000_isolation_level: + +pg8000 Transaction Isolation Level +------------------------------------- + +The pg8000 dialect offers the same isolation level settings as that +of the :ref:`psycopg2 ` dialect: + +* ``READ COMMITTED`` +* ``READ UNCOMMITTED`` +* ``REPEATABLE READ`` +* ``SERIALIZABLE`` +* ``AUTOCOMMIT`` + +.. versionadded:: 0.9.5 support for AUTOCOMMIT isolation level when using + pg8000. + +.. seealso:: + + :ref:`postgresql_isolation_level` + + :ref:`psycopg2_isolation_level` + + +""" +from ... import util, exc +import decimal +from ... import processors +from ... import types as sqltypes +from .base import ( + PGDialect, PGCompiler, PGIdentifierPreparer, PGExecutionContext, + _DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES) +import re +from sqlalchemy.dialects.postgresql.json import JSON + + +class _PGNumeric(sqltypes.Numeric): + def result_processor(self, dialect, coltype): + if self.asdecimal: + if coltype in _FLOAT_TYPES: + return processors.to_decimal_processor_factory( + decimal.Decimal, self._effective_decimal_return_scale) + elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES: + # pg8000 returns Decimal natively for 1700 + return None + else: + raise exc.InvalidRequestError( + "Unknown PG numeric type: %d" % coltype) + else: + if coltype in _FLOAT_TYPES: + # pg8000 returns float natively for 701 + return None + elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES: + return processors.to_float + else: + raise exc.InvalidRequestError( + "Unknown PG numeric type: %d" % coltype) + + +class _PGNumericNoBind(_PGNumeric): + def bind_processor(self, dialect): + return None + + +class _PGJSON(JSON): + + def result_processor(self, dialect, coltype): + if dialect._dbapi_version > (1, 10, 1): + return None # Has native JSON + else: + return super(_PGJSON, self).result_processor(dialect, coltype) + + +class PGExecutionContext_pg8000(PGExecutionContext): + pass + + +class PGCompiler_pg8000(PGCompiler): + def visit_mod_binary(self, binary, operator, **kw): + return self.process(binary.left, **kw) + " %% " + \ + self.process(binary.right, **kw) + + def post_process_text(self, text): + if '%%' in text: + util.warn("The SQLAlchemy postgresql dialect " + "now automatically escapes '%' in text() " + "expressions to '%%'.") + return text.replace('%', '%%') + + +class PGIdentifierPreparer_pg8000(PGIdentifierPreparer): + def _escape_identifier(self, value): + value = value.replace(self.escape_quote, self.escape_to_quote) + return value.replace('%', '%%') + + +class PGDialect_pg8000(PGDialect): + driver = 'pg8000' + + supports_unicode_statements = True + + supports_unicode_binds = True + + default_paramstyle = 'format' + supports_sane_multi_rowcount = True + execution_ctx_cls = PGExecutionContext_pg8000 + statement_compiler = PGCompiler_pg8000 + preparer = PGIdentifierPreparer_pg8000 + description_encoding = 'use_encoding' + + colspecs = util.update_copy( + PGDialect.colspecs, + { + sqltypes.Numeric: _PGNumericNoBind, + sqltypes.Float: _PGNumeric, + JSON: _PGJSON, + } + ) + + def __init__(self, client_encoding=None, **kwargs): + PGDialect.__init__(self, **kwargs) + self.client_encoding = client_encoding + + def initialize(self, connection): + self.supports_sane_multi_rowcount = self._dbapi_version >= (1, 9, 14) + super(PGDialect_pg8000, self).initialize(connection) + + @util.memoized_property + def _dbapi_version(self): + if self.dbapi and hasattr(self.dbapi, '__version__'): + return tuple( + [ + int(x) for x in re.findall( + r'(\d+)(?:[-\.]?|$)', self.dbapi.__version__)]) + else: + return 
(99, 99, 99) + + @classmethod + def dbapi(cls): + return __import__('pg8000') + + def create_connect_args(self, url): + opts = url.translate_connect_args(username='user') + if 'port' in opts: + opts['port'] = int(opts['port']) + opts.update(url.query) + return ([], opts) + + def is_disconnect(self, e, connection, cursor): + return "connection is closed" in str(e) + + def set_isolation_level(self, connection, level): + level = level.replace('_', ' ') + + # adjust for ConnectionFairy possibly being present + if hasattr(connection, 'connection'): + connection = connection.connection + + if level == 'AUTOCOMMIT': + connection.autocommit = True + elif level in self._isolation_lookup: + connection.autocommit = False + cursor = connection.cursor() + cursor.execute( + "SET SESSION CHARACTERISTICS AS TRANSACTION " + "ISOLATION LEVEL %s" % level) + cursor.execute("COMMIT") + cursor.close() + else: + raise exc.ArgumentError( + "Invalid value '%s' for isolation_level. " + "Valid isolation levels for %s are %s or AUTOCOMMIT" % + (level, self.name, ", ".join(self._isolation_lookup)) + ) + + def set_client_encoding(self, connection, client_encoding): + # adjust for ConnectionFairy possibly being present + if hasattr(connection, 'connection'): + connection = connection.connection + + cursor = connection.cursor() + cursor.execute("SET CLIENT_ENCODING TO '" + client_encoding + "'") + cursor.execute("COMMIT") + cursor.close() + + def do_begin_twophase(self, connection, xid): + connection.connection.tpc_begin((0, xid, '')) + + def do_prepare_twophase(self, connection, xid): + connection.connection.tpc_prepare() + + def do_rollback_twophase( + self, connection, xid, is_prepared=True, recover=False): + connection.connection.tpc_rollback((0, xid, '')) + + def do_commit_twophase( + self, connection, xid, is_prepared=True, recover=False): + connection.connection.tpc_commit((0, xid, '')) + + def do_recover_twophase(self, connection): + return [row[1] for row in connection.connection.tpc_recover()] + + def on_connect(self): + fns = [] + if self.client_encoding is not None: + def on_connect(conn): + self.set_client_encoding(conn, self.client_encoding) + fns.append(on_connect) + + if self.isolation_level is not None: + def on_connect(conn): + self.set_isolation_level(conn, self.isolation_level) + fns.append(on_connect) + + if len(fns) > 0: + def on_connect(conn): + for fn in fns: + fn(conn) + return on_connect + else: + return None + +dialect = PGDialect_pg8000 diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/psycopg2.py new file mode 100644 index 0000000..a0f0cca --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/psycopg2.py @@ -0,0 +1,726 @@ +# postgresql/psycopg2.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: postgresql+psycopg2 + :name: psycopg2 + :dbapi: psycopg2 + :connectstring: postgresql+psycopg2://user:password@host:port/dbname\ +[?key=value&key=value...] + :url: http://pypi.python.org/pypi/psycopg2/ + +psycopg2 Connect Arguments +----------------------------------- + +psycopg2-specific keyword arguments which are accepted by +:func:`.create_engine()` are: + +* ``server_side_cursors``: Enable the usage of "server side cursors" for SQL + statements which support this feature. 
What this essentially means from a + psycopg2 point of view is that the cursor is created using a name, e.g. + ``connection.cursor('some name')``, which has the effect that result rows + are not immediately pre-fetched and buffered after statement execution, but + are instead left on the server and only retrieved as needed. SQLAlchemy's + :class:`~sqlalchemy.engine.ResultProxy` uses special row-buffering + behavior when this feature is enabled, such that groups of 100 rows at a + time are fetched over the wire to reduce conversational overhead. + Note that the ``stream_results=True`` execution option is a more targeted + way of enabling this mode on a per-execution basis. +* ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode + per connection. True by default. + + .. seealso:: + + :ref:`psycopg2_disable_native_unicode` + +* ``isolation_level``: This option, available for all PostgreSQL dialects, + includes the ``AUTOCOMMIT`` isolation level when using the psycopg2 + dialect. + + .. seealso:: + + :ref:`psycopg2_isolation_level` + +* ``client_encoding``: sets the client encoding in a libpq-agnostic way, + using psycopg2's ``set_client_encoding()`` method. + + .. seealso:: + + :ref:`psycopg2_unicode` + +Unix Domain Connections +------------------------ + +psycopg2 supports connecting via Unix domain connections. When the ``host`` +portion of the URL is omitted, SQLAlchemy passes ``None`` to psycopg2, +which specifies Unix-domain communication rather than TCP/IP communication:: + + create_engine("postgresql+psycopg2://user:password@/dbname") + +By default, the socket file used is to connect to a Unix-domain socket +in ``/tmp``, or whatever socket directory was specified when PostgreSQL +was built. This value can be overridden by passing a pathname to psycopg2, +using ``host`` as an additional keyword argument:: + + create_engine("postgresql+psycopg2://user:password@/dbname?\ +host=/var/lib/postgresql") + +See also: + +`PQconnectdbParams `_ + +.. _psycopg2_execution_options: + +Per-Statement/Connection Execution Options +------------------------------------------- + +The following DBAPI-specific options are respected when used with +:meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`, +:meth:`.Query.execution_options`, in addition to those not specific to DBAPIs: + +* ``isolation_level`` - Set the transaction isolation level for the lifespan of a + :class:`.Connection` (can only be set on a connection, not a statement + or query). See :ref:`psycopg2_isolation_level`. + +* ``stream_results`` - Enable or disable usage of psycopg2 server side cursors - + this feature makes use of "named" cursors in combination with special + result handling methods so that result rows are not fully buffered. + If ``None`` or not set, the ``server_side_cursors`` option of the + :class:`.Engine` is used. + +* ``max_row_buffer`` - when using ``stream_results``, an integer value that + specifies the maximum number of rows to buffer at a time. This is + interpreted by the :class:`.BufferedRowResultProxy`, and if omitted the + buffer will grow to ultimately store 1000 rows at a time. + + .. versionadded:: 1.0.6 + +.. _psycopg2_unicode: + +Unicode with Psycopg2 +---------------------- + +By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE`` +extension, such that the DBAPI receives and returns all strings as Python +Unicode objects directly - SQLAlchemy passes these values through without +change. 
Psycopg2 here will encode/decode string values based on the +current "client encoding" setting; by default this is the value in +the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``. +Typically, this can be changed to ``utf8``, as a more useful default:: + + # postgresql.conf file + + # client_encoding = sql_ascii # actually, defaults to database + # encoding + client_encoding = utf8 + +A second way to affect the client encoding is to set it within Psycopg2 +locally. SQLAlchemy will call psycopg2's +:meth:`psycopg2:connection.set_client_encoding` method +on all new connections based on the value passed to +:func:`.create_engine` using the ``client_encoding`` parameter:: + + # set_client_encoding() setting; + # works for *all* Postgresql versions + engine = create_engine("postgresql://user:pass@host/dbname", + client_encoding='utf8') + +This overrides the encoding specified in the Postgresql client configuration. +When using the parameter in this way, the psycopg2 driver emits +``SET client_encoding TO 'utf8'`` on the connection explicitly, and works +in all Postgresql versions. + +Note that the ``client_encoding`` setting as passed to :func:`.create_engine` +is **not the same** as the more recently added ``client_encoding`` parameter +now supported by libpq directly. This is enabled when ``client_encoding`` +is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed +using the :paramref:`.create_engine.connect_args` parameter:: + + # libpq direct parameter setting; + # only works for Postgresql **9.1 and above** + engine = create_engine("postgresql://user:pass@host/dbname", + connect_args={'client_encoding': 'utf8'}) + + # using the query string is equivalent + engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8") + +The above parameter was only added to libpq as of version 9.1 of Postgresql, +so using the previous method is better for cross-version support. + +.. _psycopg2_disable_native_unicode: + +Disabling Native Unicode +^^^^^^^^^^^^^^^^^^^^^^^^ + +SQLAlchemy can also be instructed to skip the usage of the psycopg2 +``UNICODE`` extension and to instead utilize its own unicode encode/decode +services, which are normally reserved only for those DBAPIs that don't +fully support unicode directly. Passing ``use_native_unicode=False`` to +:func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``. +SQLAlchemy will instead encode data itself into Python bytestrings on the way +in and coerce from bytes on the way back, +using the value of the :func:`.create_engine` ``encoding`` parameter, which +defaults to ``utf-8``. +SQLAlchemy's own unicode encode/decode functionality is steadily becoming +obsolete as most DBAPIs now support unicode fully. + +Bound Parameter Styles +---------------------- + +The default parameter style for the psycopg2 dialect is "pyformat", where +SQL is rendered using ``%(paramname)s`` style. This format has the limitation +that it does not accommodate the unusual case of parameter names that +actually contain percent or parenthesis symbols; as SQLAlchemy in many cases +generates bound parameter names based on the name of a column, the presence +of these characters in a column name can lead to problems. + +There are two solutions to the issue of a :class:`.schema.Column` that contains +one of these characters in its name. 
One is to specify the +:paramref:`.schema.Column.key` for columns that have such names:: + + measurement = Table('measurement', metadata, + Column('Size (meters)', Integer, key='size_meters') + ) + +Above, an INSERT statement such as ``measurement.insert()`` will use +``size_meters`` as the parameter name, and a SQL expression such as +``measurement.c.size_meters > 10`` will derive the bound parameter name +from the ``size_meters`` key as well. + +.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key` + as the source of naming when anonymous bound parameters are created + in SQL expressions; previously, this behavior only applied to + :meth:`.Table.insert` and :meth:`.Table.update` parameter names. + +The other solution is to use a positional format; psycopg2 allows use of the +"format" paramstyle, which can be passed to +:paramref:`.create_engine.paramstyle`:: + + engine = create_engine( + 'postgresql://scott:tiger@localhost:5432/test', paramstyle='format') + +With the above engine, instead of a statement like:: + + INSERT INTO measurement ("Size (meters)") VALUES (%(Size (meters))s) + {'Size (meters)': 1} + +we instead see:: + + INSERT INTO measurement ("Size (meters)") VALUES (%s) + (1, ) + +Where above, the dictionary style is converted into a tuple with positional +style. + + +Transactions +------------ + +The psycopg2 dialect fully supports SAVEPOINT and two-phase commit operations. + +.. _psycopg2_isolation_level: + +Psycopg2 Transaction Isolation Level +------------------------------------- + +As discussed in :ref:`postgresql_isolation_level`, +all Postgresql dialects support setting of transaction isolation level +both via the ``isolation_level`` parameter passed to :func:`.create_engine`, +as well as the ``isolation_level`` argument used by +:meth:`.Connection.execution_options`. When using the psycopg2 dialect, these +options make use of psycopg2's ``set_isolation_level()`` connection method, +rather than emitting a Postgresql directive; this is because psycopg2's +API-level setting is always emitted at the start of each transaction in any +case. + +The psycopg2 dialect supports these constants for isolation level: + +* ``READ COMMITTED`` +* ``READ UNCOMMITTED`` +* ``REPEATABLE READ`` +* ``SERIALIZABLE`` +* ``AUTOCOMMIT`` + +.. versionadded:: 0.8.2 support for AUTOCOMMIT isolation level when using + psycopg2. + +.. seealso:: + + :ref:`postgresql_isolation_level` + + :ref:`pg8000_isolation_level` + + +NOTICE logging +--------------- + +The psycopg2 dialect will log Postgresql NOTICE messages via the +``sqlalchemy.dialects.postgresql`` logger:: + + import logging + logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO) + +.. _psycopg2_hstore:: + +HSTORE type +------------ + +The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of +the HSTORE type. The SQLAlchemy psycopg2 dialect will enable this extension +by default when psycopg2 version 2.4 or greater is used, and +it is detected that the target database has the HSTORE type set up for use. +In other words, when the dialect makes the first +connection, a sequence like the following is performed: + +1. Request the available HSTORE oids using + ``psycopg2.extras.HstoreAdapter.get_oids()``. + If this function returns a list of HSTORE identifiers, we then determine + that the ``HSTORE`` extension is present. + This function is **skipped** if the version of psycopg2 installed is + less than version 2.4. + +2. 
If the ``use_native_hstore`` flag is at its default of ``True``, and + we've detected that ``HSTORE`` oids are available, the + ``psycopg2.extensions.register_hstore()`` extension is invoked for all + connections. + +The ``register_hstore()`` extension has the effect of **all Python +dictionaries being accepted as parameters regardless of the type of target +column in SQL**. The dictionaries are converted by this extension into a +textual HSTORE expression. If this behavior is not desired, disable the +use of the hstore extension by setting ``use_native_hstore`` to ``False`` as +follows:: + + engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test", + use_native_hstore=False) + +The ``HSTORE`` type is **still supported** when the +``psycopg2.extensions.register_hstore()`` extension is not used. It merely +means that the coercion between Python dictionaries and the HSTORE +string format, on both the parameter side and the result side, will take +place within SQLAlchemy's own marshalling logic, and not that of ``psycopg2`` +which may be more performant. + +""" +from __future__ import absolute_import + +import re +import logging + +from ... import util, exc +import decimal +from ... import processors +from ...engine import result as _result +from ...sql import expression +from ... import types as sqltypes +from .base import PGDialect, PGCompiler, \ + PGIdentifierPreparer, PGExecutionContext, \ + ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\ + _INT_TYPES, UUID +from .hstore import HSTORE +from .json import JSON, JSONB + +try: + from uuid import UUID as _python_UUID +except ImportError: + _python_UUID = None + + +logger = logging.getLogger('sqlalchemy.dialects.postgresql') + + +class _PGNumeric(sqltypes.Numeric): + def bind_processor(self, dialect): + return None + + def result_processor(self, dialect, coltype): + if self.asdecimal: + if coltype in _FLOAT_TYPES: + return processors.to_decimal_processor_factory( + decimal.Decimal, + self._effective_decimal_return_scale) + elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES: + # pg8000 returns Decimal natively for 1700 + return None + else: + raise exc.InvalidRequestError( + "Unknown PG numeric type: %d" % coltype) + else: + if coltype in _FLOAT_TYPES: + # pg8000 returns float natively for 701 + return None + elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES: + return processors.to_float + else: + raise exc.InvalidRequestError( + "Unknown PG numeric type: %d" % coltype) + + +class _PGEnum(ENUM): + def result_processor(self, dialect, coltype): + if self.native_enum and util.py2k and self.convert_unicode is True: + # we can't easily use PG's extensions here because + # the OID is on the fly, and we need to give it a python + # function anyway - not really worth it. 
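+            # "force_nocheck" forces the unicode conversion step while
+            # skipping the per-value isinstance check, as these values
+            # are known to arrive as non-unicode strings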
+ self.convert_unicode = "force_nocheck" + return super(_PGEnum, self).result_processor(dialect, coltype) + + +class _PGHStore(HSTORE): + def bind_processor(self, dialect): + if dialect._has_native_hstore: + return None + else: + return super(_PGHStore, self).bind_processor(dialect) + + def result_processor(self, dialect, coltype): + if dialect._has_native_hstore: + return None + else: + return super(_PGHStore, self).result_processor(dialect, coltype) + + +class _PGJSON(JSON): + + def result_processor(self, dialect, coltype): + if dialect._has_native_json: + return None + else: + return super(_PGJSON, self).result_processor(dialect, coltype) + + +class _PGJSONB(JSONB): + + def result_processor(self, dialect, coltype): + if dialect._has_native_jsonb: + return None + else: + return super(_PGJSONB, self).result_processor(dialect, coltype) + + +class _PGUUID(UUID): + def bind_processor(self, dialect): + if not self.as_uuid and dialect.use_native_uuid: + nonetype = type(None) + + def process(value): + if value is not None: + value = _python_UUID(value) + return value + return process + + def result_processor(self, dialect, coltype): + if not self.as_uuid and dialect.use_native_uuid: + def process(value): + if value is not None: + value = str(value) + return value + return process + +# When we're handed literal SQL, ensure it's a SELECT query. Since +# 8.3, combining cursors and "FOR UPDATE" has been fine. +SERVER_SIDE_CURSOR_RE = re.compile( + r'\s*SELECT', + re.I | re.UNICODE) + +_server_side_id = util.counter() + + +class PGExecutionContext_psycopg2(PGExecutionContext): + def create_cursor(self): + # TODO: coverage for server side cursors + select.for_update() + + if self.dialect.server_side_cursors: + is_server_side = \ + self.execution_options.get('stream_results', True) and ( + (self.compiled and isinstance(self.compiled.statement, + expression.Selectable) + or + ( + (not self.compiled or + isinstance(self.compiled.statement, + expression.TextClause)) + and self.statement and SERVER_SIDE_CURSOR_RE.match( + self.statement)) + ) + ) + else: + is_server_side = \ + self.execution_options.get('stream_results', False) + + self.__is_server_side = is_server_side + if is_server_side: + # use server-side cursors: + # http://lists.initd.org/pipermail/psycopg/2007-January/005251.html + ident = "c_%s_%s" % (hex(id(self))[2:], + hex(_server_side_id())[2:]) + return self._dbapi_connection.cursor(ident) + else: + return self._dbapi_connection.cursor() + + def get_result_proxy(self): + # TODO: ouch + if logger.isEnabledFor(logging.INFO): + self._log_notices(self.cursor) + + if self.__is_server_side: + return _result.BufferedRowResultProxy(self) + else: + return _result.ResultProxy(self) + + def _log_notices(self, cursor): + for notice in cursor.connection.notices: + # NOTICE messages have a + # newline character at the end + logger.info(notice.rstrip()) + + cursor.connection.notices[:] = [] + + +class PGCompiler_psycopg2(PGCompiler): + def visit_mod_binary(self, binary, operator, **kw): + return self.process(binary.left, **kw) + " %% " + \ + self.process(binary.right, **kw) + + def post_process_text(self, text): + return text.replace('%', '%%') + + +class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer): + def _escape_identifier(self, value): + value = value.replace(self.escape_quote, self.escape_to_quote) + return value.replace('%', '%%') + + +class PGDialect_psycopg2(PGDialect): + driver = 'psycopg2' + if util.py2k: + supports_unicode_statements = False + + default_paramstyle = 'pyformat' + # set to 
true based on psycopg2 version + supports_sane_multi_rowcount = False + execution_ctx_cls = PGExecutionContext_psycopg2 + statement_compiler = PGCompiler_psycopg2 + preparer = PGIdentifierPreparer_psycopg2 + psycopg2_version = (0, 0) + + FEATURE_VERSION_MAP = dict( + native_json=(2, 5), + native_jsonb=(2, 5, 4), + sane_multi_rowcount=(2, 0, 9), + array_oid=(2, 4, 3), + hstore_adapter=(2, 4) + ) + + _has_native_hstore = False + _has_native_json = False + _has_native_jsonb = False + + engine_config_types = PGDialect.engine_config_types.union([ + ('use_native_unicode', util.asbool), + ]) + + colspecs = util.update_copy( + PGDialect.colspecs, + { + sqltypes.Numeric: _PGNumeric, + ENUM: _PGEnum, # needs force_unicode + sqltypes.Enum: _PGEnum, # needs force_unicode + HSTORE: _PGHStore, + JSON: _PGJSON, + JSONB: _PGJSONB, + UUID: _PGUUID + } + ) + + def __init__(self, server_side_cursors=False, use_native_unicode=True, + client_encoding=None, + use_native_hstore=True, use_native_uuid=True, + **kwargs): + PGDialect.__init__(self, **kwargs) + self.server_side_cursors = server_side_cursors + self.use_native_unicode = use_native_unicode + self.use_native_hstore = use_native_hstore + self.use_native_uuid = use_native_uuid + self.supports_unicode_binds = use_native_unicode + self.client_encoding = client_encoding + if self.dbapi and hasattr(self.dbapi, '__version__'): + m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?', + self.dbapi.__version__) + if m: + self.psycopg2_version = tuple( + int(x) + for x in m.group(1, 2, 3) + if x is not None) + + def initialize(self, connection): + super(PGDialect_psycopg2, self).initialize(connection) + self._has_native_hstore = self.use_native_hstore and \ + self._hstore_oids(connection.connection) \ + is not None + self._has_native_json = \ + self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_json'] + self._has_native_jsonb = \ + self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_jsonb'] + + # http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9 + self.supports_sane_multi_rowcount = \ + self.psycopg2_version >= \ + self.FEATURE_VERSION_MAP['sane_multi_rowcount'] + + @classmethod + def dbapi(cls): + import psycopg2 + return psycopg2 + + @classmethod + def _psycopg2_extensions(cls): + from psycopg2 import extensions + return extensions + + @classmethod + def _psycopg2_extras(cls): + from psycopg2 import extras + return extras + + @util.memoized_property + def _isolation_lookup(self): + extensions = self._psycopg2_extensions() + return { + 'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT, + 'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED, + 'READ UNCOMMITTED': extensions.ISOLATION_LEVEL_READ_UNCOMMITTED, + 'REPEATABLE READ': extensions.ISOLATION_LEVEL_REPEATABLE_READ, + 'SERIALIZABLE': extensions.ISOLATION_LEVEL_SERIALIZABLE + } + + def set_isolation_level(self, connection, level): + try: + level = self._isolation_lookup[level.replace('_', ' ')] + except KeyError: + raise exc.ArgumentError( + "Invalid value '%s' for isolation_level. 
" + "Valid isolation levels for %s are %s" % + (level, self.name, ", ".join(self._isolation_lookup)) + ) + + connection.set_isolation_level(level) + + def on_connect(self): + extras = self._psycopg2_extras() + extensions = self._psycopg2_extensions() + + fns = [] + if self.client_encoding is not None: + def on_connect(conn): + conn.set_client_encoding(self.client_encoding) + fns.append(on_connect) + + if self.isolation_level is not None: + def on_connect(conn): + self.set_isolation_level(conn, self.isolation_level) + fns.append(on_connect) + + if self.dbapi and self.use_native_uuid: + def on_connect(conn): + extras.register_uuid(None, conn) + fns.append(on_connect) + + if self.dbapi and self.use_native_unicode: + def on_connect(conn): + extensions.register_type(extensions.UNICODE, conn) + extensions.register_type(extensions.UNICODEARRAY, conn) + fns.append(on_connect) + + if self.dbapi and self.use_native_hstore: + def on_connect(conn): + hstore_oids = self._hstore_oids(conn) + if hstore_oids is not None: + oid, array_oid = hstore_oids + kw = {'oid': oid} + if util.py2k: + kw['unicode'] = True + if self.psycopg2_version >= \ + self.FEATURE_VERSION_MAP['array_oid']: + kw['array_oid'] = array_oid + extras.register_hstore(conn, **kw) + fns.append(on_connect) + + if self.dbapi and self._json_deserializer: + def on_connect(conn): + if self._has_native_json: + extras.register_default_json( + conn, loads=self._json_deserializer) + if self._has_native_jsonb: + extras.register_default_jsonb( + conn, loads=self._json_deserializer) + fns.append(on_connect) + + if fns: + def on_connect(conn): + for fn in fns: + fn(conn) + return on_connect + else: + return None + + @util.memoized_instancemethod + def _hstore_oids(self, conn): + if self.psycopg2_version >= self.FEATURE_VERSION_MAP['hstore_adapter']: + extras = self._psycopg2_extras() + oids = extras.HstoreAdapter.get_oids(conn) + if oids is not None and oids[0]: + return oids[0:2] + return None + + def create_connect_args(self, url): + opts = url.translate_connect_args(username='user') + if 'port' in opts: + opts['port'] = int(opts['port']) + opts.update(url.query) + return ([], opts) + + def is_disconnect(self, e, connection, cursor): + if isinstance(e, self.dbapi.Error): + # check the "closed" flag. this might not be + # present on old psycopg2 versions. Also, + # this flag doesn't actually help in a lot of disconnect + # situations, so don't rely on it. + if getattr(connection, 'closed', False): + return True + + # checks based on strings. in the case that .closed + # didn't cut it, fall back onto these. + str_e = str(e).partition("\n")[0] + for msg in [ + # these error messages from libpq: interfaces/libpq/fe-misc.c + # and interfaces/libpq/fe-secure.c. + 'terminating connection', + 'closed the connection', + 'connection not open', + 'could not receive data from server', + 'could not send data to server', + # psycopg2 client errors, psycopg2/conenction.h, + # psycopg2/cursor.h + 'connection already closed', + 'cursor already closed', + # not sure where this path is originally from, it may + # be obsolete. It really says "losed", not "closed". 
+ 'losed the connection unexpectedly', + # these can occur in newer SSL + 'connection has been closed unexpectedly', + 'SSL SYSCALL error: Bad file descriptor', + 'SSL SYSCALL error: EOF detected', + ]: + idx = str_e.find(msg) + if idx >= 0 and '"' not in str_e[:idx]: + return True + return False + +dialect = PGDialect_psycopg2 diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/psycopg2cffi.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/psycopg2cffi.py new file mode 100644 index 0000000..ab99a83 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/psycopg2cffi.py @@ -0,0 +1,61 @@ +# testing/engines.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +""" +.. dialect:: postgresql+psycopg2cffi + :name: psycopg2cffi + :dbapi: psycopg2cffi + :connectstring: \ +postgresql+psycopg2cffi://user:password@host:port/dbname\ +[?key=value&key=value...] + :url: http://pypi.python.org/pypi/psycopg2cffi/ + +``psycopg2cffi`` is an adaptation of ``psycopg2``, using CFFI for the C +layer. This makes it suitable for use in e.g. PyPy. Documentation +is as per ``psycopg2``. + +.. versionadded:: 1.0.0 + +.. seealso:: + + :mod:`sqlalchemy.dialects.postgresql.psycopg2` + +""" +from .psycopg2 import PGDialect_psycopg2 + + +class PGDialect_psycopg2cffi(PGDialect_psycopg2): + driver = 'psycopg2cffi' + supports_unicode_statements = True + + # psycopg2cffi's first release is 2.5.0, but reports + # __version__ as 2.4.4. Subsequent releases seem to have + # fixed this. + + FEATURE_VERSION_MAP = dict( + native_json=(2, 4, 4), + native_jsonb=(2, 7, 1), + sane_multi_rowcount=(2, 4, 4), + array_oid=(2, 4, 4), + hstore_adapter=(2, 4, 4) + ) + + @classmethod + def dbapi(cls): + return __import__('psycopg2cffi') + + @classmethod + def _psycopg2_extensions(cls): + root = __import__('psycopg2cffi', fromlist=['extensions']) + return root.extensions + + @classmethod + def _psycopg2_extras(cls): + root = __import__('psycopg2cffi', fromlist=['extras']) + return root.extras + + +dialect = PGDialect_psycopg2cffi diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pypostgresql.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pypostgresql.py new file mode 100644 index 0000000..f2b850a --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pypostgresql.py @@ -0,0 +1,97 @@ +# postgresql/pypostgresql.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: postgresql+pypostgresql + :name: py-postgresql + :dbapi: pypostgresql + :connectstring: postgresql+pypostgresql://user:password@host:port/dbname\ +[?key=value&key=value...] + :url: http://python.projects.pgfoundry.org/ + + +""" +from ... import util +from ... import types as sqltypes +from .base import PGDialect, PGExecutionContext +from ... 
import processors + + +class PGNumeric(sqltypes.Numeric): + def bind_processor(self, dialect): + return processors.to_str + + def result_processor(self, dialect, coltype): + if self.asdecimal: + return None + else: + return processors.to_float + + +class PGExecutionContext_pypostgresql(PGExecutionContext): + pass + + +class PGDialect_pypostgresql(PGDialect): + driver = 'pypostgresql' + + supports_unicode_statements = True + supports_unicode_binds = True + description_encoding = None + default_paramstyle = 'pyformat' + + # requires trunk version to support sane rowcounts + # TODO: use dbapi version information to set this flag appropriately + supports_sane_rowcount = True + supports_sane_multi_rowcount = False + + execution_ctx_cls = PGExecutionContext_pypostgresql + colspecs = util.update_copy( + PGDialect.colspecs, + { + sqltypes.Numeric: PGNumeric, + + # prevents PGNumeric from being used + sqltypes.Float: sqltypes.Float, + } + ) + + @classmethod + def dbapi(cls): + from postgresql.driver import dbapi20 + return dbapi20 + + _DBAPI_ERROR_NAMES = [ + "Error", + "InterfaceError", "DatabaseError", "DataError", + "OperationalError", "IntegrityError", "InternalError", + "ProgrammingError", "NotSupportedError" + ] + + @util.memoized_property + def dbapi_exception_translation_map(self): + if self.dbapi is None: + return {} + + return dict( + (getattr(self.dbapi, name).__name__, name) + for name in self._DBAPI_ERROR_NAMES + ) + + def create_connect_args(self, url): + opts = url.translate_connect_args(username='user') + if 'port' in opts: + opts['port'] = int(opts['port']) + else: + opts['port'] = 5432 + opts.update(url.query) + return ([], opts) + + def is_disconnect(self, e, connection, cursor): + return "connection is closed" in str(e) + +dialect = PGDialect_pypostgresql diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/ranges.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/ranges.py new file mode 100644 index 0000000..42a1cd4 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/ranges.py @@ -0,0 +1,168 @@ +# Copyright (C) 2013-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from .base import ischema_names +from ... import types as sqltypes + +__all__ = ('INT4RANGE', 'INT8RANGE', 'NUMRANGE') + + +class RangeOperators(object): + """ + This mixin provides functionality for the Range Operators + listed in Table 9-44 of the `postgres documentation`__ for Range + Functions and Operators. It is used by all the range types + provided in the ``postgres`` dialect and can likely be used for + any range types you create yourself. + + __ http://www.postgresql.org/docs/devel/static/functions-range.html + + No extra support is provided for the Range Functions listed in + Table 9-45 of the postgres documentation. For these, the normal + :func:`~sqlalchemy.sql.expression.func` object should be used. + + .. versionadded:: 0.8.2 Support for Postgresql RANGE operations. + + """ + + class comparator_factory(sqltypes.Concatenable.Comparator): + """Define comparison operations for range types.""" + + def __ne__(self, other): + "Boolean expression. Returns true if two ranges are not equal" + return self.expr.op('<>')(other) + + def contains(self, other, **kw): + """Boolean expression. Returns true if the right hand operand, + which can be an element or a range, is contained within the + column. 
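+
+            The extra ``**kw`` arguments here are ignored; they allow the
+            method to accept the same arguments as the generic
+            ``ColumnOperators.contains()`` method.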
+ """ + return self.expr.op('@>')(other) + + def contained_by(self, other): + """Boolean expression. Returns true if the column is contained + within the right hand operand. + """ + return self.expr.op('<@')(other) + + def overlaps(self, other): + """Boolean expression. Returns true if the column overlaps + (has points in common with) the right hand operand. + """ + return self.expr.op('&&')(other) + + def strictly_left_of(self, other): + """Boolean expression. Returns true if the column is strictly + left of the right hand operand. + """ + return self.expr.op('<<')(other) + + __lshift__ = strictly_left_of + + def strictly_right_of(self, other): + """Boolean expression. Returns true if the column is strictly + right of the right hand operand. + """ + return self.expr.op('>>')(other) + + __rshift__ = strictly_right_of + + def not_extend_right_of(self, other): + """Boolean expression. Returns true if the range in the column + does not extend right of the range in the operand. + """ + return self.expr.op('&<')(other) + + def not_extend_left_of(self, other): + """Boolean expression. Returns true if the range in the column + does not extend left of the range in the operand. + """ + return self.expr.op('&>')(other) + + def adjacent_to(self, other): + """Boolean expression. Returns true if the range in the column + is adjacent to the range in the operand. + """ + return self.expr.op('-|-')(other) + + def __add__(self, other): + """Range expression. Returns the union of the two ranges. + Will raise an exception if the resulting range is not + contigous. + """ + return self.expr.op('+')(other) + + +class INT4RANGE(RangeOperators, sqltypes.TypeEngine): + """Represent the Postgresql INT4RANGE type. + + .. versionadded:: 0.8.2 + + """ + + __visit_name__ = 'INT4RANGE' + +ischema_names['int4range'] = INT4RANGE + + +class INT8RANGE(RangeOperators, sqltypes.TypeEngine): + """Represent the Postgresql INT8RANGE type. + + .. versionadded:: 0.8.2 + + """ + + __visit_name__ = 'INT8RANGE' + +ischema_names['int8range'] = INT8RANGE + + +class NUMRANGE(RangeOperators, sqltypes.TypeEngine): + """Represent the Postgresql NUMRANGE type. + + .. versionadded:: 0.8.2 + + """ + + __visit_name__ = 'NUMRANGE' + +ischema_names['numrange'] = NUMRANGE + + +class DATERANGE(RangeOperators, sqltypes.TypeEngine): + """Represent the Postgresql DATERANGE type. + + .. versionadded:: 0.8.2 + + """ + + __visit_name__ = 'DATERANGE' + +ischema_names['daterange'] = DATERANGE + + +class TSRANGE(RangeOperators, sqltypes.TypeEngine): + """Represent the Postgresql TSRANGE type. + + .. versionadded:: 0.8.2 + + """ + + __visit_name__ = 'TSRANGE' + +ischema_names['tsrange'] = TSRANGE + + +class TSTZRANGE(RangeOperators, sqltypes.TypeEngine): + """Represent the Postgresql TSTZRANGE type. + + .. versionadded:: 0.8.2 + + """ + + __visit_name__ = 'TSTZRANGE' + +ischema_names['tstzrange'] = TSTZRANGE diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/zxjdbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/zxjdbc.py new file mode 100644 index 0000000..cc46460 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/zxjdbc.py @@ -0,0 +1,46 @@ +# postgresql/zxjdbc.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. 
dialect:: postgresql+zxjdbc + :name: zxJDBC for Jython + :dbapi: zxjdbc + :connectstring: postgresql+zxjdbc://scott:tiger@localhost/db + :driverurl: http://jdbc.postgresql.org/ + + +""" +from ...connectors.zxJDBC import ZxJDBCConnector +from .base import PGDialect, PGExecutionContext + + +class PGExecutionContext_zxjdbc(PGExecutionContext): + + def create_cursor(self): + cursor = self._dbapi_connection.cursor() + cursor.datahandler = self.dialect.DataHandler(cursor.datahandler) + return cursor + + +class PGDialect_zxjdbc(ZxJDBCConnector, PGDialect): + jdbc_db_name = 'postgresql' + jdbc_driver_name = 'org.postgresql.Driver' + + execution_ctx_cls = PGExecutionContext_zxjdbc + + supports_native_decimal = True + + def __init__(self, *args, **kwargs): + super(PGDialect_zxjdbc, self).__init__(*args, **kwargs) + from com.ziclix.python.sql.handler import PostgresqlDataHandler + self.DataHandler = PostgresqlDataHandler + + def _get_server_version_info(self, connection): + parts = connection.connection.dbversion.split('.') + return tuple(int(x) for x in parts) + +dialect = PGDialect_zxjdbc diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/__init__.py new file mode 100644 index 0000000..a8dec30 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/__init__.py @@ -0,0 +1,20 @@ +# sqlite/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from sqlalchemy.dialects.sqlite import base, pysqlite, pysqlcipher + +# default dialect +base.dialect = pysqlite.dialect + +from sqlalchemy.dialects.sqlite.base import ( + BLOB, BOOLEAN, CHAR, DATE, DATETIME, DECIMAL, FLOAT, INTEGER, REAL, + NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, VARCHAR, dialect, +) + +__all__ = ('BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL', + 'FLOAT', 'INTEGER', 'NUMERIC', 'SMALLINT', 'TEXT', 'TIME', + 'TIMESTAMP', 'VARCHAR', 'REAL', 'dialect') diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/base.py new file mode 100644 index 0000000..e623ff0 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/base.py @@ -0,0 +1,1488 @@ +# sqlite/base.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: sqlite + :name: SQLite + +.. _sqlite_datetime: + +Date and Time Types +------------------- + +SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does +not provide out of the box functionality for translating values between Python +`datetime` objects and a SQLite-supported format. SQLAlchemy's own +:class:`~sqlalchemy.types.DateTime` and related types provide date formatting +and parsing functionality when SQlite is used. The implementation classes are +:class:`~.sqlite.DATETIME`, :class:`~.sqlite.DATE` and :class:`~.sqlite.TIME`. +These types represent dates and times as ISO formatted strings, which also +nicely support ordering. There's no reliance on typical "libc" internals for +these functions so historical dates are fully supported. 
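+A minimal sketch of the round trip, assuming an in-memory database (the
+table and column names are illustrative only)::
+
+    import datetime
+    from sqlalchemy import (
+        create_engine, MetaData, Table, Column, Integer, DateTime)
+
+    engine = create_engine("sqlite://")
+    metadata = MetaData()
+    log = Table('log', metadata,
+                Column('id', Integer, primary_key=True),
+                Column('created', DateTime))
+    metadata.create_all(engine)
+
+    conn = engine.connect()
+    conn.execute(log.insert(), created=datetime.datetime(2011, 3, 15, 12, 5, 57))
+
+    # at the driver level, the value is stored as an ISO-formatted string
+    assert conn.execute("select created from log").scalar() == \
+        '2011-03-15 12:05:57.000000'
+
+    # the DATETIME result processor parses it back into a datetime
+    row = conn.execute(log.select()).first()
+    assert isinstance(row['created'], datetime.datetime)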
+ +Ensuring Text affinity +^^^^^^^^^^^^^^^^^^^^^^ + +The DDL rendered for these types is the standard ``DATE``, ``TIME`` +and ``DATETIME`` indicators. However, custom storage formats can also be +applied to these types. When the +storage format is detected as containing no alpha characters, the DDL for +these types is rendered as ``DATE_CHAR``, ``TIME_CHAR``, and ``DATETIME_CHAR``, +so that the column continues to have textual affinity. + +.. seealso:: + + `Type Affinity `_ - in the SQLite documentation + +.. _sqlite_autoincrement: + +SQLite Auto Incrementing Behavior +---------------------------------- + +Background on SQLite's autoincrement is at: http://sqlite.org/autoinc.html + +Key concepts: + +* SQLite has an implicit "auto increment" feature that takes place for any + non-composite primary-key column that is specifically created using + "INTEGER PRIMARY KEY" for the type + primary key. + +* SQLite also has an explicit "AUTOINCREMENT" keyword, that is **not** + equivalent to the implicit autoincrement feature; this keyword is not + recommended for general use. SQLAlchemy does not render this keyword + unless a special SQLite-specific directive is used (see below). However, + it still requires that the column's type is named "INTEGER". + +Using the AUTOINCREMENT Keyword +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To specifically render the AUTOINCREMENT keyword on the primary key column +when rendering DDL, add the flag ``sqlite_autoincrement=True`` to the Table +construct:: + + Table('sometable', metadata, + Column('id', Integer, primary_key=True), + sqlite_autoincrement=True) + +Allowing autoincrement behavior SQLAlchemy types other than Integer/INTEGER +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +SQLite's typing model is based on naming conventions. Among +other things, this means that any type name which contains the +substring ``"INT"`` will be determined to be of "integer affinity". A +type named ``"BIGINT"``, ``"SPECIAL_INT"`` or even ``"XYZINTQPR"``, will be considered by +SQLite to be of "integer" affinity. However, **the SQLite +autoincrement feature, whether implicitly or explicitly enabled, +requires that the name of the column's type +is exactly the string "INTEGER"**. Therefore, if an +application uses a type like :class:`.BigInteger` for a primary key, on +SQLite this type will need to be rendered as the name ``"INTEGER"`` when +emitting the initial ``CREATE TABLE`` statement in order for the autoincrement +behavior to be available. + +One approach to achieve this is to use :class:`.Integer` on SQLite +only using :meth:`.TypeEngine.with_variant`:: + + table = Table( + "my_table", metadata, + Column("id", BigInteger().with_variant(Integer, "sqlite"), primary_key=True) + ) + +Another is to use a subclass of :class:`.BigInteger` that overrides its DDL name +to be ``INTEGER`` when compiled against SQLite:: + + from sqlalchemy import BigInteger + from sqlalchemy.ext.compiler import compiles + + class SLBigInteger(BigInteger): + pass + + @compiles(SLBigInteger, 'sqlite') + def bi_c(element, compiler, **kw): + return "INTEGER" + + @compiles(SLBigInteger) + def bi_c(element, compiler, **kw): + return compiler.visit_BIGINT(element, **kw) + + + table = Table( + "my_table", metadata, + Column("id", SLBigInteger(), primary_key=True) + ) + +.. seealso:: + + :meth:`.TypeEngine.with_variant` + + :ref:`sqlalchemy.ext.compiler_toplevel` + + `Datatypes In SQLite Version 3 `_ + +.. 
_sqlite_concurrency: + +Database Locking Behavior / Concurrency +--------------------------------------- + +SQLite is not designed for a high level of write concurrency. The database +itself, being a file, is locked completely during write operations within +transactions, meaning exactly one "connection" (in reality a file handle) +has exclusive access to the database during this period - all other +"connections" will be blocked during this time. + +The Python DBAPI specification also calls for a connection model that is +always in a transaction; there is no ``connection.begin()`` method, +only ``connection.commit()`` and ``connection.rollback()``, upon which a +new transaction is to be begun immediately. This may seem to imply +that the SQLite driver would in theory allow only a single filehandle on a +particular database file at any time; however, there are several +factors both within SQlite itself as well as within the pysqlite driver +which loosen this restriction significantly. + +However, no matter what locking modes are used, SQLite will still always +lock the database file once a transaction is started and DML (e.g. INSERT, +UPDATE, DELETE) has at least been emitted, and this will block +other transactions at least at the point that they also attempt to emit DML. +By default, the length of time on this block is very short before it times out +with an error. + +This behavior becomes more critical when used in conjunction with the +SQLAlchemy ORM. SQLAlchemy's :class:`.Session` object by default runs +within a transaction, and with its autoflush model, may emit DML preceding +any SELECT statement. This may lead to a SQLite database that locks +more quickly than is expected. The locking mode of SQLite and the pysqlite +driver can be manipulated to some degree, however it should be noted that +achieving a high degree of write-concurrency with SQLite is a losing battle. + +For more information on SQLite's lack of write concurrency by design, please +see +`Situations Where Another RDBMS May Work Better - High Concurrency +`_ near the bottom of the page. + +The following subsections introduce areas that are impacted by SQLite's +file-based architecture and additionally will usually require workarounds to +work when using the pysqlite driver. + +.. _sqlite_isolation_level: + +Transaction Isolation Level +---------------------------- + +SQLite supports "transaction isolation" in a non-standard way, along two +axes. One is that of the `PRAGMA read_uncommitted `_ +instruction. This setting can essentially switch SQLite between its +default mode of ``SERIALIZABLE`` isolation, and a "dirty read" isolation +mode normally referred to as ``READ UNCOMMITTED``. + +SQLAlchemy ties into this PRAGMA statement using the +:paramref:`.create_engine.isolation_level` parameter of :func:`.create_engine`. +Valid values for this parameter when used with SQLite are ``"SERIALIZABLE"`` +and ``"READ UNCOMMITTED"`` corresponding to a value of 0 and 1, respectively. +SQLite defaults to ``SERIALIZABLE``, however its behavior is impacted by +the pysqlite driver's default behavior. + +The other axis along which SQLite's transactional locking is impacted is +via the nature of the ``BEGIN`` statement used. The three varieties +are "deferred", "immediate", and "exclusive", as described at +`BEGIN TRANSACTION `_. 
A straight +``BEGIN`` statement uses the "deferred" mode, where the the database file is +not locked until the first read or write operation, and read access remains +open to other transactions until the first write operation. But again, +it is critical to note that the pysqlite driver interferes with this behavior +by *not even emitting BEGIN* until the first write operation. + +.. warning:: + + SQLite's transactional scope is impacted by unresolved + issues in the pysqlite driver, which defers BEGIN statements to a greater + degree than is often feasible. See the section :ref:`pysqlite_serializable` + for techniques to work around this behavior. + +SAVEPOINT Support +---------------------------- + +SQLite supports SAVEPOINTs, which only function once a transaction is +begun. SQLAlchemy's SAVEPOINT support is available using the +:meth:`.Connection.begin_nested` method at the Core level, and +:meth:`.Session.begin_nested` at the ORM level. However, SAVEPOINTs +won't work at all with pysqlite unless workarounds are taken. + +.. warning:: + + SQLite's SAVEPOINT feature is impacted by unresolved + issues in the pysqlite driver, which defers BEGIN statements to a greater + degree than is often feasible. See the section :ref:`pysqlite_serializable` + for techniques to work around this behavior. + +Transactional DDL +---------------------------- + +The SQLite database supports transactional :term:`DDL` as well. +In this case, the pysqlite driver is not only failing to start transactions, +it also is ending any existing transction when DDL is detected, so again, +workarounds are required. + +.. warning:: + + SQLite's transactional DDL is impacted by unresolved issues + in the pysqlite driver, which fails to emit BEGIN and additionally + forces a COMMIT to cancel any transaction when DDL is encountered. + See the section :ref:`pysqlite_serializable` + for techniques to work around this behavior. + +.. _sqlite_foreign_keys: + +Foreign Key Support +------------------- + +SQLite supports FOREIGN KEY syntax when emitting CREATE statements for tables, +however by default these constraints have no effect on the operation of the +table. + +Constraint checking on SQLite has three prerequisites: + +* At least version 3.6.19 of SQLite must be in use +* The SQLite library must be compiled *without* the SQLITE_OMIT_FOREIGN_KEY + or SQLITE_OMIT_TRIGGER symbols enabled. +* The ``PRAGMA foreign_keys = ON`` statement must be emitted on all + connections before use. + +SQLAlchemy allows for the ``PRAGMA`` statement to be emitted automatically for +new connections through the usage of events:: + + from sqlalchemy.engine import Engine + from sqlalchemy import event + + @event.listens_for(Engine, "connect") + def set_sqlite_pragma(dbapi_connection, connection_record): + cursor = dbapi_connection.cursor() + cursor.execute("PRAGMA foreign_keys=ON") + cursor.close() + +.. warning:: + + When SQLite foreign keys are enabled, it is **not possible** + to emit CREATE or DROP statements for tables that contain + mutually-dependent foreign key constraints; + to emit the DDL for these tables requires that ALTER TABLE be used to + create or drop these constraints separately, for which SQLite has + no support. + +.. seealso:: + + `SQLite Foreign Key Support `_ + - on the SQLite web site. + + :ref:`event_toplevel` - SQLAlchemy event API. + + :ref:`use_alter` - more information on SQLAlchemy's facilities for handling + mutually-dependent foreign key constraints. + +.. 
_sqlite_type_reflection: + +Type Reflection +--------------- + +SQLite types are unlike those of most other database backends, in that +the string name of the type usually does not correspond to a "type" in a +one-to-one fashion. Instead, SQLite links per-column typing behavior +to one of five so-called "type affinities" based on a string matching +pattern for the type. + +SQLAlchemy's reflection process, when inspecting types, uses a simple +lookup table to link the keywords returned to provided SQLAlchemy types. +This lookup table is present within the SQLite dialect as it is for all +other dialects. However, the SQLite dialect has a different "fallback" +routine for when a particular type name is not located in the lookup map; +it instead implements the SQLite "type affinity" scheme located at +http://www.sqlite.org/datatype3.html section 2.1. + +The provided typemap will make direct associations from an exact string +name match for the following types: + +:class:`~.types.BIGINT`, :class:`~.types.BLOB`, +:class:`~.types.BOOLEAN`, :class:`~.types.BOOLEAN`, +:class:`~.types.CHAR`, :class:`~.types.DATE`, +:class:`~.types.DATETIME`, :class:`~.types.FLOAT`, +:class:`~.types.DECIMAL`, :class:`~.types.FLOAT`, +:class:`~.types.INTEGER`, :class:`~.types.INTEGER`, +:class:`~.types.NUMERIC`, :class:`~.types.REAL`, +:class:`~.types.SMALLINT`, :class:`~.types.TEXT`, +:class:`~.types.TIME`, :class:`~.types.TIMESTAMP`, +:class:`~.types.VARCHAR`, :class:`~.types.NVARCHAR`, +:class:`~.types.NCHAR` + +When a type name does not match one of the above types, the "type affinity" +lookup is used instead: + +* :class:`~.types.INTEGER` is returned if the type name includes the + string ``INT`` +* :class:`~.types.TEXT` is returned if the type name includes the + string ``CHAR``, ``CLOB`` or ``TEXT`` +* :class:`~.types.NullType` is returned if the type name includes the + string ``BLOB`` +* :class:`~.types.REAL` is returned if the type name includes the string + ``REAL``, ``FLOA`` or ``DOUB``. +* Otherwise, the :class:`~.types.NUMERIC` type is used. + +.. versionadded:: 0.9.3 Support for SQLite type affinity rules when reflecting + columns. + + +.. _sqlite_partial_index: + +Partial Indexes +--------------- + +A partial index, e.g. one which uses a WHERE clause, can be specified +with the DDL system using the argument ``sqlite_where``:: + + tbl = Table('testtbl', m, Column('data', Integer)) + idx = Index('test_idx1', tbl.c.data, + sqlite_where=and_(tbl.c.data > 5, tbl.c.data < 10)) + +The index will be rendered at create time as:: + + CREATE INDEX test_idx1 ON testtbl (data) + WHERE data > 5 AND data < 10 + +.. versionadded:: 0.9.9 + +Dotted Column Names +------------------- + +Using table or column names that explicitly have periods in them is +**not recommended**. While this is generally a bad idea for relational +databases in general, as the dot is a syntactically significant character, +the SQLite driver up until version **3.10.0** of SQLite has a bug which +requires that SQLAlchemy filter out these dots in result sets. + +.. note:: + + The following SQLite issue has been resolved as of version 3.10.0 + of SQLite. SQLAlchemy as of **1.1** automatically disables its internal + workarounds based on detection of this version. 
+ +The bug, entirely outside of SQLAlchemy, can be illustrated thusly:: + + import sqlite3 + + conn = sqlite3.connect(":memory:") + cursor = conn.cursor() + + cursor.execute("create table x (a integer, b integer)") + cursor.execute("insert into x (a, b) values (1, 1)") + cursor.execute("insert into x (a, b) values (2, 2)") + + cursor.execute("select x.a, x.b from x") + assert [c[0] for c in cursor.description] == ['a', 'b'] + + cursor.execute(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert [c[0] for c in cursor.description] == ['a', 'b'], \\ + [c[0] for c in cursor.description] + +The second assertion fails:: + + Traceback (most recent call last): + File "test.py", line 19, in + [c[0] for c in cursor.description] + AssertionError: ['x.a', 'x.b'] + +Where above, the driver incorrectly reports the names of the columns +including the name of the table, which is entirely inconsistent vs. +when the UNION is not present. + +SQLAlchemy relies upon column names being predictable in how they match +to the original statement, so the SQLAlchemy dialect has no choice but +to filter these out:: + + + from sqlalchemy import create_engine + + eng = create_engine("sqlite://") + conn = eng.connect() + + conn.execute("create table x (a integer, b integer)") + conn.execute("insert into x (a, b) values (1, 1)") + conn.execute("insert into x (a, b) values (2, 2)") + + result = conn.execute("select x.a, x.b from x") + assert result.keys() == ["a", "b"] + + result = conn.execute(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert result.keys() == ["a", "b"] + +Note that above, even though SQLAlchemy filters out the dots, *both +names are still addressable*:: + + >>> row = result.first() + >>> row["a"] + 1 + >>> row["x.a"] + 1 + >>> row["b"] + 1 + >>> row["x.b"] + 1 + +Therefore, the workaround applied by SQLAlchemy only impacts +:meth:`.ResultProxy.keys` and :meth:`.RowProxy.keys()` in the public API. +In the very specific case where +an application is forced to use column names that contain dots, and the +functionality of :meth:`.ResultProxy.keys` and :meth:`.RowProxy.keys()` +is required to return these dotted names unmodified, the ``sqlite_raw_colnames`` +execution option may be provided, either on a per-:class:`.Connection` basis:: + + result = conn.execution_options(sqlite_raw_colnames=True).execute(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert result.keys() == ["x.a", "x.b"] + +or on a per-:class:`.Engine` basis:: + + engine = create_engine("sqlite://", execution_options={"sqlite_raw_colnames": True}) + +When using the per-:class:`.Engine` execution option, note that +**Core and ORM queries that use UNION may not function properly**. + +""" + +import datetime +import re + +from ... import processors +from ... import sql, exc +from ... import types as sqltypes, schema as sa_schema +from ... 
import util +from ...engine import default, reflection +from ...sql import compiler + +from ...types import (BLOB, BOOLEAN, CHAR, DECIMAL, FLOAT, + INTEGER, REAL, NUMERIC, SMALLINT, TEXT, + TIMESTAMP, VARCHAR) + + +class _DateTimeMixin(object): + _reg = None + _storage_format = None + + def __init__(self, storage_format=None, regexp=None, **kw): + super(_DateTimeMixin, self).__init__(**kw) + if regexp is not None: + self._reg = re.compile(regexp) + if storage_format is not None: + self._storage_format = storage_format + + @property + def format_is_text_affinity(self): + """return True if the storage format will automatically imply + a TEXT affinity. + + If the storage format contains no non-numeric characters, + it will imply a NUMERIC storage format on SQLite; in this case, + the type will generate its DDL as DATE_CHAR, DATETIME_CHAR, + TIME_CHAR. + + .. versionadded:: 1.0.0 + + """ + spec = self._storage_format % { + "year": 0, "month": 0, "day": 0, "hour": 0, + "minute": 0, "second": 0, "microsecond": 0 + } + return bool(re.search(r'[^0-9]', spec)) + + def adapt(self, cls, **kw): + if issubclass(cls, _DateTimeMixin): + if self._storage_format: + kw["storage_format"] = self._storage_format + if self._reg: + kw["regexp"] = self._reg + return super(_DateTimeMixin, self).adapt(cls, **kw) + + def literal_processor(self, dialect): + bp = self.bind_processor(dialect) + + def process(value): + return "'%s'" % bp(value) + return process + + +class DATETIME(_DateTimeMixin, sqltypes.DateTime): + """Represent a Python datetime object in SQLite using a string. + + The default string storage format is:: + + "%(year)04d-%(month)02d-%(day)02d %(hour)02d:%(min)02d:\ +%(second)02d.%(microsecond)06d" + + e.g.:: + + 2011-03-15 12:05:57.10558 + + The storage format can be customized to some degree using the + ``storage_format`` and ``regexp`` parameters, such as:: + + import re + from sqlalchemy.dialects.sqlite import DATETIME + + dt = DATETIME( + storage_format="%(year)04d/%(month)02d/%(day)02d %(hour)02d:\ +%(min)02d:%(second)02d", + regexp=r"(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)" + ) + + :param storage_format: format string which will be applied to the dict + with keys year, month, day, hour, minute, second, and microsecond. + + :param regexp: regular expression which will be applied to incoming result + rows. If the regexp contains named groups, the resulting match dict is + applied to the Python datetime() constructor as keyword arguments. + Otherwise, if positional groups are used, the datetime() constructor + is called with positional arguments via + ``*map(int, match_obj.groups(0))``. + """ + + _storage_format = ( + "%(year)04d-%(month)02d-%(day)02d " + "%(hour)02d:%(minute)02d:%(second)02d.%(microsecond)06d" + ) + + def __init__(self, *args, **kwargs): + truncate_microseconds = kwargs.pop('truncate_microseconds', False) + super(DATETIME, self).__init__(*args, **kwargs) + if truncate_microseconds: + assert 'storage_format' not in kwargs, "You can specify only "\ + "one of truncate_microseconds or storage_format." + assert 'regexp' not in kwargs, "You can specify only one of "\ + "truncate_microseconds or regexp." 
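+            # rewriting the storage format below drops the %(microsecond)06d
+            # field, so values are persisted at whole-second precision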
+ self._storage_format = ( + "%(year)04d-%(month)02d-%(day)02d " + "%(hour)02d:%(minute)02d:%(second)02d" + ) + + def bind_processor(self, dialect): + datetime_datetime = datetime.datetime + datetime_date = datetime.date + format = self._storage_format + + def process(value): + if value is None: + return None + elif isinstance(value, datetime_datetime): + return format % { + 'year': value.year, + 'month': value.month, + 'day': value.day, + 'hour': value.hour, + 'minute': value.minute, + 'second': value.second, + 'microsecond': value.microsecond, + } + elif isinstance(value, datetime_date): + return format % { + 'year': value.year, + 'month': value.month, + 'day': value.day, + 'hour': 0, + 'minute': 0, + 'second': 0, + 'microsecond': 0, + } + else: + raise TypeError("SQLite DateTime type only accepts Python " + "datetime and date objects as input.") + return process + + def result_processor(self, dialect, coltype): + if self._reg: + return processors.str_to_datetime_processor_factory( + self._reg, datetime.datetime) + else: + return processors.str_to_datetime + + +class DATE(_DateTimeMixin, sqltypes.Date): + """Represent a Python date object in SQLite using a string. + + The default string storage format is:: + + "%(year)04d-%(month)02d-%(day)02d" + + e.g.:: + + 2011-03-15 + + The storage format can be customized to some degree using the + ``storage_format`` and ``regexp`` parameters, such as:: + + import re + from sqlalchemy.dialects.sqlite import DATE + + d = DATE( + storage_format="%(month)02d/%(day)02d/%(year)04d", + regexp=re.compile("(?P\d+)/(?P\d+)/(?P\d+)") + ) + + :param storage_format: format string which will be applied to the + dict with keys year, month, and day. + + :param regexp: regular expression which will be applied to + incoming result rows. If the regexp contains named groups, the + resulting match dict is applied to the Python date() constructor + as keyword arguments. Otherwise, if positional groups are used, the + date() constructor is called with positional arguments via + ``*map(int, match_obj.groups(0))``. + """ + + _storage_format = "%(year)04d-%(month)02d-%(day)02d" + + def bind_processor(self, dialect): + datetime_date = datetime.date + format = self._storage_format + + def process(value): + if value is None: + return None + elif isinstance(value, datetime_date): + return format % { + 'year': value.year, + 'month': value.month, + 'day': value.day, + } + else: + raise TypeError("SQLite Date type only accepts Python " + "date objects as input.") + return process + + def result_processor(self, dialect, coltype): + if self._reg: + return processors.str_to_datetime_processor_factory( + self._reg, datetime.date) + else: + return processors.str_to_date + + +class TIME(_DateTimeMixin, sqltypes.Time): + """Represent a Python time object in SQLite using a string. + + The default string storage format is:: + + "%(hour)02d:%(minute)02d:%(second)02d.%(microsecond)06d" + + e.g.:: + + 12:05:57.10558 + + The storage format can be customized to some degree using the + ``storage_format`` and ``regexp`` parameters, such as:: + + import re + from sqlalchemy.dialects.sqlite import TIME + + t = TIME( + storage_format="%(hour)02d-%(minute)02d-%(second)02d-\ +%(microsecond)06d", + regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?") + ) + + :param storage_format: format string which will be applied to the dict + with keys hour, minute, second, and microsecond. + + :param regexp: regular expression which will be applied to incoming result + rows. 
If the regexp contains named groups, the resulting match dict is + applied to the Python time() constructor as keyword arguments. Otherwise, + if positional groups are used, the time() constructor is called with + positional arguments via ``*map(int, match_obj.groups(0))``. + """ + + _storage_format = "%(hour)02d:%(minute)02d:%(second)02d.%(microsecond)06d" + + def __init__(self, *args, **kwargs): + truncate_microseconds = kwargs.pop('truncate_microseconds', False) + super(TIME, self).__init__(*args, **kwargs) + if truncate_microseconds: + assert 'storage_format' not in kwargs, "You can specify only "\ + "one of truncate_microseconds or storage_format." + assert 'regexp' not in kwargs, "You can specify only one of "\ + "truncate_microseconds or regexp." + self._storage_format = "%(hour)02d:%(minute)02d:%(second)02d" + + def bind_processor(self, dialect): + datetime_time = datetime.time + format = self._storage_format + + def process(value): + if value is None: + return None + elif isinstance(value, datetime_time): + return format % { + 'hour': value.hour, + 'minute': value.minute, + 'second': value.second, + 'microsecond': value.microsecond, + } + else: + raise TypeError("SQLite Time type only accepts Python " + "time objects as input.") + return process + + def result_processor(self, dialect, coltype): + if self._reg: + return processors.str_to_datetime_processor_factory( + self._reg, datetime.time) + else: + return processors.str_to_time + +colspecs = { + sqltypes.Date: DATE, + sqltypes.DateTime: DATETIME, + sqltypes.Time: TIME, +} + +ischema_names = { + 'BIGINT': sqltypes.BIGINT, + 'BLOB': sqltypes.BLOB, + 'BOOL': sqltypes.BOOLEAN, + 'BOOLEAN': sqltypes.BOOLEAN, + 'CHAR': sqltypes.CHAR, + 'DATE': sqltypes.DATE, + 'DATE_CHAR': sqltypes.DATE, + 'DATETIME': sqltypes.DATETIME, + 'DATETIME_CHAR': sqltypes.DATETIME, + 'DOUBLE': sqltypes.FLOAT, + 'DECIMAL': sqltypes.DECIMAL, + 'FLOAT': sqltypes.FLOAT, + 'INT': sqltypes.INTEGER, + 'INTEGER': sqltypes.INTEGER, + 'NUMERIC': sqltypes.NUMERIC, + 'REAL': sqltypes.REAL, + 'SMALLINT': sqltypes.SMALLINT, + 'TEXT': sqltypes.TEXT, + 'TIME': sqltypes.TIME, + 'TIME_CHAR': sqltypes.TIME, + 'TIMESTAMP': sqltypes.TIMESTAMP, + 'VARCHAR': sqltypes.VARCHAR, + 'NVARCHAR': sqltypes.NVARCHAR, + 'NCHAR': sqltypes.NCHAR, +} + + +class SQLiteCompiler(compiler.SQLCompiler): + extract_map = util.update_copy( + compiler.SQLCompiler.extract_map, + { + 'month': '%m', + 'day': '%d', + 'year': '%Y', + 'second': '%S', + 'hour': '%H', + 'doy': '%j', + 'minute': '%M', + 'epoch': '%s', + 'dow': '%w', + 'week': '%W', + }) + + def visit_now_func(self, fn, **kw): + return "CURRENT_TIMESTAMP" + + def visit_localtimestamp_func(self, func, **kw): + return 'DATETIME(CURRENT_TIMESTAMP, "localtime")' + + def visit_true(self, expr, **kw): + return '1' + + def visit_false(self, expr, **kw): + return '0' + + def visit_char_length_func(self, fn, **kw): + return "length%s" % self.function_argspec(fn) + + def visit_cast(self, cast, **kwargs): + if self.dialect.supports_cast: + return super(SQLiteCompiler, self).visit_cast(cast, **kwargs) + else: + return self.process(cast.clause, **kwargs) + + def visit_extract(self, extract, **kw): + try: + return "CAST(STRFTIME('%s', %s) AS INTEGER)" % ( + self.extract_map[extract.field], + self.process(extract.expr, **kw) + ) + except KeyError: + raise exc.CompileError( + "%s is not a valid extract argument." 
% extract.field) + + def limit_clause(self, select, **kw): + text = "" + if select._limit_clause is not None: + text += "\n LIMIT " + self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + if select._limit_clause is None: + text += "\n LIMIT " + self.process(sql.literal(-1)) + text += " OFFSET " + self.process(select._offset_clause, **kw) + else: + text += " OFFSET " + self.process(sql.literal(0), **kw) + return text + + def for_update_clause(self, select, **kw): + # sqlite has no "FOR UPDATE" AFAICT + return '' + + +class SQLiteDDLCompiler(compiler.DDLCompiler): + + def get_column_specification(self, column, **kwargs): + coltype = self.dialect.type_compiler.process( + column.type, type_expression=column) + colspec = self.preparer.format_column(column) + " " + coltype + default = self.get_column_default_string(column) + if default is not None: + colspec += " DEFAULT " + default + + if not column.nullable: + colspec += " NOT NULL" + + if (column.primary_key and + column.table.dialect_options['sqlite']['autoincrement'] and + len(column.table.primary_key.columns) == 1 and + issubclass(column.type._type_affinity, sqltypes.Integer) and + not column.foreign_keys): + colspec += " PRIMARY KEY AUTOINCREMENT" + + return colspec + + def visit_primary_key_constraint(self, constraint): + # for columns with sqlite_autoincrement=True, + # the PRIMARY KEY constraint can only be inline + # with the column itself. + if len(constraint.columns) == 1: + c = list(constraint)[0] + if (c.primary_key and + c.table.dialect_options['sqlite']['autoincrement'] and + issubclass(c.type._type_affinity, sqltypes.Integer) and + not c.foreign_keys): + return None + + return super(SQLiteDDLCompiler, self).visit_primary_key_constraint( + constraint) + + def visit_foreign_key_constraint(self, constraint): + + local_table = constraint.elements[0].parent.table + remote_table = constraint.elements[0].column.table + + if local_table.schema != remote_table.schema: + return None + else: + return super( + SQLiteDDLCompiler, + self).visit_foreign_key_constraint(constraint) + + def define_constraint_remote_table(self, constraint, table, preparer): + """Format the remote table clause of a CREATE CONSTRAINT clause.""" + + return preparer.format_table(table, use_schema=False) + + def visit_create_index(self, create): + index = create.element + + text = super(SQLiteDDLCompiler, self).visit_create_index( + create, include_table_schema=False) + + whereclause = index.dialect_options["sqlite"]["where"] + if whereclause is not None: + where_compiled = self.sql_compiler.process( + whereclause, include_table=False, + literal_binds=True) + text += " WHERE " + where_compiled + + return text + + +class SQLiteTypeCompiler(compiler.GenericTypeCompiler): + def visit_large_binary(self, type_, **kw): + return self.visit_BLOB(type_) + + def visit_DATETIME(self, type_, **kw): + if not isinstance(type_, _DateTimeMixin) or \ + type_.format_is_text_affinity: + return super(SQLiteTypeCompiler, self).visit_DATETIME(type_) + else: + return "DATETIME_CHAR" + + def visit_DATE(self, type_, **kw): + if not isinstance(type_, _DateTimeMixin) or \ + type_.format_is_text_affinity: + return super(SQLiteTypeCompiler, self).visit_DATE(type_) + else: + return "DATE_CHAR" + + def visit_TIME(self, type_, **kw): + if not isinstance(type_, _DateTimeMixin) or \ + type_.format_is_text_affinity: + return super(SQLiteTypeCompiler, self).visit_TIME(type_) + else: + return "TIME_CHAR" + + +class SQLiteIdentifierPreparer(compiler.IdentifierPreparer): + 
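+ # Identifiers that collide with the reserved words listed below are + # quoted when SQLite DDL and SQL is rendered.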
reserved_words = set([ + 'add', 'after', 'all', 'alter', 'analyze', 'and', 'as', 'asc', + 'attach', 'autoincrement', 'before', 'begin', 'between', 'by', + 'cascade', 'case', 'cast', 'check', 'collate', 'column', 'commit', + 'conflict', 'constraint', 'create', 'cross', 'current_date', + 'current_time', 'current_timestamp', 'database', 'default', + 'deferrable', 'deferred', 'delete', 'desc', 'detach', 'distinct', + 'drop', 'each', 'else', 'end', 'escape', 'except', 'exclusive', + 'explain', 'false', 'fail', 'for', 'foreign', 'from', 'full', 'glob', + 'group', 'having', 'if', 'ignore', 'immediate', 'in', 'index', + 'indexed', 'initially', 'inner', 'insert', 'instead', 'intersect', + 'into', 'is', 'isnull', 'join', 'key', 'left', 'like', 'limit', + 'match', 'natural', 'not', 'notnull', 'null', 'of', 'offset', 'on', + 'or', 'order', 'outer', 'plan', 'pragma', 'primary', 'query', + 'raise', 'references', 'reindex', 'rename', 'replace', 'restrict', + 'right', 'rollback', 'row', 'select', 'set', 'table', 'temp', + 'temporary', 'then', 'to', 'transaction', 'trigger', 'true', 'union', + 'unique', 'update', 'using', 'vacuum', 'values', 'view', 'virtual', + 'when', 'where', + ]) + + def format_index(self, index, use_schema=True, name=None): + """Prepare a quoted index and schema name.""" + + if name is None: + name = index.name + result = self.quote(name, index.quote) + if (not self.omit_schema and + use_schema and + getattr(index.table, "schema", None)): + result = self.quote_schema( + index.table.schema, index.table.quote_schema) + "." + result + return result + + +class SQLiteExecutionContext(default.DefaultExecutionContext): + @util.memoized_property + def _preserve_raw_colnames(self): + return self.execution_options.get("sqlite_raw_colnames", False) + + def _translate_colname(self, colname): + # TODO: detect SQLite version 3.10.0 or greater; + # see [ticket:3633] + + # adjust for dotted column names. SQLite + # in the case of UNION may store col names as + # "tablename.colname", or if using an attached database, + # "database.tablename.colname", in cursor.description + if not self._preserve_raw_colnames and "." in colname: + return colname.split(".")[-1], colname + else: + return colname, None + + +class SQLiteDialect(default.DefaultDialect): + name = 'sqlite' + supports_alter = False + supports_unicode_statements = True + supports_unicode_binds = True + supports_default_values = True + supports_empty_insert = False + supports_cast = True + supports_multivalues_insert = True + + # TODO: detect version 3.7.16 or greater; + # see [ticket:3634] + supports_right_nested_joins = False + + default_paramstyle = 'qmark' + execution_ctx_cls = SQLiteExecutionContext + statement_compiler = SQLiteCompiler + ddl_compiler = SQLiteDDLCompiler + type_compiler = SQLiteTypeCompiler + preparer = SQLiteIdentifierPreparer + ischema_names = ischema_names + colspecs = colspecs + isolation_level = None + + construct_arguments = [ + (sa_schema.Table, { + "autoincrement": False + }), + (sa_schema.Index, { + "where": None, + }), + ] + + _broken_fk_pragma_quotes = False + + def __init__(self, isolation_level=None, native_datetime=False, **kwargs): + default.DefaultDialect.__init__(self, **kwargs) + self.isolation_level = isolation_level + + # this flag is used by the pysqlite dialect, and perhaps others in the + # future, to indicate the driver is handling date/timestamp + # conversions (and perhaps datetime/time as well on some hypothetical + # driver ?)
+ self.native_datetime = native_datetime + + if self.dbapi is not None: + self.supports_default_values = ( + self.dbapi.sqlite_version_info >= (3, 3, 8)) + self.supports_cast = ( + self.dbapi.sqlite_version_info >= (3, 2, 3)) + self.supports_multivalues_insert = ( + # http://www.sqlite.org/releaselog/3_7_11.html + self.dbapi.sqlite_version_info >= (3, 7, 11)) + # see http://www.sqlalchemy.org/trac/ticket/2568 + # as well as http://www.sqlite.org/src/info/600482d161 + self._broken_fk_pragma_quotes = ( + self.dbapi.sqlite_version_info < (3, 6, 14)) + + _isolation_lookup = { + 'READ UNCOMMITTED': 1, + 'SERIALIZABLE': 0, + } + + def set_isolation_level(self, connection, level): + try: + isolation_level = self._isolation_lookup[level.replace('_', ' ')] + except KeyError: + raise exc.ArgumentError( + "Invalid value '%s' for isolation_level. " + "Valid isolation levels for %s are %s" % + (level, self.name, ", ".join(self._isolation_lookup)) + ) + cursor = connection.cursor() + cursor.execute("PRAGMA read_uncommitted = %d" % isolation_level) + cursor.close() + + def get_isolation_level(self, connection): + cursor = connection.cursor() + cursor.execute('PRAGMA read_uncommitted') + res = cursor.fetchone() + if res: + value = res[0] + else: + # http://www.sqlite.org/changes.html#version_3_3_3 + # "Optional READ UNCOMMITTED isolation (instead of the + # default isolation level of SERIALIZABLE) and + # table level locking when database connections + # share a common cache." + # pre-SQLite 3.3.0 defaults to 0 + value = 0 + cursor.close() + if value == 0: + return "SERIALIZABLE" + elif value == 1: + return "READ UNCOMMITTED" + else: + assert False, "Unknown isolation level %s" % value + + def on_connect(self): + if self.isolation_level is not None: + def connect(conn): + self.set_isolation_level(conn, self.isolation_level) + return connect + else: + return None + + @reflection.cache + def get_table_names(self, connection, schema=None, **kw): + if schema is not None: + qschema = self.identifier_preparer.quote_identifier(schema) + master = '%s.sqlite_master' % qschema + else: + master = "sqlite_master" + s = ("SELECT name FROM %s " + "WHERE type='table' ORDER BY name") % (master,) + rs = connection.execute(s) + return [row[0] for row in rs] + + @reflection.cache + def get_temp_table_names(self, connection, **kw): + s = "SELECT name FROM sqlite_temp_master "\ + "WHERE type='table' ORDER BY name " + rs = connection.execute(s) + + return [row[0] for row in rs] + + @reflection.cache + def get_temp_view_names(self, connection, **kw): + s = "SELECT name FROM sqlite_temp_master "\ + "WHERE type='view' ORDER BY name " + rs = connection.execute(s) + + return [row[0] for row in rs] + + def has_table(self, connection, table_name, schema=None): + info = self._get_table_pragma( + connection, "table_info", table_name, schema=schema) + return bool(info) + + @reflection.cache + def get_view_names(self, connection, schema=None, **kw): + if schema is not None: + qschema = self.identifier_preparer.quote_identifier(schema) + master = '%s.sqlite_master' % qschema + else: + master = "sqlite_master" + s = ("SELECT name FROM %s " + "WHERE type='view' ORDER BY name") % (master,) + rs = connection.execute(s) + + return [row[0] for row in rs] + + @reflection.cache + def get_view_definition(self, connection, view_name, schema=None, **kw): + if schema is not None: + qschema = self.identifier_preparer.quote_identifier(schema) + master = '%s.sqlite_master' % qschema + s = ("SELECT sql FROM %s WHERE name = '%s' " + "AND type='view'") %
(master, view_name) + rs = connection.execute(s) + else: + try: + s = ("SELECT sql FROM " + " (SELECT * FROM sqlite_master UNION ALL " + " SELECT * FROM sqlite_temp_master) " + "WHERE name = '%s' " + "AND type='view'") % view_name + rs = connection.execute(s) + except exc.DBAPIError: + s = ("SELECT sql FROM sqlite_master WHERE name = '%s' " + "AND type='view'") % view_name + rs = connection.execute(s) + + result = rs.fetchall() + if result: + return result[0].sql + + @reflection.cache + def get_columns(self, connection, table_name, schema=None, **kw): + info = self._get_table_pragma( + connection, "table_info", table_name, schema=schema) + + columns = [] + for row in info: + (name, type_, nullable, default, primary_key) = ( + row[1], row[2].upper(), not row[3], row[4], row[5]) + + columns.append(self._get_column_info(name, type_, nullable, + default, primary_key)) + return columns + + def _get_column_info(self, name, type_, nullable, default, primary_key): + coltype = self._resolve_type_affinity(type_) + + if default is not None: + default = util.text_type(default) + + return { + 'name': name, + 'type': coltype, + 'nullable': nullable, + 'default': default, + 'autoincrement': default is None, + 'primary_key': primary_key, + } + + def _resolve_type_affinity(self, type_): + """Return a data type from a reflected column, using affinity rules. + + SQLite's goal for universal compatibility introduces some complexity + during reflection, as a column's defined type might not actually be a + type that SQLite understands - or indeed, may not be defined *at all*. + Internally, SQLite handles this with a 'data type affinity' for each + column definition, mapping to one of 'TEXT', 'NUMERIC', 'INTEGER', + 'REAL', or 'NONE' (raw bits). The algorithm that determines this is + listed in http://www.sqlite.org/datatype3.html section 2.1. + + This method allows SQLAlchemy to support that algorithm, while still + providing access to smarter reflection utilities by recognizing + column definitions that SQLite only supports through affinity (like + DATE and DOUBLE). + + """ + match = re.match(r'([\w ]+)(\(.*?\))?', type_) + if match: + coltype = match.group(1) + args = match.group(2) + else: + coltype = '' + args = '' + + if coltype in self.ischema_names: + coltype = self.ischema_names[coltype] + elif 'INT' in coltype: + coltype = sqltypes.INTEGER + elif 'CHAR' in coltype or 'CLOB' in coltype or 'TEXT' in coltype: + coltype = sqltypes.TEXT + elif 'BLOB' in coltype or not coltype: + coltype = sqltypes.NullType + elif 'REAL' in coltype or 'FLOA' in coltype or 'DOUB' in coltype: + coltype = sqltypes.REAL + else: + coltype = sqltypes.NUMERIC + + if args is not None: + args = re.findall(r'(\d+)', args) + try: + coltype = coltype(*[int(a) for a in args]) + except TypeError: + util.warn( + "Could not instantiate type %s with " + "reflected arguments %s; using no arguments." % + (coltype, args)) + coltype = coltype() + else: + coltype = coltype() + + return coltype + + @reflection.cache + def get_pk_constraint(self, connection, table_name, schema=None, **kw): + cols = self.get_columns(connection, table_name, schema, **kw) + pkeys = [] + for col in cols: + if col['primary_key']: + pkeys.append(col['name']) + return {'constrained_columns': pkeys, 'name': None} + + @reflection.cache + def get_foreign_keys(self, connection, table_name, schema=None, **kw): + # sqlite makes this *extremely difficult*. + # First, use the pragma to get the actual FKs.
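+ # Per the SQLite docs, each foreign_key_list row is + # (id, seq, table, from, to, on_update, on_delete, match); + # hence the row[0], row[2], row[3], row[4] unpacking below.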
+ pragma_fks = self._get_table_pragma( + connection, "foreign_key_list", + table_name, schema=schema + ) + + fks = {} + + for row in pragma_fks: + (numerical_id, rtbl, lcol, rcol) = ( + row[0], row[2], row[3], row[4]) + + if rcol is None: + rcol = lcol + + if self._broken_fk_pragma_quotes: + rtbl = re.sub(r'^[\"\[`\']|[\"\]`\']$', '', rtbl) + + if numerical_id in fks: + fk = fks[numerical_id] + else: + fk = fks[numerical_id] = { + 'name': None, + 'constrained_columns': [], + 'referred_schema': None, + 'referred_table': rtbl, + 'referred_columns': [], + } + fks[numerical_id] = fk + + fk['constrained_columns'].append(lcol) + fk['referred_columns'].append(rcol) + + def fk_sig(constrained_columns, referred_table, referred_columns): + return tuple(constrained_columns) + (referred_table,) + \ + tuple(referred_columns) + + # then, parse the actual SQL and attempt to find DDL that matches + # the names as well. SQLite saves the DDL in whatever format + # it was typed in as, so need to be liberal here. + + keys_by_signature = dict( + ( + fk_sig( + fk['constrained_columns'], + fk['referred_table'], fk['referred_columns']), + fk + ) for fk in fks.values() + ) + + table_data = self._get_table_sql(connection, table_name, schema=schema) + if table_data is None: + # system tables, etc. + return [] + + def parse_fks(): + FK_PATTERN = ( + '(?:CONSTRAINT (\w+) +)?' + 'FOREIGN KEY *\( *(.+?) *\) +' + 'REFERENCES +(?:(?:"(.+?)")|([a-z0-9_]+)) *\((.+?)\)' + ) + + for match in re.finditer(FK_PATTERN, table_data, re.I): + ( + constraint_name, constrained_columns, + referred_quoted_name, referred_name, + referred_columns) = match.group(1, 2, 3, 4, 5) + constrained_columns = list( + self._find_cols_in_sig(constrained_columns)) + if not referred_columns: + referred_columns = constrained_columns + else: + referred_columns = list( + self._find_cols_in_sig(referred_columns)) + referred_name = referred_quoted_name or referred_name + yield ( + constraint_name, constrained_columns, + referred_name, referred_columns) + fkeys = [] + + for ( + constraint_name, constrained_columns, + referred_name, referred_columns) in parse_fks(): + sig = fk_sig( + constrained_columns, referred_name, referred_columns) + if sig not in keys_by_signature: + util.warn( + "WARNING: SQL-parsed foreign key constraint " + "'%s' could not be located in PRAGMA " + "foreign_keys for table %s" % ( + sig, + table_name + )) + continue + key = keys_by_signature.pop(sig) + key['name'] = constraint_name + fkeys.append(key) + # assume the remainders are the unnamed, inline constraints, just + # use them as is as it's extremely difficult to parse inline + # constraints + fkeys.extend(keys_by_signature.values()) + return fkeys + + def _find_cols_in_sig(self, sig): + for match in re.finditer(r'(?:"(.+?)")|([a-z0-9_]+)', sig, re.I): + yield match.group(1) or match.group(2) + + @reflection.cache + def get_unique_constraints(self, connection, table_name, + schema=None, **kw): + + auto_index_by_sig = {} + for idx in self.get_indexes( + connection, table_name, schema=schema, + include_auto_indexes=True, **kw): + if not idx['name'].startswith("sqlite_autoindex"): + continue + sig = tuple(idx['column_names']) + auto_index_by_sig[sig] = idx + + table_data = self._get_table_sql( + connection, table_name, schema=schema, **kw) + if not table_data: + return [] + + unique_constraints = [] + + def parse_uqs(): + UNIQUE_PATTERN = '(?:CONSTRAINT "?(.+?)"? +)?UNIQUE *\((.+?)\)' + INLINE_UNIQUE_PATTERN = ( + '(?:(".+?")|([a-z0-9]+)) ' + '+[a-z0-9_ ]+? 
+UNIQUE') + + for match in re.finditer(UNIQUE_PATTERN, table_data, re.I): + name, cols = match.group(1, 2) + yield name, list(self._find_cols_in_sig(cols)) + + # we need to match inlines as well, as we seek to differentiate + # a UNIQUE constraint from a UNIQUE INDEX, even though these + # are kind of the same thing :) + for match in re.finditer(INLINE_UNIQUE_PATTERN, table_data, re.I): + cols = list( + self._find_cols_in_sig(match.group(1) or match.group(2))) + yield None, cols + + for name, cols in parse_uqs(): + sig = tuple(cols) + if sig in auto_index_by_sig: + auto_index_by_sig.pop(sig) + parsed_constraint = { + 'name': name, + 'column_names': cols + } + unique_constraints.append(parsed_constraint) + # NOTE: auto_index_by_sig might not be empty here, + # the PRIMARY KEY may have an entry. + return unique_constraints + + @reflection.cache + def get_indexes(self, connection, table_name, schema=None, **kw): + pragma_indexes = self._get_table_pragma( + connection, "index_list", table_name, schema=schema) + indexes = [] + + include_auto_indexes = kw.pop('include_auto_indexes', False) + for row in pragma_indexes: + # ignore implicit primary key index. + # http://www.mail-archive.com/sqlite-users@sqlite.org/msg30517.html + if (not include_auto_indexes and + row[1].startswith('sqlite_autoindex')): + continue + + indexes.append(dict(name=row[1], column_names=[], unique=row[2])) + + # loop thru unique indexes to get the column names. + for idx in indexes: + pragma_index = self._get_table_pragma( + connection, "index_info", idx['name']) + + for row in pragma_index: + idx['column_names'].append(row[2]) + return indexes + + @reflection.cache + def _get_table_sql(self, connection, table_name, schema=None, **kw): + try: + s = ("SELECT sql FROM " + " (SELECT * FROM sqlite_master UNION ALL " + " SELECT * FROM sqlite_temp_master) " + "WHERE name = '%s' " + "AND type = 'table'") % table_name + rs = connection.execute(s) + except exc.DBAPIError: + s = ("SELECT sql FROM sqlite_master WHERE name = '%s' " + "AND type = 'table'") % table_name + rs = connection.execute(s) + return rs.scalar() + + def _get_table_pragma(self, connection, pragma, table_name, schema=None): + quote = self.identifier_preparer.quote_identifier + if schema is not None: + statement = "PRAGMA %s." % quote(schema) + else: + statement = "PRAGMA " + qtable = quote(table_name) + statement = "%s%s(%s)" % (statement, pragma, qtable) + cursor = connection.execute(statement) + if not cursor._soft_closed: + # work around SQLite issue whereby cursor.description + # is blank when PRAGMA returns no rows: + # http://www.sqlite.org/cvstrac/tktview?tn=1884 + result = cursor.fetchall() + else: + result = [] + return result diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/pysqlcipher.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/pysqlcipher.py new file mode 100644 index 0000000..bbafc8d --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/pysqlcipher.py @@ -0,0 +1,116 @@ +# sqlite/pysqlcipher.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. 
dialect:: sqlite+pysqlcipher + :name: pysqlcipher + :dbapi: pysqlcipher + :connectstring: sqlite+pysqlcipher://:passphrase/file_path[?kdf_iter=<iter>] + :url: https://pypi.python.org/pypi/pysqlcipher + + ``pysqlcipher`` is a fork of the standard ``pysqlite`` driver to make + use of the `SQLCipher <https://www.zetetic.net/sqlcipher>`_ backend. + + .. versionadded:: 0.9.9 + +Driver +------ + +The driver here is the `pysqlcipher <https://pypi.python.org/pypi/pysqlcipher>`_ +driver, which makes use of the SQLCipher engine. This system essentially +introduces new PRAGMA commands to SQLite which allow the setting of a +passphrase and other encryption parameters, allowing the database +file to be encrypted. + +Connect Strings +--------------- + +The format of the connect string is in every way the same as that +of the :mod:`~sqlalchemy.dialects.sqlite.pysqlite` driver, except that the +"password" field is now accepted, which should contain a passphrase:: + + e = create_engine('sqlite+pysqlcipher://:testing@/foo.db') + +For an absolute file path, two leading slashes should be used for the +database name:: + + e = create_engine('sqlite+pysqlcipher://:testing@//path/to/foo.db') + +A selection of additional encryption-related pragmas supported by SQLCipher +as documented at https://www.zetetic.net/sqlcipher/sqlcipher-api/ can be passed +in the query string, and will result in that PRAGMA being called for each +new connection. Currently, ``cipher``, ``kdf_iter``, +``cipher_page_size`` and ``cipher_use_hmac`` are supported:: + + e = create_engine('sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000') + + +Pooling Behavior +---------------- + +The driver makes a change to the default pool behavior of pysqlite +as described in :ref:`pysqlite_threading_pooling`. The pysqlcipher driver +has been observed to be significantly slower on connection than the +pysqlite driver, most likely due to the encryption overhead, so the +dialect here defaults to using the :class:`.SingletonThreadPool` +implementation, +instead of the :class:`.NullPool` pool used by pysqlite. As always, the pool +implementation is entirely configurable using the +:paramref:`.create_engine.poolclass` parameter; the :class:`.StaticPool` may +be more feasible for single-threaded use, or :class:`.NullPool` may be used +to prevent unencrypted connections from being held open for long periods of +time, at the expense of slower startup time for new connections. + + +""" +from __future__ import absolute_import +from .pysqlite import SQLiteDialect_pysqlite +from ...engine import url as _url +from ...
import pool + + +class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite): + driver = 'pysqlcipher' + + pragmas = ('kdf_iter', 'cipher', 'cipher_page_size', 'cipher_use_hmac') + + @classmethod + def dbapi(cls): + from pysqlcipher import dbapi2 as sqlcipher + return sqlcipher + + @classmethod + def get_pool_class(cls, url): + return pool.SingletonThreadPool + + def connect(self, *cargs, **cparams): + passphrase = cparams.pop('passphrase', '') + + pragmas = dict( + (key, cparams.pop(key, None)) for key in + self.pragmas + ) + + conn = super(SQLiteDialect_pysqlcipher, self).\ + connect(*cargs, **cparams) + conn.execute('pragma key="%s"' % passphrase) + for prag, value in pragmas.items(): + if value is not None: + conn.execute('pragma %s=%s' % (prag, value)) + + return conn + + def create_connect_args(self, url): + super_url = _url.URL( + url.drivername, username=url.username, + host=url.host, database=url.database, query=url.query) + c_args, opts = super(SQLiteDialect_pysqlcipher, self).\ + create_connect_args(super_url) + opts['passphrase'] = url.password + return c_args, opts + +dialect = SQLiteDialect_pysqlcipher diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/pysqlite.py new file mode 100644 index 0000000..33d04de --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/pysqlite.py @@ -0,0 +1,377 @@ +# sqlite/pysqlite.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: sqlite+pysqlite + :name: pysqlite + :dbapi: sqlite3 + :connectstring: sqlite+pysqlite:///file_path + :url: http://docs.python.org/library/sqlite3.html + + Note that ``pysqlite`` is the same driver as the ``sqlite3`` + module included with the Python distribution. + +Driver +------ + +When using Python 2.5 and above, the built in ``sqlite3`` driver is +already installed and no additional installation is needed. Otherwise, +the ``pysqlite2`` driver needs to be present. This is the same driver as +``sqlite3``, just with a different name. + +The ``pysqlite2`` driver will be loaded first, and if not found, ``sqlite3`` +is loaded. This allows an explicitly installed pysqlite driver to take +precedence over the built in one. As with all dialects, a specific +DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control +this explicitly:: + + from sqlite3 import dbapi2 as sqlite + e = create_engine('sqlite+pysqlite:///file.db', module=sqlite) + + +Connect Strings +--------------- + +The file specification for the SQLite database is taken as the "database" +portion of the URL. Note that the format of a SQLAlchemy url is:: + + driver://user:pass@host/database + +This means that the actual filename to be used starts with the characters to +the **right** of the third slash. So connecting to a relative filepath +looks like:: + + # relative path + e = create_engine('sqlite:///path/to/database.db') + +An absolute path, which is denoted by starting with a slash, means you +need **four** slashes:: + + # absolute path + e = create_engine('sqlite:////path/to/database.db') + +To use a Windows path, regular drive specifications and backslashes can be +used. 
Double backslashes are probably needed:: + + # absolute path on Windows + e = create_engine('sqlite:///C:\\\\path\\\\to\\\\database.db') + +The sqlite ``:memory:`` identifier is the default if no filepath is +present. Specify ``sqlite://`` and nothing else:: + + # in-memory database + e = create_engine('sqlite://') + +Compatibility with sqlite3 "native" date and datetime types +----------------------------------------------------------- + +The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and +sqlite3.PARSE_COLNAMES options, which have the effect that any column +or expression explicitly cast as "date" or "timestamp" will be converted +to a Python date or datetime object. The date and datetime types provided +with the pysqlite dialect are not currently compatible with these options, +since they render the ISO date/datetime including microseconds, which +pysqlite's driver does not. Additionally, SQLAlchemy does not at +this time automatically render the "cast" syntax required for the +freestanding functions "current_timestamp" and "current_date" to return +datetime/date types natively. Unfortunately, pysqlite +does not provide the standard DBAPI types in ``cursor.description``, +leaving SQLAlchemy with no way to detect these types on the fly +without expensive per-row type checks. + +Keeping in mind that pysqlite's parsing option is not recommended, +nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES +can be forced if one configures "native_datetime=True" on create_engine():: + + engine = create_engine('sqlite://', + connect_args={'detect_types': + sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES}, + native_datetime=True + ) + +With this flag enabled, the DATE and TIMESTAMP types (but note - not the +DATETIME or TIME types...confused yet ?) will not perform any bind parameter +or result processing. Execution of "func.current_date()" will return a string. +"func.current_timestamp()" is registered as returning a DATETIME type in +SQLAlchemy, so this function still receives SQLAlchemy-level result +processing. + +.. _pysqlite_threading_pooling: + +Threading/Pooling Behavior +--------------------------- + +Pysqlite's default behavior is to prohibit the usage of a single connection +in more than one thread. This was originally intended to work with older +versions of SQLite that did not support multithreaded operation under +various circumstances. In particular, older SQLite versions +did not allow a ``:memory:`` database to be used in multiple threads +under any circumstances. + +Pysqlite does include a now-undocumented flag known as +``check_same_thread`` which will disable this check, however note that +pysqlite connections are still not safe to use concurrently in multiple +threads. In particular, any statement execution calls would need to be +externally mutexed, as Pysqlite does not provide for thread-safe propagation +of error messages among other things. So while even ``:memory:`` databases +can be shared among threads in modern SQLite, Pysqlite doesn't provide enough +thread-safety to make this usage worth it. + +SQLAlchemy sets up pooling to work with Pysqlite's default behavior: + +* When a ``:memory:`` SQLite database is specified, the dialect by default + will use :class:`.SingletonThreadPool`. This pool maintains a single + connection per thread, so that all access to the engine within the current + thread uses the same ``:memory:`` database - other threads would access a + different ``:memory:`` database.
+* When a file-based database is specified, the dialect will use + :class:`.NullPool` as the source of connections. This pool closes and + discards connections which are returned to the pool immediately. SQLite + file-based connections have extremely low overhead, so pooling is not + necessary. The scheme also prevents a connection from being used again in + a different thread and works best with SQLite's coarse-grained file locking. + + .. versionchanged:: 0.7 + Default selection of :class:`.NullPool` for SQLite file-based databases. + Previous versions selected :class:`.SingletonThreadPool` by + default for all SQLite databases. + + +Using a Memory Database in Multiple Threads +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To use a ``:memory:`` database in a multithreaded scenario, the same +connection object must be shared among threads, since the database exists +only within the scope of that connection. The +:class:`.StaticPool` implementation will maintain a single connection +globally, and the ``check_same_thread`` flag can be passed to Pysqlite +as ``False``:: + + from sqlalchemy.pool import StaticPool + engine = create_engine('sqlite://', + connect_args={'check_same_thread':False}, + poolclass=StaticPool) + +Note that using a ``:memory:`` database in multiple threads requires a recent +version of SQLite. + +Using Temporary Tables with SQLite +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Due to the way SQLite deals with temporary tables, if you wish to use a +temporary table in a file-based SQLite database across multiple checkouts +from the connection pool, such as when using an ORM :class:`.Session` where +the temporary table should continue to remain after :meth:`.Session.commit` or +:meth:`.Session.rollback` is called, a pool which maintains a single +connection must be used. Use :class:`.SingletonThreadPool` if the scope is +only needed within the current thread, or :class:`.StaticPool` if scope is +needed within multiple threads for this case:: + + # maintain the same connection per thread + from sqlalchemy.pool import SingletonThreadPool + engine = create_engine('sqlite:///mydb.db', + poolclass=SingletonThreadPool) + + + # maintain the same connection across all threads + from sqlalchemy.pool import StaticPool + engine = create_engine('sqlite:///mydb.db', + poolclass=StaticPool) + +Note that :class:`.SingletonThreadPool` should be configured for the number +of threads that are to be used; beyond that number, connections will be +closed out in a non-deterministic way. + +Unicode +------- + +The pysqlite driver only returns Python ``unicode`` objects in result sets, +never plain strings, and accommodates ``unicode`` objects within bound +parameter values in all cases. Regardless of the SQLAlchemy string type in +use, string-based result values will be Python ``unicode`` in Python 2. +The :class:`.Unicode` type should still be used to indicate those columns that +require unicode, however, so that non-``unicode`` values passed inadvertently +will emit a warning. Pysqlite will emit an error if a non-``unicode`` string +is passed containing non-ASCII characters. + +.. _pysqlite_serializable: + +Serializable isolation / Savepoints / Transactional DDL +------------------------------------------------------- + +In the section :ref:`sqlite_concurrency`, we refer to the pysqlite +driver's assortment of issues that prevent several features of SQLite +from working correctly. The pysqlite DBAPI driver has several +long-standing bugs which impact the correctness of its transactional +behavior.
In its default mode of operation, SQLite features such as +SERIALIZABLE isolation, transactional DDL, and SAVEPOINT support are +non-functional, and in order to use these features, workarounds must +be taken. + +The issue is essentially that the driver attempts to second-guess the user's +intent, failing to start transactions and sometimes ending them prematurely, in +an effort to minimize the SQLite database's file locking behavior, even +though SQLite itself uses "shared" locks for read-only activities. + +SQLAlchemy chooses to not alter this behavior by default, as it is the +long-expected behavior of the pysqlite driver; if and when the pysqlite +driver attempts to repair these issues, that will be more of a motivation +to change SQLAlchemy's defaults. + +The good news is that with a few events, we can implement transactional +support fully, by disabling pysqlite's transaction handling entirely and +emitting BEGIN ourselves. This is achieved using two event listeners:: + + from sqlalchemy import create_engine, event + + engine = create_engine("sqlite:///myfile.db") + + @event.listens_for(engine, "connect") + def do_connect(dbapi_connection, connection_record): + # disable pysqlite's emitting of the BEGIN statement entirely. + # also stops it from emitting COMMIT before any DDL. + dbapi_connection.isolation_level = None + + @event.listens_for(engine, "begin") + def do_begin(conn): + # emit our own BEGIN + conn.execute("BEGIN") + +Above, we intercept a new pysqlite connection and disable any transactional +integration. Then, at the point at which SQLAlchemy knows that transaction +scope is to begin, we emit ``"BEGIN"`` ourselves. + +When we take control of ``"BEGIN"``, we can also control directly SQLite's +locking modes, introduced at `BEGIN TRANSACTION <http://sqlite.org/lang_begin.html>`_, +by adding the desired locking mode to our ``"BEGIN"``:: + + @event.listens_for(engine, "begin") + def do_begin(conn): + conn.execute("BEGIN EXCLUSIVE") + +.. 
seealso:: + + `BEGIN TRANSACTION <http://sqlite.org/lang_begin.html>`_ - on the SQLite site + + `sqlite3 SELECT does not BEGIN a transaction <http://bugs.python.org/issue9924>`_ - on the Python bug tracker + + `sqlite3 module breaks transactions and potentially corrupts data <http://bugs.python.org/issue10740>`_ - on the Python bug tracker + + +""" + +from sqlalchemy.dialects.sqlite.base import SQLiteDialect, DATETIME, DATE +from sqlalchemy import exc, pool +from sqlalchemy import types as sqltypes +from sqlalchemy import util + +import os + + +class _SQLite_pysqliteTimeStamp(DATETIME): + def bind_processor(self, dialect): + if dialect.native_datetime: + return None + else: + return DATETIME.bind_processor(self, dialect) + + def result_processor(self, dialect, coltype): + if dialect.native_datetime: + return None + else: + return DATETIME.result_processor(self, dialect, coltype) + + +class _SQLite_pysqliteDate(DATE): + def bind_processor(self, dialect): + if dialect.native_datetime: + return None + else: + return DATE.bind_processor(self, dialect) + + def result_processor(self, dialect, coltype): + if dialect.native_datetime: + return None + else: + return DATE.result_processor(self, dialect, coltype) + + +class SQLiteDialect_pysqlite(SQLiteDialect): + default_paramstyle = 'qmark' + + colspecs = util.update_copy( + SQLiteDialect.colspecs, + { + sqltypes.Date: _SQLite_pysqliteDate, + sqltypes.TIMESTAMP: _SQLite_pysqliteTimeStamp, + } + ) + + if not util.py2k: + description_encoding = None + + driver = 'pysqlite' + + def __init__(self, **kwargs): + SQLiteDialect.__init__(self, **kwargs) + + if self.dbapi is not None: + sqlite_ver = self.dbapi.version_info + if sqlite_ver < (2, 1, 3): + util.warn( + ("The installed version of pysqlite2 (%s) is outdated " + "and will cause errors in some cases. Version 2.1.3 " + "or greater is recommended.") % + '.'.join([str(subver) for subver in sqlite_ver])) + + @classmethod + def dbapi(cls): + try: + from pysqlite2 import dbapi2 as sqlite + except ImportError as e: + try: + from sqlite3 import dbapi2 as sqlite # try 2.5+ stdlib name. + except ImportError: + raise e + return sqlite + + @classmethod + def get_pool_class(cls, url): + if url.database and url.database != ':memory:': + return pool.NullPool + else: + return pool.SingletonThreadPool + + def _get_server_version_info(self, connection): + return self.dbapi.sqlite_version_info + + def create_connect_args(self, url): + if url.username or url.password or url.host or url.port: + raise exc.ArgumentError( + "Invalid SQLite URL: %s\n" + "Valid SQLite URL forms are:\n" + " sqlite:///:memory: (or, sqlite://)\n" + " sqlite:///relative/path/to/file.db\n" + " sqlite:////absolute/path/to/file.db" % (url,)) + filename = url.database or ':memory:' + if filename != ':memory:': + filename = os.path.abspath(filename) + + opts = url.query.copy() + util.coerce_kw_type(opts, 'timeout', float) + util.coerce_kw_type(opts, 'isolation_level', str) + util.coerce_kw_type(opts, 'detect_types', int) + util.coerce_kw_type(opts, 'check_same_thread', bool) + util.coerce_kw_type(opts, 'cached_statements', int) + + return ([filename], opts) + + def is_disconnect(self, e, connection, cursor): + return isinstance(e, self.dbapi.ProgrammingError) and \ + "Cannot operate on a closed database."
in str(e) + +dialect = SQLiteDialect_pysqlite diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/__init__.py new file mode 100644 index 0000000..18535ed --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/__init__.py @@ -0,0 +1,28 @@ +# sybase/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from sqlalchemy.dialects.sybase import base, pysybase, pyodbc + +# default dialect +base.dialect = pyodbc.dialect + +from .base import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\ + TEXT, DATE, DATETIME, FLOAT, NUMERIC,\ + BIGINT, INT, INTEGER, SMALLINT, BINARY,\ + VARBINARY, UNITEXT, UNICHAR, UNIVARCHAR,\ + IMAGE, BIT, MONEY, SMALLMONEY, TINYINT,\ + dialect + + +__all__ = ( + 'CHAR', 'VARCHAR', 'TIME', 'NCHAR', 'NVARCHAR', + 'TEXT', 'DATE', 'DATETIME', 'FLOAT', 'NUMERIC', + 'BIGINT', 'INT', 'INTEGER', 'SMALLINT', 'BINARY', + 'VARBINARY', 'UNITEXT', 'UNICHAR', 'UNIVARCHAR', + 'IMAGE', 'BIT', 'MONEY', 'SMALLMONEY', 'TINYINT', + 'dialect' +) diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/base.py new file mode 100644 index 0000000..1e38534 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/base.py @@ -0,0 +1,825 @@ +# sybase/base.py +# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors +# +# get_select_precolumns(), limit_clause() implementation +# copyright (C) 2007 Fisch Asset Management +# AG http://www.fam.ch, with coding by Alexander Houben +# alexander.houben@thor-solutions.ch +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" + +.. dialect:: sybase + :name: Sybase + +.. note:: + + The Sybase dialect functions on current SQLAlchemy versions + but is not regularly tested, and may have many issues and + caveats not currently handled. 
+ +""" +import operator +import re + +from sqlalchemy.sql import compiler, expression, text, bindparam +from sqlalchemy.engine import default, base, reflection +from sqlalchemy import types as sqltypes +from sqlalchemy.sql import operators as sql_operators +from sqlalchemy import schema as sa_schema +from sqlalchemy import util, sql, exc + +from sqlalchemy.types import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\ + TEXT, DATE, DATETIME, FLOAT, NUMERIC,\ + BIGINT, INT, INTEGER, SMALLINT, BINARY,\ + VARBINARY, DECIMAL, TIMESTAMP, Unicode,\ + UnicodeText, REAL + +RESERVED_WORDS = set([ + "add", "all", "alter", "and", + "any", "as", "asc", "backup", + "begin", "between", "bigint", "binary", + "bit", "bottom", "break", "by", + "call", "capability", "cascade", "case", + "cast", "char", "char_convert", "character", + "check", "checkpoint", "close", "comment", + "commit", "connect", "constraint", "contains", + "continue", "convert", "create", "cross", + "cube", "current", "current_timestamp", "current_user", + "cursor", "date", "dbspace", "deallocate", + "dec", "decimal", "declare", "default", + "delete", "deleting", "desc", "distinct", + "do", "double", "drop", "dynamic", + "else", "elseif", "encrypted", "end", + "endif", "escape", "except", "exception", + "exec", "execute", "existing", "exists", + "externlogin", "fetch", "first", "float", + "for", "force", "foreign", "forward", + "from", "full", "goto", "grant", + "group", "having", "holdlock", "identified", + "if", "in", "index", "index_lparen", + "inner", "inout", "insensitive", "insert", + "inserting", "install", "instead", "int", + "integer", "integrated", "intersect", "into", + "iq", "is", "isolation", "join", + "key", "lateral", "left", "like", + "lock", "login", "long", "match", + "membership", "message", "mode", "modify", + "natural", "new", "no", "noholdlock", + "not", "notify", "null", "numeric", + "of", "off", "on", "open", + "option", "options", "or", "order", + "others", "out", "outer", "over", + "passthrough", "precision", "prepare", "primary", + "print", "privileges", "proc", "procedure", + "publication", "raiserror", "readtext", "real", + "reference", "references", "release", "remote", + "remove", "rename", "reorganize", "resource", + "restore", "restrict", "return", "revoke", + "right", "rollback", "rollup", "save", + "savepoint", "scroll", "select", "sensitive", + "session", "set", "setuser", "share", + "smallint", "some", "sqlcode", "sqlstate", + "start", "stop", "subtrans", "subtransaction", + "synchronize", "syntax_error", "table", "temporary", + "then", "time", "timestamp", "tinyint", + "to", "top", "tran", "trigger", + "truncate", "tsequal", "unbounded", "union", + "unique", "unknown", "unsigned", "update", + "updating", "user", "using", "validate", + "values", "varbinary", "varchar", "variable", + "varying", "view", "wait", "waitfor", + "when", "where", "while", "window", + "with", "with_cube", "with_lparen", "with_rollup", + "within", "work", "writetext", +]) + + +class _SybaseUnitypeMixin(object): + """these types appear to return a buffer object.""" + + def result_processor(self, dialect, coltype): + def process(value): + if value is not None: + return str(value) # decode("ucs-2") + else: + return None + return process + + +class UNICHAR(_SybaseUnitypeMixin, sqltypes.Unicode): + __visit_name__ = 'UNICHAR' + + +class UNIVARCHAR(_SybaseUnitypeMixin, sqltypes.Unicode): + __visit_name__ = 'UNIVARCHAR' + + +class UNITEXT(_SybaseUnitypeMixin, sqltypes.UnicodeText): + __visit_name__ = 'UNITEXT' + + +class TINYINT(sqltypes.Integer): 
+ __visit_name__ = 'TINYINT' + + +class BIT(sqltypes.TypeEngine): + __visit_name__ = 'BIT' + + +class MONEY(sqltypes.TypeEngine): + __visit_name__ = "MONEY" + + +class SMALLMONEY(sqltypes.TypeEngine): + __visit_name__ = "SMALLMONEY" + + +class UNIQUEIDENTIFIER(sqltypes.TypeEngine): + __visit_name__ = "UNIQUEIDENTIFIER" + + +class IMAGE(sqltypes.LargeBinary): + __visit_name__ = 'IMAGE' + + +class SybaseTypeCompiler(compiler.GenericTypeCompiler): + def visit_large_binary(self, type_, **kw): + return self.visit_IMAGE(type_) + + def visit_boolean(self, type_, **kw): + return self.visit_BIT(type_) + + def visit_unicode(self, type_, **kw): + return self.visit_NVARCHAR(type_) + + def visit_UNICHAR(self, type_, **kw): + return "UNICHAR(%d)" % type_.length + + def visit_UNIVARCHAR(self, type_, **kw): + return "UNIVARCHAR(%d)" % type_.length + + def visit_UNITEXT(self, type_, **kw): + return "UNITEXT" + + def visit_TINYINT(self, type_, **kw): + return "TINYINT" + + def visit_IMAGE(self, type_, **kw): + return "IMAGE" + + def visit_BIT(self, type_, **kw): + return "BIT" + + def visit_MONEY(self, type_, **kw): + return "MONEY" + + def visit_SMALLMONEY(self, type_, **kw): + return "SMALLMONEY" + + def visit_UNIQUEIDENTIFIER(self, type_, **kw): + return "UNIQUEIDENTIFIER" + +ischema_names = { + 'bigint': BIGINT, + 'int': INTEGER, + 'integer': INTEGER, + 'smallint': SMALLINT, + 'tinyint': TINYINT, + 'unsigned bigint': BIGINT, # TODO: unsigned flags + 'unsigned int': INTEGER, # TODO: unsigned flags + 'unsigned smallint': SMALLINT, # TODO: unsigned flags + 'numeric': NUMERIC, + 'decimal': DECIMAL, + 'dec': DECIMAL, + 'float': FLOAT, + 'double': NUMERIC, # TODO + 'double precision': NUMERIC, # TODO + 'real': REAL, + 'smallmoney': SMALLMONEY, + 'money': MONEY, + 'smalldatetime': DATETIME, + 'datetime': DATETIME, + 'date': DATE, + 'time': TIME, + 'char': CHAR, + 'character': CHAR, + 'varchar': VARCHAR, + 'character varying': VARCHAR, + 'char varying': VARCHAR, + 'unichar': UNICHAR, + 'unicode character': UNIVARCHAR, + 'nchar': NCHAR, + 'national char': NCHAR, + 'national character': NCHAR, + 'nvarchar': NVARCHAR, + 'nchar varying': NVARCHAR, + 'national char varying': NVARCHAR, + 'national character varying': NVARCHAR, + 'text': TEXT, + 'unitext': UNITEXT, + 'binary': BINARY, + 'varbinary': VARBINARY, + 'image': IMAGE, + 'bit': BIT, + + # not in documentation for ASE 15.7 + 'long varchar': TEXT, # TODO + 'timestamp': TIMESTAMP, + 'uniqueidentifier': UNIQUEIDENTIFIER, + +} + + +class SybaseInspector(reflection.Inspector): + + def __init__(self, conn): + reflection.Inspector.__init__(self, conn) + + def get_table_id(self, table_name, schema=None): + """Return the table id from `table_name` and `schema`.""" + + return self.dialect.get_table_id(self.bind, table_name, schema, + info_cache=self.info_cache) + + +class SybaseExecutionContext(default.DefaultExecutionContext): + _enable_identity_insert = False + + def set_ddl_autocommit(self, connection, value): + """Must be implemented by subclasses to accommodate DDL executions. + + "connection" is the raw unwrapped DBAPI connection. "value" + is True or False. when True, the connection should be configured + such that a DDL can take place subsequently. when False, + a DDL has taken place and the connection should be resumed + into non-autocommit mode. 
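+ + A minimal sketch of what a subclass might do (an assumption for + illustration, for a DBAPI that exposes an ``autocommit`` attribute + as pyodbc does):: + + def set_ddl_autocommit(self, connection, value): + connection.autocommit = bool(value)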
+ + """ + raise NotImplementedError() + + def pre_exec(self): + if self.isinsert: + tbl = self.compiled.statement.table + seq_column = tbl._autoincrement_column + insert_has_sequence = seq_column is not None + + if insert_has_sequence: + self._enable_identity_insert = \ + seq_column.key in self.compiled_parameters[0] + else: + self._enable_identity_insert = False + + if self._enable_identity_insert: + self.cursor.execute( + "SET IDENTITY_INSERT %s ON" % + self.dialect.identifier_preparer.format_table(tbl)) + + if self.isddl: + # TODO: to enhance this, we can detect "ddl in tran" on the + # database settings. this error message should be improved to + # include a note about that. + if not self.should_autocommit: + raise exc.InvalidRequestError( + "The Sybase dialect only supports " + "DDL in 'autocommit' mode at this time.") + + self.root_connection.engine.logger.info( + "AUTOCOMMIT (Assuming no Sybase 'ddl in tran')") + + self.set_ddl_autocommit( + self.root_connection.connection.connection, + True) + + def post_exec(self): + if self.isddl: + self.set_ddl_autocommit(self.root_connection, False) + + if self._enable_identity_insert: + self.cursor.execute( + "SET IDENTITY_INSERT %s OFF" % + self.dialect.identifier_preparer. + format_table(self.compiled.statement.table) + ) + + def get_lastrowid(self): + cursor = self.create_cursor() + cursor.execute("SELECT @@identity AS lastrowid") + lastrowid = cursor.fetchone()[0] + cursor.close() + return lastrowid + + +class SybaseSQLCompiler(compiler.SQLCompiler): + ansi_bind_rules = True + + extract_map = util.update_copy( + compiler.SQLCompiler.extract_map, + { + 'doy': 'dayofyear', + 'dow': 'weekday', + 'milliseconds': 'millisecond' + }) + + def get_select_precolumns(self, select, **kw): + s = select._distinct and "DISTINCT " or "" + # TODO: don't think Sybase supports + # bind params for FIRST / TOP + limit = select._limit + if limit: + # if select._limit == 1: + # s += "FIRST " + # else: + # s += "TOP %s " % (select._limit,) + s += "TOP %s " % (limit,) + offset = select._offset + if offset: + if not limit: + # FIXME: sybase doesn't allow an offset without a limit + # so use a huge value for TOP here + s += "TOP 1000000 " + s += "START AT %s " % (offset + 1,) + return s + + def get_from_hint_text(self, table, text): + return text + + def limit_clause(self, select, **kw): + # Limit in sybase is after the select keyword + return "" + + def visit_extract(self, extract, **kw): + field = self.extract_map.get(extract.field, extract.field) + return 'DATEPART("%s", %s)' % ( + field, self.process(extract.expr, **kw)) + + def visit_now_func(self, fn, **kw): + return "GETDATE()" + + def for_update_clause(self, select): + # "FOR UPDATE" is only allowed on "DECLARE CURSOR" + # which SQLAlchemy doesn't use + return '' + + def order_by_clause(self, select, **kw): + kw['literal_binds'] = True + order_by = self.process(select._order_by_clause, **kw) + + # SybaseSQL only allows ORDER BY in subqueries if there is a LIMIT + if order_by and (not self.is_subquery() or select._limit): + return " ORDER BY " + order_by + else: + return "" + + +class SybaseDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): + colspec = self.preparer.format_column(column) + " " + \ + self.dialect.type_compiler.process( + column.type, type_expression=column) + + if column.table is None: + raise exc.CompileError( + "The Sybase dialect requires Table-bound " + "columns in order to generate DDL") + seq_col = column.table._autoincrement_column + + # install a 
IDENTITY Sequence if we have an implicit IDENTITY column + if seq_col is column: + sequence = isinstance(column.default, sa_schema.Sequence) \ + and column.default + if sequence: + start, increment = sequence.start or 1, \ + sequence.increment or 1 + else: + start, increment = 1, 1 + if (start, increment) == (1, 1): + colspec += " IDENTITY" + else: + # TODO: need correct syntax for this + colspec += " IDENTITY(%s,%s)" % (start, increment) + else: + default = self.get_column_default_string(column) + if default is not None: + colspec += " DEFAULT " + default + + if column.nullable is not None: + if not column.nullable or column.primary_key: + colspec += " NOT NULL" + else: + colspec += " NULL" + + return colspec + + def visit_drop_index(self, drop): + index = drop.element + return "\nDROP INDEX %s.%s" % ( + self.preparer.quote_identifier(index.table.name), + self._prepared_index_name(drop.element, + include_schema=False) + ) + + +class SybaseIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words = RESERVED_WORDS + + +class SybaseDialect(default.DefaultDialect): + name = 'sybase' + supports_unicode_statements = False + supports_sane_rowcount = False + supports_sane_multi_rowcount = False + + supports_native_boolean = False + supports_unicode_binds = False + postfetch_lastrowid = True + + colspecs = {} + ischema_names = ischema_names + + type_compiler = SybaseTypeCompiler + statement_compiler = SybaseSQLCompiler + ddl_compiler = SybaseDDLCompiler + preparer = SybaseIdentifierPreparer + inspector = SybaseInspector + + construct_arguments = [] + + def _get_default_schema_name(self, connection): + return connection.scalar( + text("SELECT user_name() as user_name", + typemap={'user_name': Unicode}) + ) + + def initialize(self, connection): + super(SybaseDialect, self).initialize(connection) + if self.server_version_info is not None and\ + self.server_version_info < (15, ): + self.max_identifier_length = 30 + else: + self.max_identifier_length = 255 + + def get_table_id(self, connection, table_name, schema=None, **kw): + """Fetch the id for schema.table_name. + + Several reflection methods require the table id. The idea for using + this method is that it can be fetched one time and cached for + subsequent calls. 
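+ + For example, as the reflection methods later in this module + invoke it:: + + table_id = self.get_table_id(connection, table_name, schema, + info_cache=kw.get("info_cache"))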
+
+        """
+
+        table_id = None
+        if schema is None:
+            schema = self.default_schema_name
+
+        TABLEID_SQL = text("""
+          SELECT o.id AS id
+          FROM sysobjects o JOIN sysusers u ON o.uid=u.uid
+          WHERE u.name = :schema_name
+              AND o.name = :table_name
+              AND o.type in ('U', 'V')
+        """)
+
+        if util.py2k:
+            if isinstance(schema, unicode):
+                schema = schema.encode("ascii")
+            if isinstance(table_name, unicode):
+                table_name = table_name.encode("ascii")
+        result = connection.execute(TABLEID_SQL,
+                                    schema_name=schema,
+                                    table_name=table_name)
+        table_id = result.scalar()
+        if table_id is None:
+            raise exc.NoSuchTableError(table_name)
+        return table_id
+
+    @reflection.cache
+    def get_columns(self, connection, table_name, schema=None, **kw):
+        table_id = self.get_table_id(connection, table_name, schema,
+                                     info_cache=kw.get("info_cache"))
+
+        COLUMN_SQL = text("""
+          SELECT col.name AS name,
+                 t.name AS type,
+                 (col.status & 8) AS nullable,
+                 (col.status & 128) AS autoincrement,
+                 com.text AS 'default',
+                 col.prec AS precision,
+                 col.scale AS scale,
+                 col.length AS length
+          FROM systypes t, syscolumns col LEFT OUTER JOIN syscomments com ON
+              col.cdefault = com.id
+          WHERE col.usertype = t.usertype
+              AND col.id = :table_id
+          ORDER BY col.colid
+        """)
+
+        results = connection.execute(COLUMN_SQL, table_id=table_id)
+
+        columns = []
+        for (name, type_, nullable, autoincrement, default, precision, scale,
+             length) in results:
+            col_info = self._get_column_info(name, type_, bool(nullable),
+                                             bool(autoincrement),
+                                             default, precision, scale,
+                                             length)
+            columns.append(col_info)
+
+        return columns
+
+    def _get_column_info(self, name, type_, nullable, autoincrement, default,
+                         precision, scale, length):
+
+        coltype = self.ischema_names.get(type_, None)
+
+        kwargs = {}
+
+        if coltype in (NUMERIC, DECIMAL):
+            args = (precision, scale)
+        elif coltype == FLOAT:
+            args = (precision,)
+        elif coltype in (CHAR, VARCHAR, UNICHAR, UNIVARCHAR, NCHAR, NVARCHAR):
+            args = (length,)
+        else:
+            args = ()
+
+        if coltype:
+            coltype = coltype(*args, **kwargs)
+            # is this necessary
+            # if is_array:
+            #     coltype = ARRAY(coltype)
+        else:
+            util.warn("Did not recognize type '%s' of column '%s'" %
+                      (type_, name))
+            coltype = sqltypes.NULLTYPE
+
+        if default:
+            default = re.sub("DEFAULT", "", default).strip()
+            default = re.sub("^'(.*)'$", lambda m: m.group(1), default)
+        else:
+            default = None
+
+        column_info = dict(name=name, type=coltype, nullable=nullable,
+                           default=default, autoincrement=autoincrement)
+        return column_info
+
+    @reflection.cache
+    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
+
+        table_id = self.get_table_id(connection, table_name, schema,
+                                     info_cache=kw.get("info_cache"))
+
+        table_cache = {}
+        column_cache = {}
+        foreign_keys = []
+
+        table_cache[table_id] = {"name": table_name, "schema": schema}
+
+        COLUMN_SQL = text("""
+          SELECT c.colid AS id, c.name AS name
+          FROM syscolumns c
+          WHERE c.id = :table_id
+        """)
+
+        results = connection.execute(COLUMN_SQL, table_id=table_id)
+        columns = {}
+        for col in results:
+            columns[col["id"]] = col["name"]
+        column_cache[table_id] = columns
+
+        REFCONSTRAINT_SQL = text("""
+          SELECT o.name AS name, r.reftabid AS reftable_id,
+            r.keycnt AS 'count',
+            r.fokey1 AS fokey1, r.fokey2 AS fokey2, r.fokey3 AS fokey3,
+            r.fokey4 AS fokey4, r.fokey5 AS fokey5, r.fokey6 AS fokey6,
+            r.fokey7 AS fokey7, r.fokey8 AS fokey8, r.fokey9 AS fokey9,
+            r.fokey10 AS fokey10, r.fokey11 AS fokey11, r.fokey12 AS fokey12,
+            r.fokey13 AS fokey13, r.fokey14 AS fokey14, r.fokey15 AS fokey15,
+            r.fokey16 AS fokey16,
+            r.refkey1 AS refkey1, r.refkey2 AS refkey2, r.refkey3 AS refkey3,
+            r.refkey4 AS refkey4, r.refkey5 AS refkey5, r.refkey6 AS refkey6,
+            r.refkey7 AS refkey7, r.refkey8 AS refkey8, r.refkey9 AS refkey9,
+            r.refkey10 AS refkey10, r.refkey11 AS refkey11,
+            r.refkey12 AS refkey12, r.refkey13 AS refkey13,
+            r.refkey14 AS refkey14, r.refkey15 AS refkey15,
+            r.refkey16 AS refkey16
+          FROM sysreferences r JOIN sysobjects o on r.tableid = o.id
+          WHERE r.tableid = :table_id
+        """)
+        referential_constraints = connection.execute(
+            REFCONSTRAINT_SQL, table_id=table_id).fetchall()
+
+        REFTABLE_SQL = text("""
+          SELECT o.name AS name, u.name AS 'schema'
+          FROM sysobjects o JOIN sysusers u ON o.uid = u.uid
+          WHERE o.id = :table_id
+        """)
+
+        for r in referential_constraints:
+            reftable_id = r["reftable_id"]
+
+            if reftable_id not in table_cache:
+                c = connection.execute(REFTABLE_SQL, table_id=reftable_id)
+                reftable = c.fetchone()
+                c.close()
+                table_info = {"name": reftable["name"], "schema": None}
+                if (schema is not None or
+                        reftable["schema"] != self.default_schema_name):
+                    table_info["schema"] = reftable["schema"]
+
+                table_cache[reftable_id] = table_info
+                results = connection.execute(COLUMN_SQL, table_id=reftable_id)
+                reftable_columns = {}
+                for col in results:
+                    reftable_columns[col["id"]] = col["name"]
+                column_cache[reftable_id] = reftable_columns
+
+            reftable = table_cache[reftable_id]
+            reftable_columns = column_cache[reftable_id]
+
+            constrained_columns = []
+            referred_columns = []
+            for i in range(1, r["count"] + 1):
+                constrained_columns.append(columns[r["fokey%i" % i]])
+                referred_columns.append(reftable_columns[r["refkey%i" % i]])
+
+            fk_info = {
+                "constrained_columns": constrained_columns,
+                "referred_schema": reftable["schema"],
+                "referred_table": reftable["name"],
+                "referred_columns": referred_columns,
+                "name": r["name"]
+            }
+
+            foreign_keys.append(fk_info)
+
+        return foreign_keys
+
+    @reflection.cache
+    def get_indexes(self, connection, table_name, schema=None, **kw):
+        table_id = self.get_table_id(connection, table_name, schema,
+                                     info_cache=kw.get("info_cache"))
+
+        INDEX_SQL = text("""
+          SELECT object_name(i.id) AS table_name,
+                 i.keycnt AS 'count',
+                 i.name AS name,
+                 (i.status & 0x2) AS 'unique',
+                 index_col(object_name(i.id), i.indid, 1) AS col_1,
+                 index_col(object_name(i.id), i.indid, 2) AS col_2,
+                 index_col(object_name(i.id), i.indid, 3) AS col_3,
+                 index_col(object_name(i.id), i.indid, 4) AS col_4,
+                 index_col(object_name(i.id), i.indid, 5) AS col_5,
+                 index_col(object_name(i.id), i.indid, 6) AS col_6,
+                 index_col(object_name(i.id), i.indid, 7) AS col_7,
+                 index_col(object_name(i.id), i.indid, 8) AS col_8,
+                 index_col(object_name(i.id), i.indid, 9) AS col_9,
+                 index_col(object_name(i.id), i.indid, 10) AS col_10,
+                 index_col(object_name(i.id), i.indid, 11) AS col_11,
+                 index_col(object_name(i.id), i.indid, 12) AS col_12,
+                 index_col(object_name(i.id), i.indid, 13) AS col_13,
+                 index_col(object_name(i.id), i.indid, 14) AS col_14,
+                 index_col(object_name(i.id), i.indid, 15) AS col_15,
+                 index_col(object_name(i.id), i.indid, 16) AS col_16
+          FROM sysindexes i, sysobjects o
+          WHERE o.id = i.id
+            AND o.id = :table_id
+            AND (i.status & 2048) = 0
+            AND i.indid BETWEEN 1 AND 254
+        """)
+
+        results = connection.execute(INDEX_SQL, table_id=table_id)
+        indexes = []
+        for r in results:
+            column_names = []
+            for i in range(1, r["count"]):
+                column_names.append(r["col_%i" % (i,)])
+            index_info = {"name": r["name"],
+                          "unique": bool(r["unique"]),
+
"column_names": column_names} + indexes.append(index_info) + + return indexes + + @reflection.cache + def get_pk_constraint(self, connection, table_name, schema=None, **kw): + table_id = self.get_table_id(connection, table_name, schema, + info_cache=kw.get("info_cache")) + + PK_SQL = text(""" + SELECT object_name(i.id) AS table_name, + i.keycnt AS 'count', + i.name AS name, + index_col(object_name(i.id), i.indid, 1) AS pk_1, + index_col(object_name(i.id), i.indid, 2) AS pk_2, + index_col(object_name(i.id), i.indid, 3) AS pk_3, + index_col(object_name(i.id), i.indid, 4) AS pk_4, + index_col(object_name(i.id), i.indid, 5) AS pk_5, + index_col(object_name(i.id), i.indid, 6) AS pk_6, + index_col(object_name(i.id), i.indid, 7) AS pk_7, + index_col(object_name(i.id), i.indid, 8) AS pk_8, + index_col(object_name(i.id), i.indid, 9) AS pk_9, + index_col(object_name(i.id), i.indid, 10) AS pk_10, + index_col(object_name(i.id), i.indid, 11) AS pk_11, + index_col(object_name(i.id), i.indid, 12) AS pk_12, + index_col(object_name(i.id), i.indid, 13) AS pk_13, + index_col(object_name(i.id), i.indid, 14) AS pk_14, + index_col(object_name(i.id), i.indid, 15) AS pk_15, + index_col(object_name(i.id), i.indid, 16) AS pk_16 + FROM sysindexes i, sysobjects o + WHERE o.id = i.id + AND o.id = :table_id + AND (i.status & 2048) = 2048 + AND i.indid BETWEEN 1 AND 254 + """) + + results = connection.execute(PK_SQL, table_id=table_id) + pks = results.fetchone() + results.close() + + constrained_columns = [] + if pks: + for i in range(1, pks["count"] + 1): + constrained_columns.append(pks["pk_%i" % (i,)]) + return {"constrained_columns": constrained_columns, + "name": pks["name"]} + else: + return {"constrained_columns": [], "name": None} + + @reflection.cache + def get_schema_names(self, connection, **kw): + + SCHEMA_SQL = text("SELECT u.name AS name FROM sysusers u") + + schemas = connection.execute(SCHEMA_SQL) + + return [s["name"] for s in schemas] + + @reflection.cache + def get_table_names(self, connection, schema=None, **kw): + if schema is None: + schema = self.default_schema_name + + TABLE_SQL = text(""" + SELECT o.name AS name + FROM sysobjects o JOIN sysusers u ON o.uid = u.uid + WHERE u.name = :schema_name + AND o.type = 'U' + """) + + if util.py2k: + if isinstance(schema, unicode): + schema = schema.encode("ascii") + + tables = connection.execute(TABLE_SQL, schema_name=schema) + + return [t["name"] for t in tables] + + @reflection.cache + def get_view_definition(self, connection, view_name, schema=None, **kw): + if schema is None: + schema = self.default_schema_name + + VIEW_DEF_SQL = text(""" + SELECT c.text + FROM syscomments c JOIN sysobjects o ON c.id = o.id + WHERE o.name = :view_name + AND o.type = 'V' + """) + + if util.py2k: + if isinstance(view_name, unicode): + view_name = view_name.encode("ascii") + + view = connection.execute(VIEW_DEF_SQL, view_name=view_name) + + return view.scalar() + + @reflection.cache + def get_view_names(self, connection, schema=None, **kw): + if schema is None: + schema = self.default_schema_name + + VIEW_SQL = text(""" + SELECT o.name AS name + FROM sysobjects o JOIN sysusers u ON o.uid = u.uid + WHERE u.name = :schema_name + AND o.type = 'V' + """) + + if util.py2k: + if isinstance(schema, unicode): + schema = schema.encode("ascii") + views = connection.execute(VIEW_SQL, schema_name=schema) + + return [v["name"] for v in views] + + def has_table(self, connection, table_name, schema=None): + try: + self.get_table_id(connection, table_name, schema) + except 
exc.NoSuchTableError:
+            return False
+        else:
+            return True
diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/mxodbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/mxodbc.py
new file mode 100644
index 0000000..60e6510
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/mxodbc.py
@@ -0,0 +1,33 @@
+# sybase/mxodbc.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""
+
+.. dialect:: sybase+mxodbc
+    :name: mxODBC
+    :dbapi: mxodbc
+    :connectstring: sybase+mxodbc://<username>:<password>@<dsnname>
+    :url: http://www.egenix.com/
+
+.. note::
+
+    This dialect is a stub only and is likely non-functional at this time.
+
+
+"""
+from sqlalchemy.dialects.sybase.base import SybaseDialect
+from sqlalchemy.dialects.sybase.base import SybaseExecutionContext
+from sqlalchemy.connectors.mxodbc import MxODBCConnector
+
+
+class SybaseExecutionContext_mxodbc(SybaseExecutionContext):
+    pass
+
+
+class SybaseDialect_mxodbc(MxODBCConnector, SybaseDialect):
+    execution_ctx_cls = SybaseExecutionContext_mxodbc
+
+dialect = SybaseDialect_mxodbc
diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pyodbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pyodbc.py
new file mode 100644
index 0000000..348ca32
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pyodbc.py
@@ -0,0 +1,86 @@
+# sybase/pyodbc.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+.. dialect:: sybase+pyodbc
+    :name: PyODBC
+    :dbapi: pyodbc
+    :connectstring: sybase+pyodbc://<username>:<password>@<dsnname>\
+[/<database>]
+    :url: http://pypi.python.org/pypi/pyodbc/
+
+
+Unicode Support
+---------------
+
+The pyodbc driver currently supports usage of these Sybase types with
+Unicode or multibyte strings::
+
+    CHAR
+    NCHAR
+    NVARCHAR
+    TEXT
+    VARCHAR
+
+Currently *not* supported are::
+
+    UNICHAR
+    UNITEXT
+    UNIVARCHAR
+
+"""
+
+from sqlalchemy.dialects.sybase.base import SybaseDialect,\
+    SybaseExecutionContext
+from sqlalchemy.connectors.pyodbc import PyODBCConnector
+from sqlalchemy import types as sqltypes, processors
+import decimal
+
+
+class _SybNumeric_pyodbc(sqltypes.Numeric):
+    """Turns Decimals with adjusted() < -6 into floats.
+
+    It's not yet known how to get decimals with many
+    significant digits or very large adjusted() into Sybase
+    via pyodbc.
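+
+    An illustrative sketch of the cutoff (not part of the original
+    source)::
+
+        from decimal import Decimal
+
+        Decimal("0.00000001").adjusted()   # -8: bound as a float
+        Decimal("1234.5").adjusted()       # 3: bound as a Decimal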
+
+    """
+
+    def bind_processor(self, dialect):
+        super_process = super(_SybNumeric_pyodbc, self).\
+            bind_processor(dialect)
+
+        def process(value):
+            if self.asdecimal and \
+                    isinstance(value, decimal.Decimal):
+
+                if value.adjusted() < -6:
+                    return processors.to_float(value)
+
+            if super_process:
+                return super_process(value)
+            else:
+                return value
+        return process
+
+
+class SybaseExecutionContext_pyodbc(SybaseExecutionContext):
+    def set_ddl_autocommit(self, connection, value):
+        if value:
+            connection.autocommit = True
+        else:
+            connection.autocommit = False
+
+
+class SybaseDialect_pyodbc(PyODBCConnector, SybaseDialect):
+    execution_ctx_cls = SybaseExecutionContext_pyodbc
+
+    colspecs = {
+        sqltypes.Numeric: _SybNumeric_pyodbc,
+    }
+
+dialect = SybaseDialect_pyodbc
diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pysybase.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pysybase.py
new file mode 100644
index 0000000..41ca47f
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pysybase.py
@@ -0,0 +1,102 @@
+# sybase/pysybase.py
+# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+.. dialect:: sybase+pysybase
+    :name: Python-Sybase
+    :dbapi: Sybase
+    :connectstring: sybase+pysybase://<username>:<password>@<dsn>/\
+[database name]
+    :url: http://python-sybase.sourceforge.net/
+
+Unicode Support
+---------------
+
+The python-sybase driver does not appear to support non-ASCII strings of any
+kind at this time.
+
+"""
+
+from sqlalchemy import types as sqltypes, processors
+from sqlalchemy.dialects.sybase.base import SybaseDialect, \
+    SybaseExecutionContext, SybaseSQLCompiler
+
+
+class _SybNumeric(sqltypes.Numeric):
+    def result_processor(self, dialect, type_):
+        if not self.asdecimal:
+            return processors.to_float
+        else:
+            return sqltypes.Numeric.result_processor(self, dialect, type_)
+
+
+class SybaseExecutionContext_pysybase(SybaseExecutionContext):
+
+    def set_ddl_autocommit(self, dbapi_connection, value):
+        if value:
+            # call commit() on the Sybase connection directly,
+            # to avoid any side effects of calling a Connection
+            # transactional method inside of pre_exec()
+            dbapi_connection.commit()
+
+    def pre_exec(self):
+        SybaseExecutionContext.pre_exec(self)
+
+        for param in self.parameters:
+            for key in list(param):
+                param["@" + key] = param[key]
+                del param[key]
+
+
+class SybaseSQLCompiler_pysybase(SybaseSQLCompiler):
+    def bindparam_string(self, name, **kw):
+        return "@" + name
+
+
+class SybaseDialect_pysybase(SybaseDialect):
+    driver = 'pysybase'
+    execution_ctx_cls = SybaseExecutionContext_pysybase
+    statement_compiler = SybaseSQLCompiler_pysybase
+
+    colspecs = {
+        sqltypes.Numeric: _SybNumeric,
+        sqltypes.Float: sqltypes.Float
+    }
+
+    @classmethod
+    def dbapi(cls):
+        import Sybase
+        return Sybase
+
+    def create_connect_args(self, url):
+        opts = url.translate_connect_args(username='user', password='passwd')
+
+        return ([opts.pop('host')], opts)
+
+    def do_executemany(self, cursor, statement, parameters, context=None):
+        # calling python-sybase executemany yields:
+        # TypeError: string too long for buffer
+        for param in parameters:
+            cursor.execute(statement, param)
+
+    def _get_server_version_info(self, connection):
+        vers = connection.scalar("select @@version_number")
+        # i.e.
15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0), + # (12, 5, 0, 0) + return (vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10) + + def is_disconnect(self, e, connection, cursor): + if isinstance(e, (self.dbapi.OperationalError, + self.dbapi.ProgrammingError)): + msg = str(e) + return ('Unable to complete network request to host' in msg or + 'Invalid connection state' in msg or + 'Invalid cursor state' in msg) + else: + return False + +dialect = SybaseDialect_pysybase diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/__init__.py b/lib/python3.4/site-packages/sqlalchemy/engine/__init__.py new file mode 100644 index 0000000..09054d9 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/engine/__init__.py @@ -0,0 +1,433 @@ +# engine/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""SQL connections, SQL execution and high-level DB-API interface. + +The engine package defines the basic components used to interface +DB-API modules with higher-level statement construction, +connection-management, execution and result contexts. The primary +"entry point" class into this package is the Engine and its public +constructor ``create_engine()``. + +This package includes: + +base.py + Defines interface classes and some implementation classes which + comprise the basic components used to interface between a DB-API, + constructed and plain-text statements, connections, transactions, + and results. + +default.py + Contains default implementations of some of the components defined + in base.py. All current database dialects use the classes in + default.py as base classes for their own database-specific + implementations. + +strategies.py + The mechanics of constructing ``Engine`` objects are represented + here. Defines the ``EngineStrategy`` class which represents how + to go from arguments specified to the ``create_engine()`` + function, to a fully constructed ``Engine``, including + initialization of connection pooling, dialects, and specific + subclasses of ``Engine``. + +threadlocal.py + The ``TLEngine`` class is defined here, which is a subclass of + the generic ``Engine`` and tracks ``Connection`` and + ``Transaction`` objects against the identity of the current + thread. This allows certain programming patterns based around + the concept of a "thread-local connection" to be possible. + The ``TLEngine`` is created by using the "threadlocal" engine + strategy in conjunction with the ``create_engine()`` function. + +url.py + Defines the ``URL`` class which represents the individual + components of a string URL passed to ``create_engine()``. Also + defines a basic module-loading strategy for the dialect specifier + within a URL. +""" + +from .interfaces import ( + Connectable, + Dialect, + ExecutionContext, + ExceptionContext, + + # backwards compat + Compiled, + TypeCompiler +) + +from .base import ( + Connection, + Engine, + NestedTransaction, + RootTransaction, + Transaction, + TwoPhaseTransaction, +) + +from .result import ( + BaseRowProxy, + BufferedColumnResultProxy, + BufferedColumnRow, + BufferedRowResultProxy, + FullyBufferedResultProxy, + ResultProxy, + RowProxy, +) + +from .util import ( + connection_memoize +) + + +from . 
import util, strategies + +# backwards compat +from ..sql import ddl + +default_strategy = 'plain' + + +def create_engine(*args, **kwargs): + """Create a new :class:`.Engine` instance. + + The standard calling form is to send the URL as the + first positional argument, usually a string + that indicates database dialect and connection arguments:: + + + engine = create_engine("postgresql://scott:tiger@localhost/test") + + Additional keyword arguments may then follow it which + establish various options on the resulting :class:`.Engine` + and its underlying :class:`.Dialect` and :class:`.Pool` + constructs:: + + engine = create_engine("mysql://scott:tiger@hostname/dbname", + encoding='latin1', echo=True) + + The string form of the URL is + ``dialect[+driver]://user:password@host/dbname[?key=value..]``, where + ``dialect`` is a database name such as ``mysql``, ``oracle``, + ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as + ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively, + the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`. + + ``**kwargs`` takes a wide variety of options which are routed + towards their appropriate components. Arguments may be specific to + the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the + :class:`.Pool`. Specific dialects also accept keyword arguments that + are unique to that dialect. Here, we describe the parameters + that are common to most :func:`.create_engine()` usage. + + Once established, the newly resulting :class:`.Engine` will + request a connection from the underlying :class:`.Pool` once + :meth:`.Engine.connect` is called, or a method which depends on it + such as :meth:`.Engine.execute` is invoked. The :class:`.Pool` in turn + will establish the first actual DBAPI connection when this request + is received. The :func:`.create_engine` call itself does **not** + establish any actual DBAPI connections directly. + + .. seealso:: + + :doc:`/core/engines` + + :doc:`/dialects/index` + + :ref:`connections_toplevel` + + :param case_sensitive=True: if False, result column names + will match in a case-insensitive fashion, that is, + ``row['SomeColumn']``. + + .. versionchanged:: 0.8 + By default, result row names match case-sensitively. + In version 0.7 and prior, all matches were case-insensitive. + + :param connect_args: a dictionary of options which will be + passed directly to the DBAPI's ``connect()`` method as + additional keyword arguments. See the example + at :ref:`custom_dbapi_args`. + + :param convert_unicode=False: if set to True, sets + the default behavior of ``convert_unicode`` on the + :class:`.String` type to ``True``, regardless + of a setting of ``False`` on an individual + :class:`.String` type, thus causing all :class:`.String` + -based columns + to accommodate Python ``unicode`` objects. This flag + is useful as an engine-wide setting when using a + DBAPI that does not natively support Python + ``unicode`` objects and raises an error when + one is received (such as pyodbc with FreeTDS). + + See :class:`.String` for further details on + what this flag indicates. + + :param creator: a callable which returns a DBAPI connection. + This creation function will be passed to the underlying + connection pool and will be used to create all new database + connections. Usage of this function causes connection + parameters specified in the URL argument to be bypassed. 
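+
+      A minimal sketch; ``psycopg2`` stands in for any DBAPI here::
+
+          import psycopg2
+
+          def get_conn():
+              return psycopg2.connect(user="scott", host="localhost",
+                                      dbname="test")
+
+          engine = create_engine("postgresql://", creator=get_conn)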
+ + :param echo=False: if True, the Engine will log all statements + as well as a repr() of their parameter lists to the engines + logger, which defaults to sys.stdout. The ``echo`` attribute of + ``Engine`` can be modified at any time to turn logging on and + off. If set to the string ``"debug"``, result rows will be + printed to the standard output as well. This flag ultimately + controls a Python logger; see :ref:`dbengine_logging` for + information on how to configure logging directly. + + :param echo_pool=False: if True, the connection pool will log + all checkouts/checkins to the logging stream, which defaults to + sys.stdout. This flag ultimately controls a Python logger; see + :ref:`dbengine_logging` for information on how to configure logging + directly. + + :param encoding: Defaults to ``utf-8``. This is the string + encoding used by SQLAlchemy for string encode/decode + operations which occur within SQLAlchemy, **outside of + the DBAPI.** Most modern DBAPIs feature some degree of + direct support for Python ``unicode`` objects, + what you see in Python 2 as a string of the form + ``u'some string'``. For those scenarios where the + DBAPI is detected as not supporting a Python ``unicode`` + object, this encoding is used to determine the + source/destination encoding. It is **not used** + for those cases where the DBAPI handles unicode + directly. + + To properly configure a system to accommodate Python + ``unicode`` objects, the DBAPI should be + configured to handle unicode to the greatest + degree as is appropriate - see + the notes on unicode pertaining to the specific + target database in use at :ref:`dialect_toplevel`. + + Areas where string encoding may need to be accommodated + outside of the DBAPI include zero or more of: + + * the values passed to bound parameters, corresponding to + the :class:`.Unicode` type or the :class:`.String` type + when ``convert_unicode`` is ``True``; + * the values returned in result set columns corresponding + to the :class:`.Unicode` type or the :class:`.String` + type when ``convert_unicode`` is ``True``; + * the string SQL statement passed to the DBAPI's + ``cursor.execute()`` method; + * the string names of the keys in the bound parameter + dictionary passed to the DBAPI's ``cursor.execute()`` + as well as ``cursor.setinputsizes()`` methods; + * the string column names retrieved from the DBAPI's + ``cursor.description`` attribute. + + When using Python 3, the DBAPI is required to support + *all* of the above values as Python ``unicode`` objects, + which in Python 3 are just known as ``str``. In Python 2, + the DBAPI does not specify unicode behavior at all, + so SQLAlchemy must make decisions for each of the above + values on a per-DBAPI basis - implementations are + completely inconsistent in their behavior. + + :param execution_options: Dictionary execution options which will + be applied to all connections. See + :meth:`~sqlalchemy.engine.Connection.execution_options` + + :param implicit_returning=True: When ``True``, a RETURNING- + compatible construct, if available, will be used to + fetch newly generated primary key values when a single row + INSERT statement is emitted with no existing returning() + clause. This applies to those backends which support RETURNING + or a compatible construct, including Postgresql, Firebird, Oracle, + Microsoft SQL Server. Set this to ``False`` to disable + the automatic usage of RETURNING. 
+ + :param isolation_level: this string parameter is interpreted by various + dialects in order to affect the transaction isolation level of the + database connection. The parameter essentially accepts some subset of + these string arguments: ``"SERIALIZABLE"``, ``"REPEATABLE_READ"``, + ``"READ_COMMITTED"``, ``"READ_UNCOMMITTED"`` and ``"AUTOCOMMIT"``. + Behavior here varies per backend, and + individual dialects should be consulted directly. + + Note that the isolation level can also be set on a per-:class:`.Connection` + basis as well, using the + :paramref:`.Connection.execution_options.isolation_level` + feature. + + .. seealso:: + + :attr:`.Connection.default_isolation_level` - view default level + + :paramref:`.Connection.execution_options.isolation_level` + - set per :class:`.Connection` isolation level + + :ref:`SQLite Transaction Isolation ` + + :ref:`Postgresql Transaction Isolation ` + + :ref:`MySQL Transaction Isolation ` + + :ref:`session_transaction_isolation` - for the ORM + + :param label_length=None: optional integer value which limits + the size of dynamically generated column labels to that many + characters. If less than 6, labels are generated as + "_(counter)". If ``None``, the value of + ``dialect.max_identifier_length`` is used instead. + + :param listeners: A list of one or more + :class:`~sqlalchemy.interfaces.PoolListener` objects which will + receive connection pool events. + + :param logging_name: String identifier which will be used within + the "name" field of logging records generated within the + "sqlalchemy.engine" logger. Defaults to a hexstring of the + object's id. + + :param max_overflow=10: the number of connections to allow in + connection pool "overflow", that is connections that can be + opened above and beyond the pool_size setting, which defaults + to five. this is only used with :class:`~sqlalchemy.pool.QueuePool`. + + :param module=None: reference to a Python module object (the module + itself, not its string name). Specifies an alternate DBAPI module to + be used by the engine's dialect. Each sub-dialect references a + specific DBAPI which will be imported before first connect. This + parameter causes the import to be bypassed, and the given module to + be used instead. Can be used for testing of DBAPIs as well as to + inject "mock" DBAPI implementations into the :class:`.Engine`. + + :param paramstyle=None: The `paramstyle `_ + to use when rendering bound parameters. This style defaults to the + one recommended by the DBAPI itself, which is retrieved from the + ``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept + more than one paramstyle, and in particular it may be desirable + to change a "named" paramstyle into a "positional" one, or vice versa. + When this attribute is passed, it should be one of the values + ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or + ``"pyformat"``, and should correspond to a parameter style known + to be supported by the DBAPI in use. + + :param pool=None: an already-constructed instance of + :class:`~sqlalchemy.pool.Pool`, such as a + :class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this + pool will be used directly as the underlying connection pool + for the engine, bypassing whatever connection parameters are + present in the URL argument. For information on constructing + connection pools manually, see :ref:`pooling_toplevel`. 
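+
+      A minimal sketch, reusing the hypothetical ``get_conn`` callable
+      from the ``creator`` example above::
+
+          from sqlalchemy.pool import QueuePool
+
+          engine = create_engine(
+              "postgresql://", pool=QueuePool(get_conn, pool_size=5))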
+ + :param poolclass=None: a :class:`~sqlalchemy.pool.Pool` + subclass, which will be used to create a connection pool + instance using the connection parameters given in the URL. Note + this differs from ``pool`` in that you don't actually + instantiate the pool in this case, you just indicate what type + of pool to be used. + + :param pool_logging_name: String identifier which will be used within + the "name" field of logging records generated within the + "sqlalchemy.pool" logger. Defaults to a hexstring of the object's + id. + + :param pool_size=5: the number of connections to keep open + inside the connection pool. This used with + :class:`~sqlalchemy.pool.QueuePool` as + well as :class:`~sqlalchemy.pool.SingletonThreadPool`. With + :class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting + of 0 indicates no limit; to disable pooling, set ``poolclass`` to + :class:`~sqlalchemy.pool.NullPool` instead. + + :param pool_recycle=-1: this setting causes the pool to recycle + connections after the given number of seconds has passed. It + defaults to -1, or no timeout. For example, setting to 3600 + means connections will be recycled after one hour. Note that + MySQL in particular will disconnect automatically if no + activity is detected on a connection for eight hours (although + this is configurable with the MySQLDB connection itself and the + server configuration as well). + + :param pool_reset_on_return='rollback': set the "reset on return" + behavior of the pool, which is whether ``rollback()``, + ``commit()``, or nothing is called upon connections + being returned to the pool. See the docstring for + ``reset_on_return`` at :class:`.Pool`. + + .. versionadded:: 0.7.6 + + :param pool_timeout=30: number of seconds to wait before giving + up on getting a connection from the pool. This is only used + with :class:`~sqlalchemy.pool.QueuePool`. + + :param strategy='plain': selects alternate engine implementations. + Currently available are: + + * the ``threadlocal`` strategy, which is described in + :ref:`threadlocal_strategy`; + * the ``mock`` strategy, which dispatches all statement + execution to a function passed as the argument ``executor``. + See `example in the FAQ + `_. + + :param executor=None: a function taking arguments + ``(sql, *multiparams, **params)``, to which the ``mock`` strategy will + dispatch all statement execution. Used only by ``strategy='mock'``. + + """ + + strategy = kwargs.pop('strategy', default_strategy) + strategy = strategies.strategies[strategy] + return strategy.create(*args, **kwargs) + + +def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): + """Create a new Engine instance using a configuration dictionary. + + The dictionary is typically produced from a config file. + + The keys of interest to ``engine_from_config()`` should be prefixed, e.g. + ``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument + indicates the prefix to be searched for. Each matching key (after the + prefix is stripped) is treated as though it were the corresponding keyword + argument to a :func:`.create_engine` call. + + The only required key is (assuming the default prefix) ``sqlalchemy.url``, + which provides the :ref:`database URL `. + + A select set of keyword arguments will be "coerced" to their + expected type based on string values. The set of arguments + is extensible per-dialect using the ``engine_config_types`` accessor. + + :param configuration: A dictionary (typically produced from a config file, + but this is not a requirement). 
Items whose keys start with the value
+    of 'prefix' will have that prefix stripped, and will then be passed to
+    :ref:`create_engine`.
+
+    :param prefix: Prefix to match and then strip from keys
+        in 'configuration'.
+
+    :param kwargs: Each keyword argument to ``engine_from_config()`` itself
+        overrides the corresponding item taken from the 'configuration'
+        dictionary.  Keyword arguments should *not* be prefixed.
+
+    """
+
+    options = dict((key[len(prefix):], configuration[key])
+                   for key in configuration
+                   if key.startswith(prefix))
+    options['_coerce_config'] = True
+    options.update(kwargs)
+    url = options.pop('url')
+    return create_engine(url, **options)
+
+
+__all__ = (
+    'create_engine',
+    'engine_from_config',
+)
diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/base.py b/lib/python3.4/site-packages/sqlalchemy/engine/base.py
new file mode 100644
index 0000000..80edd95
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/engine/base.py
@@ -0,0 +1,2134 @@
+# engine/base.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+from __future__ import with_statement
+
+"""Defines :class:`.Connection` and :class:`.Engine`.
+
+"""
+
+
+import sys
+from .. import exc, util, log, interfaces
+from ..sql import util as sql_util
+from .interfaces import Connectable, ExceptionContext
+from .util import _distill_params
+import contextlib
+
+
+class Connection(Connectable):
+    """Provides high-level functionality for a wrapped DB-API connection.
+
+    Provides execution support for string-based SQL statements as well as
+    :class:`.ClauseElement`, :class:`.Compiled` and :class:`.DefaultGenerator`
+    objects.  Provides a :meth:`begin` method to return :class:`.Transaction`
+    objects.
+
+    The Connection object is **not** thread-safe.  While a Connection can be
+    shared among threads using properly synchronized access, it is still
+    possible that the underlying DBAPI connection may not support shared
+    access between threads.  Check the DBAPI documentation for details.
+
+    The Connection object represents a single dbapi connection checked out
+    from the connection pool.  In this state, the connection pool has no
+    effect upon the connection, including its expiration or timeout state.
+    For the connection pool to properly manage connections, connections
+    should be returned to the connection pool (i.e. ``connection.close()``)
+    whenever the connection is not in use.
+
+    .. index::
+      single: thread safety; Connection
+
+    """
+
+    def __init__(self, engine, connection=None, close_with_result=False,
+                 _branch_from=None, _execution_options=None,
+                 _dispatch=None,
+                 _has_events=None):
+        """Construct a new Connection.
+
+        The constructor here is not public and is called only by an
+        :class:`.Engine`.  See :meth:`.Engine.connect` and
+        :meth:`.Engine.contextual_connect` methods.
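+
+        E.g. the public path for obtaining a :class:`.Connection`
+        (a minimal sketch)::
+
+            engine = create_engine("sqlite://")
+            conn = engine.connect()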
+ + """ + self.engine = engine + self.dialect = engine.dialect + self.__branch_from = _branch_from + self.__branch = _branch_from is not None + + if _branch_from: + self.__connection = connection + self._execution_options = _execution_options + self._echo = _branch_from._echo + self.should_close_with_result = False + self.dispatch = _dispatch + self._has_events = _branch_from._has_events + else: + self.__connection = connection \ + if connection is not None else engine.raw_connection() + self.__transaction = None + self.__savepoint_seq = 0 + self.should_close_with_result = close_with_result + self.__invalid = False + self.__can_reconnect = True + self._echo = self.engine._should_log_info() + + if _has_events is None: + # if _has_events is sent explicitly as False, + # then don't join the dispatch of the engine; we don't + # want to handle any of the engine's events in that case. + self.dispatch = self.dispatch._join(engine.dispatch) + self._has_events = _has_events or ( + _has_events is None and engine._has_events) + + assert not _execution_options + self._execution_options = engine._execution_options + + if self._has_events or self.engine._has_events: + self.dispatch.engine_connect(self, self.__branch) + + def _branch(self): + """Return a new Connection which references this Connection's + engine and connection; but does not have close_with_result enabled, + and also whose close() method does nothing. + + The Core uses this very sparingly, only in the case of + custom SQL default functions that are to be INSERTed as the + primary key of a row where we need to get the value back, so we have + to invoke it distinctly - this is a very uncommon case. + + Userland code accesses _branch() when the connect() or + contextual_connect() methods are called. The branched connection + acts as much as possible like the parent, except that it stays + connected when a close() event occurs. + + """ + if self.__branch_from: + return self.__branch_from._branch() + else: + return self.engine._connection_cls( + self.engine, + self.__connection, + _branch_from=self, + _execution_options=self._execution_options, + _has_events=self._has_events, + _dispatch=self.dispatch) + + @property + def _root(self): + """return the 'root' connection. + + Returns 'self' if this connection is not a branch, else + returns the root connection from which we ultimately branched. + + """ + + if self.__branch_from: + return self.__branch_from + else: + return self + + def _clone(self): + """Create a shallow copy of this Connection. + + """ + c = self.__class__.__new__(self.__class__) + c.__dict__ = self.__dict__.copy() + return c + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + def execution_options(self, **opt): + """ Set non-SQL options for the connection which take effect + during execution. + + The method returns a copy of this :class:`.Connection` which references + the same underlying DBAPI connection, but also defines the given + execution options which will take effect for a call to + :meth:`execute`. As the new :class:`.Connection` references the same + underlying resource, it's usually a good idea to ensure that the copies + will be discarded immediately, which is implicit if used as in:: + + result = connection.execution_options(stream_results=True).\\ + execute(stmt) + + Note that any key/value can be passed to + :meth:`.Connection.execution_options`, and it will be stored in the + ``_execution_options`` dictionary of the :class:`.Connection`. 
It
+        is suitable for usage by end-user schemes to communicate with
+        event listeners, for example.
+
+        The keywords that are currently recognized by SQLAlchemy itself
+        include all those listed under :meth:`.Executable.execution_options`,
+        as well as others that are specific to :class:`.Connection`.
+
+        :param autocommit: Available on: Connection, statement.
+          When True, a COMMIT will be invoked after execution
+          when executed in 'autocommit' mode, i.e. when an explicit
+          transaction is not begun on the connection.   Note that DBAPI
+          connections by default are always in a transaction - SQLAlchemy uses
+          rules applied to different kinds of statements to determine if
+          COMMIT will be invoked in order to provide its "autocommit" feature.
+          Typically, all INSERT/UPDATE/DELETE statements as well as
+          CREATE/DROP statements have autocommit behavior enabled; SELECT
+          constructs do not.   Use this option when invoking a SELECT or other
+          specific SQL construct where COMMIT is desired (typically when
+          calling stored procedures and such), and an explicit
+          transaction is not in progress.
+
+        :param compiled_cache: Available on: Connection.
+          A dictionary where :class:`.Compiled` objects
+          will be cached when the :class:`.Connection` compiles a clause
+          expression into a :class:`.Compiled` object.
+          It is the user's responsibility to
+          manage the size of this dictionary, which will have keys
+          corresponding to the dialect, clause element, the column
+          names within the VALUES or SET clause of an INSERT or UPDATE,
+          as well as the "batch" mode for an INSERT or UPDATE statement.
+          The format of this dictionary is not guaranteed to stay the
+          same in future releases.
+
+          Note that the ORM makes use of its own "compiled" caches for
+          some operations, including flush operations.  The caching
+          used by the ORM internally supersedes a cache dictionary
+          specified here.
+
+        :param isolation_level: Available on: :class:`.Connection`.
+          Set the transaction isolation level for
+          the lifespan of this :class:`.Connection` object (*not* the
+          underlying DBAPI connection, for which the level is reset
+          to its original setting upon termination of this
+          :class:`.Connection` object).
+
+          Valid values include
+          those string values accepted by the
+          :paramref:`.create_engine.isolation_level`
+          parameter passed to :func:`.create_engine`.  These levels are
+          semi-database specific; see individual dialect documentation for
+          valid levels.
+
+          Note that this option necessarily affects the underlying
+          DBAPI connection for the lifespan of the originating
+          :class:`.Connection`, and is not per-execution.  This
+          setting is not removed until the underlying DBAPI connection
+          is returned to the connection pool, i.e.
+          the :meth:`.Connection.close` method is called.
+
+          .. warning::  The ``isolation_level`` execution option should
+             **not** be used when a transaction is already established, that
+             is, the :meth:`.Connection.begin` method or similar has been
+             called.  A database cannot change the isolation level on a
+             transaction in progress, and different DBAPIs and/or
+             SQLAlchemy dialects may implicitly roll back or commit
+             the transaction, or not affect the connection at all.
+
+          .. versionchanged:: 0.9.9 A warning is emitted when the
+             ``isolation_level`` execution option is used after a
+             transaction has been started with :meth:`.Connection.begin`
+             or similar.
+
+          .. note:: The ``isolation_level`` execution option is implicitly
+             reset if the :class:`.Connection` is invalidated, e.g.
via + the :meth:`.Connection.invalidate` method, or if a + disconnection error occurs. The new connection produced after + the invalidation will not have the isolation level re-applied + to it automatically. + + .. seealso:: + + :paramref:`.create_engine.isolation_level` + - set per :class:`.Engine` isolation level + + :meth:`.Connection.get_isolation_level` - view current level + + :ref:`SQLite Transaction Isolation ` + + :ref:`Postgresql Transaction Isolation ` + + :ref:`MySQL Transaction Isolation ` + + :ref:`session_transaction_isolation` - for the ORM + + :param no_parameters: When ``True``, if the final parameter + list or dictionary is totally empty, will invoke the + statement on the cursor as ``cursor.execute(statement)``, + not passing the parameter collection at all. + Some DBAPIs such as psycopg2 and mysql-python consider + percent signs as significant only when parameters are + present; this option allows code to generate SQL + containing percent signs (and possibly other characters) + that is neutral regarding whether it's executed by the DBAPI + or piped into a script that's later invoked by + command line tools. + + .. versionadded:: 0.7.6 + + :param stream_results: Available on: Connection, statement. + Indicate to the dialect that results should be + "streamed" and not pre-buffered, if possible. This is a limitation + of many DBAPIs. The flag is currently understood only by the + psycopg2 dialect. + + """ + c = self._clone() + c._execution_options = c._execution_options.union(opt) + if self._has_events or self.engine._has_events: + self.dispatch.set_connection_execution_options(c, opt) + self.dialect.set_connection_execution_options(c, opt) + return c + + @property + def closed(self): + """Return True if this connection is closed.""" + + return '_Connection__connection' not in self.__dict__ \ + and not self.__can_reconnect + + @property + def invalidated(self): + """Return True if this connection was invalidated.""" + + return self._root.__invalid + + @property + def connection(self): + """The underlying DB-API connection managed by this Connection. + + .. seealso:: + + + :ref:`dbapi_connections` + + """ + + try: + return self.__connection + except AttributeError: + try: + return self._revalidate_connection() + except Exception as e: + self._handle_dbapi_exception(e, None, None, None, None) + + def get_isolation_level(self): + """Return the current isolation level assigned to this + :class:`.Connection`. + + This will typically be the default isolation level as determined + by the dialect, unless if the + :paramref:`.Connection.execution_options.isolation_level` + feature has been used to alter the isolation level on a + per-:class:`.Connection` basis. + + This attribute will typically perform a live SQL operation in order + to procure the current isolation level, so the value returned is the + actual level on the underlying DBAPI connection regardless of how + this state was set. Compare to the + :attr:`.Connection.default_isolation_level` accessor + which returns the dialect-level setting without performing a SQL + query. + + .. versionadded:: 0.9.9 + + .. 
seealso:: + + :attr:`.Connection.default_isolation_level` - view default level + + :paramref:`.create_engine.isolation_level` + - set per :class:`.Engine` isolation level + + :paramref:`.Connection.execution_options.isolation_level` + - set per :class:`.Connection` isolation level + + """ + try: + return self.dialect.get_isolation_level(self.connection) + except Exception as e: + self._handle_dbapi_exception(e, None, None, None, None) + + @property + def default_isolation_level(self): + """The default isolation level assigned to this :class:`.Connection`. + + This is the isolation level setting that the :class:`.Connection` + has when first procured via the :meth:`.Engine.connect` method. + This level stays in place until the + :paramref:`.Connection.execution_options.isolation_level` is used + to change the setting on a per-:class:`.Connection` basis. + + Unlike :meth:`.Connection.get_isolation_level`, this attribute is set + ahead of time from the first connection procured by the dialect, + so SQL query is not invoked when this accessor is called. + + .. versionadded:: 0.9.9 + + .. seealso:: + + :meth:`.Connection.get_isolation_level` - view current level + + :paramref:`.create_engine.isolation_level` + - set per :class:`.Engine` isolation level + + :paramref:`.Connection.execution_options.isolation_level` + - set per :class:`.Connection` isolation level + + """ + return self.dialect.default_isolation_level + + def _revalidate_connection(self): + if self.__branch_from: + return self.__branch_from._revalidate_connection() + if self.__can_reconnect and self.__invalid: + if self.__transaction is not None: + raise exc.InvalidRequestError( + "Can't reconnect until invalid " + "transaction is rolled back") + self.__connection = self.engine.raw_connection(_connection=self) + self.__invalid = False + return self.__connection + raise exc.ResourceClosedError("This Connection is closed") + + @property + def _connection_is_valid(self): + # use getattr() for is_valid to support exceptions raised in + # dialect initializer, where the connection is not wrapped in + # _ConnectionFairy + + return getattr(self.__connection, 'is_valid', False) + + @property + def _still_open_and_connection_is_valid(self): + return \ + not self.closed and \ + not self.invalidated and \ + getattr(self.__connection, 'is_valid', False) + + @property + def info(self): + """Info dictionary associated with the underlying DBAPI connection + referred to by this :class:`.Connection`, allowing user-defined + data to be associated with the connection. + + The data here will follow along with the DBAPI connection including + after it is returned to the connection pool and used again + in subsequent instances of :class:`.Connection`. + + """ + + return self.connection.info + + def connect(self): + """Returns a branched version of this :class:`.Connection`. + + The :meth:`.Connection.close` method on the returned + :class:`.Connection` can be called and this + :class:`.Connection` will remain open. + + This method provides usage symmetry with + :meth:`.Engine.connect`, including for usage + with context managers. + + """ + + return self._branch() + + def contextual_connect(self, **kwargs): + """Returns a branched version of this :class:`.Connection`. + + The :meth:`.Connection.close` method on the returned + :class:`.Connection` can be called and this + :class:`.Connection` will remain open. + + This method provides usage symmetry with + :meth:`.Engine.contextual_connect`, including for usage + with context managers. 
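+
+        A sketch of the branching behavior (``engine`` is assumed)::
+
+            with engine.connect() as conn:
+                branched = conn.contextual_connect()
+                branched.close()  # the parent ``conn`` remains open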
+
+        """
+
+        return self._branch()
+
+    def invalidate(self, exception=None):
+        """Invalidate the underlying DBAPI connection associated with
+        this :class:`.Connection`.
+
+        The underlying DBAPI connection is literally closed (if
+        possible), and is discarded.  Its source connection pool will
+        typically lazily create a new connection to replace it.
+
+        Upon the next use (where "use" typically means using the
+        :meth:`.Connection.execute` method or similar),
+        this :class:`.Connection` will attempt to
+        procure a new DBAPI connection using the services of the
+        :class:`.Pool` as a source of connectivity (e.g. a "reconnection").
+
+        If a transaction was in progress (e.g. the
+        :meth:`.Connection.begin` method has been called) when the
+        :meth:`.Connection.invalidate` method is called, at the DBAPI
+        level all state associated with this transaction is lost, as
+        the DBAPI connection is closed.  The :class:`.Connection`
+        will not allow a reconnection to proceed until the
+        :class:`.Transaction` object is ended, by calling the
+        :meth:`.Transaction.rollback` method; until that point, any attempt at
+        continuing to use the :class:`.Connection` will raise an
+        :class:`~sqlalchemy.exc.InvalidRequestError`.
+        This is to prevent applications from accidentally
+        continuing ongoing transactional operations despite the
+        fact that the transaction has been lost due to an
+        invalidation.
+
+        The :meth:`.Connection.invalidate` method, just like auto-invalidation,
+        will at the connection pool level invoke the
+        :meth:`.PoolEvents.invalidate` event.
+
+        .. seealso::
+
+            :ref:`pool_connection_invalidation`
+
+        """
+
+        if self.invalidated:
+            return
+
+        if self.closed:
+            raise exc.ResourceClosedError("This Connection is closed")
+
+        if self._root._connection_is_valid:
+            self._root.__connection.invalidate(exception)
+        del self._root.__connection
+        self._root.__invalid = True
+
+    def detach(self):
+        """Detach the underlying DB-API connection from its connection pool.
+
+        E.g.::
+
+            with engine.connect() as conn:
+                conn.detach()
+                conn.execute("SET search_path TO schema1, schema2")
+
+                # work with connection
+
+            # connection is fully closed (since we used "with:", can
+            # also call .close())
+
+        This :class:`.Connection` instance will remain usable.  When closed
+        (or exited from a context manager context as above),
+        the DB-API connection will be literally closed and not
+        returned to its originating pool.
+
+        This method can be used to insulate the rest of an application
+        from a modified state on a connection (such as a transaction
+        isolation level or similar).
+
+        """
+
+        self.__connection.detach()
+
+    def begin(self):
+        """Begin a transaction and return a transaction handle.
+
+        The returned object is an instance of :class:`.Transaction`.
+        This object represents the "scope" of the transaction,
+        which completes when either the :meth:`.Transaction.rollback`
+        or :meth:`.Transaction.commit` method is called.
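+
+        E.g. the typical commit/rollback pattern, as a sketch that
+        assumes a ``users`` table construct::
+
+            trans = conn.begin()
+            try:
+                conn.execute(users.insert(), {"name": "some name"})
+                trans.commit()
+            except:
+                trans.rollback()
+                raise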
+
+        Nested calls to :meth:`.begin` on the same :class:`.Connection`
+        will return new :class:`.Transaction` objects that represent
+        an emulated transaction within the scope of the enclosing
+        transaction, that is::
+
+            trans = conn.begin()   # outermost transaction
+            trans2 = conn.begin()  # "nested"
+            trans2.commit()        # does nothing
+            trans.commit()         # actually commits
+
+        Calls to :meth:`.Transaction.commit` only have an effect
+        when invoked via the outermost :class:`.Transaction` object, though the
+        :meth:`.Transaction.rollback` method of any of the
+        :class:`.Transaction` objects will roll back the
+        transaction.
+
+        See also:
+
+        :meth:`.Connection.begin_nested` - use a SAVEPOINT
+
+        :meth:`.Connection.begin_twophase` - use a two phase /XID transaction
+
+        :meth:`.Engine.begin` - context manager available from
+        :class:`.Engine`.
+
+        """
+        if self.__branch_from:
+            return self.__branch_from.begin()
+
+        if self.__transaction is None:
+            self.__transaction = RootTransaction(self)
+            return self.__transaction
+        else:
+            return Transaction(self, self.__transaction)
+
+    def begin_nested(self):
+        """Begin a nested transaction and return a transaction handle.
+
+        The returned object is an instance of :class:`.NestedTransaction`.
+
+        Nested transactions require SAVEPOINT support in the
+        underlying database.  Any transaction in the hierarchy may
+        ``commit`` and ``rollback``, however the outermost transaction
+        still controls the overall ``commit`` or ``rollback`` of the
+        transaction as a whole.
+
+        See also :meth:`.Connection.begin`,
+        :meth:`.Connection.begin_twophase`.
+        """
+        if self.__branch_from:
+            return self.__branch_from.begin_nested()
+
+        if self.__transaction is None:
+            self.__transaction = RootTransaction(self)
+        else:
+            self.__transaction = NestedTransaction(self, self.__transaction)
+        return self.__transaction
+
+    def begin_twophase(self, xid=None):
+        """Begin a two-phase or XA transaction and return a transaction
+        handle.
+
+        The returned object is an instance of :class:`.TwoPhaseTransaction`,
+        which in addition to the methods provided by
+        :class:`.Transaction`, also provides a
+        :meth:`~.TwoPhaseTransaction.prepare` method.
+
+        :param xid: the two phase transaction id.  If not supplied, a
+          random id will be generated.
+
+        See also :meth:`.Connection.begin`,
+        :meth:`.Connection.begin_nested`.
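+
+        A sketch of the two-phase flow, assuming a backend with XA
+        support and a hypothetical ``some_table`` construct::
+
+            conn = engine.connect()
+            xact = conn.begin_twophase()
+            conn.execute(some_table.insert(), {"x": 1})
+            xact.prepare()
+            xact.commit()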
+ + """ + + if self.__branch_from: + return self.__branch_from.begin_twophase(xid=xid) + + if self.__transaction is not None: + raise exc.InvalidRequestError( + "Cannot start a two phase transaction when a transaction " + "is already in progress.") + if xid is None: + xid = self.engine.dialect.create_xid() + self.__transaction = TwoPhaseTransaction(self, xid) + return self.__transaction + + def recover_twophase(self): + return self.engine.dialect.do_recover_twophase(self) + + def rollback_prepared(self, xid, recover=False): + self.engine.dialect.do_rollback_twophase(self, xid, recover=recover) + + def commit_prepared(self, xid, recover=False): + self.engine.dialect.do_commit_twophase(self, xid, recover=recover) + + def in_transaction(self): + """Return True if a transaction is in progress.""" + return self._root.__transaction is not None + + def _begin_impl(self, transaction): + assert not self.__branch_from + + if self._echo: + self.engine.logger.info("BEGIN (implicit)") + + if self._has_events or self.engine._has_events: + self.dispatch.begin(self) + + try: + self.engine.dialect.do_begin(self.connection) + if self.connection._reset_agent is None: + self.connection._reset_agent = transaction + except Exception as e: + self._handle_dbapi_exception(e, None, None, None, None) + + def _rollback_impl(self): + assert not self.__branch_from + + if self._has_events or self.engine._has_events: + self.dispatch.rollback(self) + + if self._still_open_and_connection_is_valid: + if self._echo: + self.engine.logger.info("ROLLBACK") + try: + self.engine.dialect.do_rollback(self.connection) + except Exception as e: + self._handle_dbapi_exception(e, None, None, None, None) + finally: + if not self.__invalid and \ + self.connection._reset_agent is self.__transaction: + self.connection._reset_agent = None + self.__transaction = None + else: + self.__transaction = None + + def _commit_impl(self, autocommit=False): + assert not self.__branch_from + + if self._has_events or self.engine._has_events: + self.dispatch.commit(self) + + if self._echo: + self.engine.logger.info("COMMIT") + try: + self.engine.dialect.do_commit(self.connection) + except Exception as e: + self._handle_dbapi_exception(e, None, None, None, None) + finally: + if not self.__invalid and \ + self.connection._reset_agent is self.__transaction: + self.connection._reset_agent = None + self.__transaction = None + + def _savepoint_impl(self, name=None): + assert not self.__branch_from + + if self._has_events or self.engine._has_events: + self.dispatch.savepoint(self, name) + + if name is None: + self.__savepoint_seq += 1 + name = 'sa_savepoint_%s' % self.__savepoint_seq + if self._still_open_and_connection_is_valid: + self.engine.dialect.do_savepoint(self, name) + return name + + def _rollback_to_savepoint_impl(self, name, context): + assert not self.__branch_from + + if self._has_events or self.engine._has_events: + self.dispatch.rollback_savepoint(self, name, context) + + if self._still_open_and_connection_is_valid: + self.engine.dialect.do_rollback_to_savepoint(self, name) + self.__transaction = context + + def _release_savepoint_impl(self, name, context): + assert not self.__branch_from + + if self._has_events or self.engine._has_events: + self.dispatch.release_savepoint(self, name, context) + + if self._still_open_and_connection_is_valid: + self.engine.dialect.do_release_savepoint(self, name) + self.__transaction = context + + def _begin_twophase_impl(self, transaction): + assert not self.__branch_from + + if self._echo: + 
self.engine.logger.info("BEGIN TWOPHASE (implicit)") + if self._has_events or self.engine._has_events: + self.dispatch.begin_twophase(self, transaction.xid) + + if self._still_open_and_connection_is_valid: + self.engine.dialect.do_begin_twophase(self, transaction.xid) + + if self.connection._reset_agent is None: + self.connection._reset_agent = transaction + + def _prepare_twophase_impl(self, xid): + assert not self.__branch_from + + if self._has_events or self.engine._has_events: + self.dispatch.prepare_twophase(self, xid) + + if self._still_open_and_connection_is_valid: + assert isinstance(self.__transaction, TwoPhaseTransaction) + self.engine.dialect.do_prepare_twophase(self, xid) + + def _rollback_twophase_impl(self, xid, is_prepared): + assert not self.__branch_from + + if self._has_events or self.engine._has_events: + self.dispatch.rollback_twophase(self, xid, is_prepared) + + if self._still_open_and_connection_is_valid: + assert isinstance(self.__transaction, TwoPhaseTransaction) + try: + self.engine.dialect.do_rollback_twophase( + self, xid, is_prepared) + finally: + if self.connection._reset_agent is self.__transaction: + self.connection._reset_agent = None + self.__transaction = None + else: + self.__transaction = None + + def _commit_twophase_impl(self, xid, is_prepared): + assert not self.__branch_from + + if self._has_events or self.engine._has_events: + self.dispatch.commit_twophase(self, xid, is_prepared) + + if self._still_open_and_connection_is_valid: + assert isinstance(self.__transaction, TwoPhaseTransaction) + try: + self.engine.dialect.do_commit_twophase(self, xid, is_prepared) + finally: + if self.connection._reset_agent is self.__transaction: + self.connection._reset_agent = None + self.__transaction = None + else: + self.__transaction = None + + def _autorollback(self): + if not self._root.in_transaction(): + self._root._rollback_impl() + + def close(self): + """Close this :class:`.Connection`. + + This results in a release of the underlying database + resources, that is, the DBAPI connection referenced + internally. The DBAPI connection is typically restored + back to the connection-holding :class:`.Pool` referenced + by the :class:`.Engine` that produced this + :class:`.Connection`. Any transactional state present on + the DBAPI connection is also unconditionally released via + the DBAPI connection's ``rollback()`` method, regardless + of any :class:`.Transaction` object that may be + outstanding with regards to this :class:`.Connection`. + + After :meth:`~.Connection.close` is called, the + :class:`.Connection` is permanently in a closed state, + and will allow no further operations. + + """ + if self.__branch_from: + try: + del self.__connection + except AttributeError: + pass + finally: + self.__can_reconnect = False + return + try: + conn = self.__connection + except AttributeError: + pass + else: + + conn.close() + if conn._reset_agent is self.__transaction: + conn._reset_agent = None + + # the close() process can end up invalidating us, + # as the pool will call our transaction as the "reset_agent" + # for rollback(), which can then cause an invalidation + if not self.__invalid: + del self.__connection + self.__can_reconnect = False + self.__transaction = None + + def scalar(self, object, *multiparams, **params): + """Executes and returns the first column of the first row. + + The underlying result/cursor is closed after execution. 
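+
+        E.g. (``users`` being a hypothetical table)::
+
+            num = conn.scalar("select count(*) from users")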
+
+        """
+
+        return self.execute(object, *multiparams, **params).scalar()
+
+    def execute(self, object, *multiparams, **params):
+        """Executes a SQL statement construct and returns a
+        :class:`.ResultProxy`.
+
+        :param object: The statement to be executed.  May be
+         one of:
+
+         * a plain string
+         * any :class:`.ClauseElement` construct that is also
+           a subclass of :class:`.Executable`, such as a
+           :func:`~.expression.select` construct
+         * a :class:`.FunctionElement`, such as that generated
+           by :data:`.func`, will be automatically wrapped in
+           a SELECT statement, which is then executed.
+         * a :class:`.DDLElement` object
+         * a :class:`.DefaultGenerator` object
+         * a :class:`.Compiled` object
+
+        :param \*multiparams/\**params: represent bound parameter
+         values to be used in the execution.   Typically,
+         the format is either a collection of one or more
+         dictionaries passed to \*multiparams::
+
+             conn.execute(
+                 table.insert(),
+                 {"id":1, "value":"v1"},
+                 {"id":2, "value":"v2"}
+             )
+
+         ...or individual key/values interpreted by \**params::
+
+             conn.execute(
+                 table.insert(), id=1, value="v1"
+             )
+
+         In the case that a plain SQL string is passed, and the underlying
+         DBAPI accepts positional bind parameters, a collection of tuples
+         or individual values in \*multiparams may be passed::
+
+             conn.execute(
+                 "INSERT INTO table (id, value) VALUES (?, ?)",
+                 (1, "v1"), (2, "v2")
+             )
+
+             conn.execute(
+                 "INSERT INTO table (id, value) VALUES (?, ?)",
+                 1, "v1"
+             )
+
+         Note above, the usage of a question mark "?" or other
+         symbol is contingent upon the "paramstyle" accepted by the DBAPI
+         in use, which may be any of "qmark", "named", "pyformat", "format",
+         "numeric".   See `pep-249
+         <http://www.python.org/dev/peps/pep-0249/>`_ for details on
+         paramstyle.
+
+         To execute a textual SQL statement which uses bound parameters in a
+         DBAPI-agnostic way, use the :func:`~.expression.text` construct.
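+
+         For example, an illustrative sketch (table and parameter names
+         are hypothetical)::
+
+             from sqlalchemy import text
+
+             conn.execute(
+                 text("SELECT * FROM users WHERE id = :user_id"),
+                 user_id=7
+             )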
+ + """ + if isinstance(object, util.string_types[0]): + return self._execute_text(object, multiparams, params) + try: + meth = object._execute_on_connection + except AttributeError: + raise exc.InvalidRequestError( + "Unexecutable object type: %s" % + type(object)) + else: + return meth(self, multiparams, params) + + def _execute_function(self, func, multiparams, params): + """Execute a sql.FunctionElement object.""" + + return self._execute_clauseelement(func.select(), + multiparams, params) + + def _execute_default(self, default, multiparams, params): + """Execute a schema.ColumnDefault object.""" + + if self._has_events or self.engine._has_events: + for fn in self.dispatch.before_execute: + default, multiparams, params = \ + fn(self, default, multiparams, params) + + try: + try: + conn = self.__connection + except AttributeError: + conn = self._revalidate_connection() + + dialect = self.dialect + ctx = dialect.execution_ctx_cls._init_default( + dialect, self, conn) + except Exception as e: + self._handle_dbapi_exception(e, None, None, None, None) + + ret = ctx._exec_default(default, None) + if self.should_close_with_result: + self.close() + + if self._has_events or self.engine._has_events: + self.dispatch.after_execute(self, + default, multiparams, params, ret) + + return ret + + def _execute_ddl(self, ddl, multiparams, params): + """Execute a schema.DDL object.""" + + if self._has_events or self.engine._has_events: + for fn in self.dispatch.before_execute: + ddl, multiparams, params = \ + fn(self, ddl, multiparams, params) + + dialect = self.dialect + + compiled = ddl.compile(dialect=dialect) + ret = self._execute_context( + dialect, + dialect.execution_ctx_cls._init_ddl, + compiled, + None, + compiled + ) + if self._has_events or self.engine._has_events: + self.dispatch.after_execute(self, + ddl, multiparams, params, ret) + return ret + + def _execute_clauseelement(self, elem, multiparams, params): + """Execute a sql.ClauseElement object.""" + + if self._has_events or self.engine._has_events: + for fn in self.dispatch.before_execute: + elem, multiparams, params = \ + fn(self, elem, multiparams, params) + + distilled_params = _distill_params(multiparams, params) + if distilled_params: + # note this is usually dict but we support RowProxy + # as well; but dict.keys() as an iterable is OK + keys = distilled_params[0].keys() + else: + keys = [] + + dialect = self.dialect + if 'compiled_cache' in self._execution_options: + key = dialect, elem, tuple(sorted(keys)), len(distilled_params) > 1 + compiled_sql = self._execution_options['compiled_cache'].get(key) + if compiled_sql is None: + compiled_sql = elem.compile( + dialect=dialect, column_keys=keys, + inline=len(distilled_params) > 1) + self._execution_options['compiled_cache'][key] = compiled_sql + else: + compiled_sql = elem.compile( + dialect=dialect, column_keys=keys, + inline=len(distilled_params) > 1) + + ret = self._execute_context( + dialect, + dialect.execution_ctx_cls._init_compiled, + compiled_sql, + distilled_params, + compiled_sql, distilled_params + ) + if self._has_events or self.engine._has_events: + self.dispatch.after_execute(self, + elem, multiparams, params, ret) + return ret + + def _execute_compiled(self, compiled, multiparams, params): + """Execute a sql.Compiled object.""" + + if self._has_events or self.engine._has_events: + for fn in self.dispatch.before_execute: + compiled, multiparams, params = \ + fn(self, compiled, multiparams, params) + + dialect = self.dialect + parameters = _distill_params(multiparams, 
params) + ret = self._execute_context( + dialect, + dialect.execution_ctx_cls._init_compiled, + compiled, + parameters, + compiled, parameters + ) + if self._has_events or self.engine._has_events: + self.dispatch.after_execute(self, + compiled, multiparams, params, ret) + return ret + + def _execute_text(self, statement, multiparams, params): + """Execute a string SQL statement.""" + + if self._has_events or self.engine._has_events: + for fn in self.dispatch.before_execute: + statement, multiparams, params = \ + fn(self, statement, multiparams, params) + + dialect = self.dialect + parameters = _distill_params(multiparams, params) + ret = self._execute_context( + dialect, + dialect.execution_ctx_cls._init_statement, + statement, + parameters, + statement, parameters + ) + if self._has_events or self.engine._has_events: + self.dispatch.after_execute(self, + statement, multiparams, params, ret) + return ret + + def _execute_context(self, dialect, constructor, + statement, parameters, + *args): + """Create an :class:`.ExecutionContext` and execute, returning + a :class:`.ResultProxy`.""" + + try: + try: + conn = self.__connection + except AttributeError: + conn = self._revalidate_connection() + + context = constructor(dialect, self, conn, *args) + except Exception as e: + self._handle_dbapi_exception( + e, + util.text_type(statement), parameters, + None, None) + + if context.compiled: + context.pre_exec() + + cursor, statement, parameters = context.cursor, \ + context.statement, \ + context.parameters + + if not context.executemany: + parameters = parameters[0] + + if self._has_events or self.engine._has_events: + for fn in self.dispatch.before_cursor_execute: + statement, parameters = \ + fn(self, cursor, statement, parameters, + context, context.executemany) + + if self._echo: + self.engine.logger.info(statement) + self.engine.logger.info( + "%r", + sql_util._repr_params(parameters, batches=10) + ) + + evt_handled = False + try: + if context.executemany: + if self.dialect._has_events: + for fn in self.dialect.dispatch.do_executemany: + if fn(cursor, statement, parameters, context): + evt_handled = True + break + if not evt_handled: + self.dialect.do_executemany( + cursor, + statement, + parameters, + context) + elif not parameters and context.no_parameters: + if self.dialect._has_events: + for fn in self.dialect.dispatch.do_execute_no_params: + if fn(cursor, statement, context): + evt_handled = True + break + if not evt_handled: + self.dialect.do_execute_no_params( + cursor, + statement, + context) + else: + if self.dialect._has_events: + for fn in self.dialect.dispatch.do_execute: + if fn(cursor, statement, parameters, context): + evt_handled = True + break + if not evt_handled: + self.dialect.do_execute( + cursor, + statement, + parameters, + context) + except Exception as e: + self._handle_dbapi_exception( + e, + statement, + parameters, + cursor, + context) + + if self._has_events or self.engine._has_events: + self.dispatch.after_cursor_execute(self, cursor, + statement, + parameters, + context, + context.executemany) + + if context.compiled: + context.post_exec() + + if context.is_crud or context.is_text: + result = context._setup_crud_result_proxy() + else: + result = context.get_result_proxy() + if result._metadata is None: + result._soft_close(_autoclose_connection=False) + + if context.should_autocommit and self._root.__transaction is None: + self._root._commit_impl(autocommit=True) + + if result._soft_closed and self.should_close_with_result: + self.close() + + return result + + 
def _cursor_execute(self, cursor, statement, parameters, context=None): + """Execute a statement + params on the given cursor. + + Adds appropriate logging and exception handling. + + This method is used by DefaultDialect for special-case + executions, such as for sequences and column defaults. + The path of statement execution in the majority of cases + terminates at _execute_context(). + + """ + if self._has_events or self.engine._has_events: + for fn in self.dispatch.before_cursor_execute: + statement, parameters = \ + fn(self, cursor, statement, parameters, + context, + False) + + if self._echo: + self.engine.logger.info(statement) + self.engine.logger.info("%r", parameters) + try: + for fn in () if not self.dialect._has_events \ + else self.dialect.dispatch.do_execute: + if fn(cursor, statement, parameters, context): + break + else: + self.dialect.do_execute( + cursor, + statement, + parameters, + context) + except Exception as e: + self._handle_dbapi_exception( + e, + statement, + parameters, + cursor, + context) + + if self._has_events or self.engine._has_events: + self.dispatch.after_cursor_execute(self, cursor, + statement, + parameters, + context, + False) + + def _safe_close_cursor(self, cursor): + """Close the given cursor, catching exceptions + and turning into log warnings. + + """ + try: + cursor.close() + except Exception: + # log the error through the connection pool's logger. + self.engine.pool.logger.error( + "Error closing cursor", exc_info=True) + + _reentrant_error = False + _is_disconnect = False + + def _handle_dbapi_exception(self, + e, + statement, + parameters, + cursor, + context): + exc_info = sys.exc_info() + + if context and context.exception is None: + context.exception = e + + if not self._is_disconnect: + self._is_disconnect = \ + isinstance(e, self.dialect.dbapi.Error) and \ + not self.closed and \ + self.dialect.is_disconnect( + e, + self.__connection if not self.invalidated else None, + cursor) + if context: + context.is_disconnect = self._is_disconnect + + invalidate_pool_on_disconnect = True + + if self._reentrant_error: + util.raise_from_cause( + exc.DBAPIError.instance(statement, + parameters, + e, + self.dialect.dbapi.Error, + dialect=self.dialect), + exc_info + ) + self._reentrant_error = True + try: + # non-DBAPI error - if we already got a context, + # or there's no string statement, don't wrap it + should_wrap = isinstance(e, self.dialect.dbapi.Error) or \ + (statement is not None and context is None) + + if should_wrap: + sqlalchemy_exception = exc.DBAPIError.instance( + statement, + parameters, + e, + self.dialect.dbapi.Error, + connection_invalidated=self._is_disconnect, + dialect=self.dialect) + else: + sqlalchemy_exception = None + + newraise = None + + if (self._has_events or self.engine._has_events) and \ + not self._execution_options.get( + 'skip_user_error_events', False): + # legacy dbapi_error event + if should_wrap and context: + self.dispatch.dbapi_error(self, + cursor, + statement, + parameters, + context, + e) + + # new handle_error event + ctx = ExceptionContextImpl( + e, sqlalchemy_exception, self.engine, + self, cursor, statement, + parameters, context, self._is_disconnect) + + for fn in self.dispatch.handle_error: + try: + # handler returns an exception; + # call next handler in a chain + per_fn = fn(ctx) + if per_fn is not None: + ctx.chained_exception = newraise = per_fn + except Exception as _raised: + # handler raises an exception - stop processing + newraise = _raised + break + + if sqlalchemy_exception and \ + 
self._is_disconnect != ctx.is_disconnect: + sqlalchemy_exception.connection_invalidated = \ + self._is_disconnect = ctx.is_disconnect + + # set up potentially user-defined value for + # invalidate pool. + invalidate_pool_on_disconnect = \ + ctx.invalidate_pool_on_disconnect + + if should_wrap and context: + context.handle_dbapi_exception(e) + + if not self._is_disconnect: + if cursor: + self._safe_close_cursor(cursor) + self._autorollback() + + if newraise: + util.raise_from_cause(newraise, exc_info) + elif should_wrap: + util.raise_from_cause( + sqlalchemy_exception, + exc_info + ) + else: + util.reraise(*exc_info) + + finally: + del self._reentrant_error + if self._is_disconnect: + del self._is_disconnect + if not self.invalidated: + dbapi_conn_wrapper = self.__connection + if invalidate_pool_on_disconnect: + self.engine.pool._invalidate(dbapi_conn_wrapper, e) + self.invalidate(e) + if self.should_close_with_result: + self.close() + + @classmethod + def _handle_dbapi_exception_noconnection(cls, e, dialect, engine): + + exc_info = sys.exc_info() + + is_disconnect = dialect.is_disconnect(e, None, None) + + should_wrap = isinstance(e, dialect.dbapi.Error) + + if should_wrap: + sqlalchemy_exception = exc.DBAPIError.instance( + None, + None, + e, + dialect.dbapi.Error, + connection_invalidated=is_disconnect) + else: + sqlalchemy_exception = None + + newraise = None + + if engine._has_events: + ctx = ExceptionContextImpl( + e, sqlalchemy_exception, engine, None, None, None, + None, None, is_disconnect) + for fn in engine.dispatch.handle_error: + try: + # handler returns an exception; + # call next handler in a chain + per_fn = fn(ctx) + if per_fn is not None: + ctx.chained_exception = newraise = per_fn + except Exception as _raised: + # handler raises an exception - stop processing + newraise = _raised + break + + if sqlalchemy_exception and \ + is_disconnect != ctx.is_disconnect: + sqlalchemy_exception.connection_invalidated = \ + is_disconnect = ctx.is_disconnect + + if newraise: + util.raise_from_cause(newraise, exc_info) + elif should_wrap: + util.raise_from_cause( + sqlalchemy_exception, + exc_info + ) + else: + util.reraise(*exc_info) + + def default_schema_name(self): + return self.engine.dialect.get_default_schema_name(self) + + def transaction(self, callable_, *args, **kwargs): + """Execute the given function within a transaction boundary. + + The function is passed this :class:`.Connection` + as the first argument, followed by the given \*args and \**kwargs, + e.g.:: + + def do_something(conn, x, y): + conn.execute("some statement", {'x':x, 'y':y}) + + conn.transaction(do_something, 5, 10) + + The operations inside the function are all invoked within the + context of a single :class:`.Transaction`. + Upon success, the transaction is committed. If an + exception is raised, the transaction is rolled back + before propagating the exception. + + .. 
note:: + + The :meth:`.transaction` method is superseded by + the usage of the Python ``with:`` statement, which can + be used with :meth:`.Connection.begin`:: + + with conn.begin(): + conn.execute("some statement", {'x':5, 'y':10}) + + As well as with :meth:`.Engine.begin`:: + + with engine.begin() as conn: + conn.execute("some statement", {'x':5, 'y':10}) + + See also: + + :meth:`.Engine.begin` - engine-level transactional + context + + :meth:`.Engine.transaction` - engine-level version of + :meth:`.Connection.transaction` + + """ + + trans = self.begin() + try: + ret = self.run_callable(callable_, *args, **kwargs) + trans.commit() + return ret + except: + with util.safe_reraise(): + trans.rollback() + + def run_callable(self, callable_, *args, **kwargs): + """Given a callable object or function, execute it, passing + a :class:`.Connection` as the first argument. + + The given \*args and \**kwargs are passed subsequent + to the :class:`.Connection` argument. + + This function, along with :meth:`.Engine.run_callable`, + allows a function to be run with a :class:`.Connection` + or :class:`.Engine` object without the need to know + which one is being dealt with. + + """ + return callable_(self, *args, **kwargs) + + def _run_visitor(self, visitorcallable, element, **kwargs): + visitorcallable(self.dialect, self, + **kwargs).traverse_single(element) + + +class ExceptionContextImpl(ExceptionContext): + """Implement the :class:`.ExceptionContext` interface.""" + + def __init__(self, exception, sqlalchemy_exception, + engine, connection, cursor, statement, parameters, + context, is_disconnect): + self.engine = engine + self.connection = connection + self.sqlalchemy_exception = sqlalchemy_exception + self.original_exception = exception + self.execution_context = context + self.statement = statement + self.parameters = parameters + self.is_disconnect = is_disconnect + + +class Transaction(object): + """Represent a database transaction in progress. + + The :class:`.Transaction` object is procured by + calling the :meth:`~.Connection.begin` method of + :class:`.Connection`:: + + from sqlalchemy import create_engine + engine = create_engine("postgresql://scott:tiger@localhost/test") + connection = engine.connect() + trans = connection.begin() + connection.execute("insert into x (a, b) values (1, 2)") + trans.commit() + + The object provides :meth:`.rollback` and :meth:`.commit` + methods in order to control transaction boundaries. It + also implements a context manager interface so that + the Python ``with`` statement can be used with the + :meth:`.Connection.begin` method:: + + with connection.begin(): + connection.execute("insert into x (a, b) values (1, 2)") + + The Transaction object is **not** threadsafe. + + See also: :meth:`.Connection.begin`, :meth:`.Connection.begin_twophase`, + :meth:`.Connection.begin_nested`. + + .. index:: + single: thread safety; Transaction + """ + + def __init__(self, connection, parent): + self.connection = connection + self._actual_parent = parent + self.is_active = True + + @property + def _parent(self): + return self._actual_parent or self + + def close(self): + """Close this :class:`.Transaction`. + + If this transaction is the base transaction in a begin/commit + nesting, the transaction will rollback(). Otherwise, the + method returns. + + This is used to cancel a Transaction without affecting the scope of + an enclosing transaction. 
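+
+        For example, an illustrative sketch::
+
+            trans = conn.begin()    # outermost transaction
+            trans2 = conn.begin()   # inner, emulated transaction
+            trans2.close()          # cancels trans2; outer is unaffected
+            trans.commit()          # outer transaction commits as usual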
+ + """ + if not self._parent.is_active: + return + if self._parent is self: + self.rollback() + + def rollback(self): + """Roll back this :class:`.Transaction`. + + """ + if not self._parent.is_active: + return + self._do_rollback() + self.is_active = False + + def _do_rollback(self): + self._parent.rollback() + + def commit(self): + """Commit this :class:`.Transaction`.""" + + if not self._parent.is_active: + raise exc.InvalidRequestError("This transaction is inactive") + self._do_commit() + self.is_active = False + + def _do_commit(self): + pass + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + if type is None and self.is_active: + try: + self.commit() + except: + with util.safe_reraise(): + self.rollback() + else: + self.rollback() + + +class RootTransaction(Transaction): + def __init__(self, connection): + super(RootTransaction, self).__init__(connection, None) + self.connection._begin_impl(self) + + def _do_rollback(self): + if self.is_active: + self.connection._rollback_impl() + + def _do_commit(self): + if self.is_active: + self.connection._commit_impl() + + +class NestedTransaction(Transaction): + """Represent a 'nested', or SAVEPOINT transaction. + + A new :class:`.NestedTransaction` object may be procured + using the :meth:`.Connection.begin_nested` method. + + The interface is the same as that of :class:`.Transaction`. + + """ + + def __init__(self, connection, parent): + super(NestedTransaction, self).__init__(connection, parent) + self._savepoint = self.connection._savepoint_impl() + + def _do_rollback(self): + if self.is_active: + self.connection._rollback_to_savepoint_impl( + self._savepoint, self._parent) + + def _do_commit(self): + if self.is_active: + self.connection._release_savepoint_impl( + self._savepoint, self._parent) + + +class TwoPhaseTransaction(Transaction): + """Represent a two-phase transaction. + + A new :class:`.TwoPhaseTransaction` object may be procured + using the :meth:`.Connection.begin_twophase` method. + + The interface is the same as that of :class:`.Transaction` + with the addition of the :meth:`prepare` method. + + """ + + def __init__(self, connection, xid): + super(TwoPhaseTransaction, self).__init__(connection, None) + self._is_prepared = False + self.xid = xid + self.connection._begin_twophase_impl(self) + + def prepare(self): + """Prepare this :class:`.TwoPhaseTransaction`. + + After a PREPARE, the transaction can be committed. + + """ + if not self._parent.is_active: + raise exc.InvalidRequestError("This transaction is inactive") + self.connection._prepare_twophase_impl(self.xid) + self._is_prepared = True + + def _do_rollback(self): + self.connection._rollback_twophase_impl(self.xid, self._is_prepared) + + def _do_commit(self): + self.connection._commit_twophase_impl(self.xid, self._is_prepared) + + +class Engine(Connectable, log.Identified): + """ + Connects a :class:`~sqlalchemy.pool.Pool` and + :class:`~sqlalchemy.engine.interfaces.Dialect` together to provide a + source of database connectivity and behavior. + + An :class:`.Engine` object is instantiated publicly using the + :func:`~sqlalchemy.create_engine` function. 
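+
+    E.g., a minimal sketch (the database URL shown is illustrative)::
+
+        from sqlalchemy import create_engine
+
+        engine = create_engine("sqlite:///example.db")
+        with engine.connect() as conn:
+            result = conn.execute("select 1")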
+ + See also: + + :doc:`/core/engines` + + :ref:`connections_toplevel` + + """ + + _execution_options = util.immutabledict() + _has_events = False + _connection_cls = Connection + + def __init__(self, pool, dialect, url, + logging_name=None, echo=None, proxy=None, + execution_options=None + ): + self.pool = pool + self.url = url + self.dialect = dialect + self.pool._dialect = dialect + if logging_name: + self.logging_name = logging_name + self.echo = echo + self.engine = self + log.instance_logger(self, echoflag=echo) + if proxy: + interfaces.ConnectionProxy._adapt_listener(self, proxy) + if execution_options: + self.update_execution_options(**execution_options) + + def update_execution_options(self, **opt): + """Update the default execution_options dictionary + of this :class:`.Engine`. + + The given keys/values in \**opt are added to the + default execution options that will be used for + all connections. The initial contents of this dictionary + can be sent via the ``execution_options`` parameter + to :func:`.create_engine`. + + .. seealso:: + + :meth:`.Connection.execution_options` + + :meth:`.Engine.execution_options` + + """ + self._execution_options = \ + self._execution_options.union(opt) + self.dispatch.set_engine_execution_options(self, opt) + self.dialect.set_engine_execution_options(self, opt) + + def execution_options(self, **opt): + """Return a new :class:`.Engine` that will provide + :class:`.Connection` objects with the given execution options. + + The returned :class:`.Engine` remains related to the original + :class:`.Engine` in that it shares the same connection pool and + other state: + + * The :class:`.Pool` used by the new :class:`.Engine` is the + same instance. The :meth:`.Engine.dispose` method will replace + the connection pool instance for the parent engine as well + as this one. + * Event listeners are "cascaded" - meaning, the new :class:`.Engine` + inherits the events of the parent, and new events can be associated + with the new :class:`.Engine` individually. + * The logging configuration and logging_name is copied from the parent + :class:`.Engine`. + + The intent of the :meth:`.Engine.execution_options` method is + to implement "sharding" schemes where multiple :class:`.Engine` + objects refer to the same connection pool, but are differentiated + by options that would be consumed by a custom event:: + + primary_engine = create_engine("mysql://") + shard1 = primary_engine.execution_options(shard_id="shard1") + shard2 = primary_engine.execution_options(shard_id="shard2") + + Above, the ``shard1`` engine serves as a factory for + :class:`.Connection` objects that will contain the execution option + ``shard_id=shard1``, and ``shard2`` will produce :class:`.Connection` + objects that contain the execution option ``shard_id=shard2``. + + An event handler can consume the above execution option to perform + a schema switch or other operation, given a connection. 
Below
+        we emit a MySQL ``use`` statement to switch databases, at the same
+        time keeping track of which database we've established using the
+        :attr:`.Connection.info` dictionary, which gives us a persistent
+        storage space that follows the DBAPI connection::
+
+            from sqlalchemy import event
+            from sqlalchemy.engine import Engine
+
+            shards = {"default": "base", "shard_1": "db1", "shard_2": "db2"}
+
+            @event.listens_for(Engine, "before_cursor_execute")
+            def _switch_shard(conn, cursor, stmt,
+                    params, context, executemany):
+                shard_id = conn._execution_options.get('shard_id', "default")
+                current_shard = conn.info.get("current_shard", None)
+
+                if current_shard != shard_id:
+                    cursor.execute("use %s" % shards[shard_id])
+                    conn.info["current_shard"] = shard_id
+
+        .. versionadded:: 0.8
+
+        .. seealso::
+
+            :meth:`.Connection.execution_options` - update execution options
+            on a :class:`.Connection` object.
+
+            :meth:`.Engine.update_execution_options` - update the execution
+            options for a given :class:`.Engine` in place.
+
+        """
+        return OptionEngine(self, opt)
+
+    @property
+    def name(self):
+        """String name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
+        in use by this :class:`Engine`."""
+
+        return self.dialect.name
+
+    @property
+    def driver(self):
+        """Driver name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
+        in use by this :class:`Engine`."""
+
+        return self.dialect.driver
+
+    echo = log.echo_property()
+
+    def __repr__(self):
+        return 'Engine(%r)' % self.url
+
+    def dispose(self):
+        """Dispose of the connection pool used by this :class:`.Engine`.
+
+        This has the effect of fully closing all **currently checked in**
+        database connections.  Connections that are still checked out
+        will **not** be closed, however they will no longer be associated
+        with this :class:`.Engine`, so when they are closed individually,
+        eventually the :class:`.Pool` which they are associated with will
+        be garbage collected and they will be closed out fully, if
+        not already closed on checkin.
+
+        A new connection pool is created immediately after the old one has
+        been disposed.   This new pool, like all SQLAlchemy connection pools,
+        does not make any actual connections to the database until one is
+        first requested, so as long as the :class:`.Engine` isn't used again,
+        no new connections will be made.
+
+        .. 
seealso:: + + :ref:`engine_disposal` + + """ + self.pool.dispose() + self.pool = self.pool.recreate() + self.dispatch.engine_disposed(self) + + def _execute_default(self, default): + with self.contextual_connect() as conn: + return conn._execute_default(default, (), {}) + + @contextlib.contextmanager + def _optional_conn_ctx_manager(self, connection=None): + if connection is None: + with self.contextual_connect() as conn: + yield conn + else: + yield connection + + def _run_visitor(self, visitorcallable, element, + connection=None, **kwargs): + with self._optional_conn_ctx_manager(connection) as conn: + conn._run_visitor(visitorcallable, element, **kwargs) + + class _trans_ctx(object): + def __init__(self, conn, transaction, close_with_result): + self.conn = conn + self.transaction = transaction + self.close_with_result = close_with_result + + def __enter__(self): + return self.conn + + def __exit__(self, type, value, traceback): + if type is not None: + self.transaction.rollback() + else: + self.transaction.commit() + if not self.close_with_result: + self.conn.close() + + def begin(self, close_with_result=False): + """Return a context manager delivering a :class:`.Connection` + with a :class:`.Transaction` established. + + E.g.:: + + with engine.begin() as conn: + conn.execute("insert into table (x, y, z) values (1, 2, 3)") + conn.execute("my_special_procedure(5)") + + Upon successful operation, the :class:`.Transaction` + is committed. If an error is raised, the :class:`.Transaction` + is rolled back. + + The ``close_with_result`` flag is normally ``False``, and indicates + that the :class:`.Connection` will be closed when the operation + is complete. When set to ``True``, it indicates the + :class:`.Connection` is in "single use" mode, where the + :class:`.ResultProxy` returned by the first call to + :meth:`.Connection.execute` will close the :class:`.Connection` when + that :class:`.ResultProxy` has exhausted all result rows. + + .. versionadded:: 0.7.6 + + See also: + + :meth:`.Engine.connect` - procure a :class:`.Connection` from + an :class:`.Engine`. + + :meth:`.Connection.begin` - start a :class:`.Transaction` + for a particular :class:`.Connection`. + + """ + conn = self.contextual_connect(close_with_result=close_with_result) + try: + trans = conn.begin() + except: + with util.safe_reraise(): + conn.close() + return Engine._trans_ctx(conn, trans, close_with_result) + + def transaction(self, callable_, *args, **kwargs): + """Execute the given function within a transaction boundary. + + The function is passed a :class:`.Connection` newly procured + from :meth:`.Engine.contextual_connect` as the first argument, + followed by the given \*args and \**kwargs. + + e.g.:: + + def do_something(conn, x, y): + conn.execute("some statement", {'x':x, 'y':y}) + + engine.transaction(do_something, 5, 10) + + The operations inside the function are all invoked within the + context of a single :class:`.Transaction`. + Upon success, the transaction is committed. If an + exception is raised, the transaction is rolled back + before propagating the exception. + + .. 
note:: + + The :meth:`.transaction` method is superseded by + the usage of the Python ``with:`` statement, which can + be used with :meth:`.Engine.begin`:: + + with engine.begin() as conn: + conn.execute("some statement", {'x':5, 'y':10}) + + See also: + + :meth:`.Engine.begin` - engine-level transactional + context + + :meth:`.Connection.transaction` - connection-level version of + :meth:`.Engine.transaction` + + """ + + with self.contextual_connect() as conn: + return conn.transaction(callable_, *args, **kwargs) + + def run_callable(self, callable_, *args, **kwargs): + """Given a callable object or function, execute it, passing + a :class:`.Connection` as the first argument. + + The given \*args and \**kwargs are passed subsequent + to the :class:`.Connection` argument. + + This function, along with :meth:`.Connection.run_callable`, + allows a function to be run with a :class:`.Connection` + or :class:`.Engine` object without the need to know + which one is being dealt with. + + """ + with self.contextual_connect() as conn: + return conn.run_callable(callable_, *args, **kwargs) + + def execute(self, statement, *multiparams, **params): + """Executes the given construct and returns a :class:`.ResultProxy`. + + The arguments are the same as those used by + :meth:`.Connection.execute`. + + Here, a :class:`.Connection` is acquired using the + :meth:`~.Engine.contextual_connect` method, and the statement executed + with that connection. The returned :class:`.ResultProxy` is flagged + such that when the :class:`.ResultProxy` is exhausted and its + underlying cursor is closed, the :class:`.Connection` created here + will also be closed, which allows its associated DBAPI connection + resource to be returned to the connection pool. + + """ + + connection = self.contextual_connect(close_with_result=True) + return connection.execute(statement, *multiparams, **params) + + def scalar(self, statement, *multiparams, **params): + return self.execute(statement, *multiparams, **params).scalar() + + def _execute_clauseelement(self, elem, multiparams=None, params=None): + connection = self.contextual_connect(close_with_result=True) + return connection._execute_clauseelement(elem, multiparams, params) + + def _execute_compiled(self, compiled, multiparams, params): + connection = self.contextual_connect(close_with_result=True) + return connection._execute_compiled(compiled, multiparams, params) + + def connect(self, **kwargs): + """Return a new :class:`.Connection` object. + + The :class:`.Connection` object is a facade that uses a DBAPI + connection internally in order to communicate with the database. This + connection is procured from the connection-holding :class:`.Pool` + referenced by this :class:`.Engine`. When the + :meth:`~.Connection.close` method of the :class:`.Connection` object + is called, the underlying DBAPI connection is then returned to the + connection pool, where it may be used again in a subsequent call to + :meth:`~.Engine.connect`. + + """ + + return self._connection_cls(self, **kwargs) + + def contextual_connect(self, close_with_result=False, **kwargs): + """Return a :class:`.Connection` object which may be part of some + ongoing context. + + By default, this method does the same thing as :meth:`.Engine.connect`. + Subclasses of :class:`.Engine` may override this method + to provide contextual behavior. 
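+
+        A minimal usage sketch (on the base :class:`.Engine` this is
+        behaviorally the same as :meth:`.Engine.connect`)::
+
+            conn = engine.contextual_connect()
+            try:
+                conn.execute("select 1")
+            finally:
+                conn.close()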
+ + :param close_with_result: When True, the first :class:`.ResultProxy` + created by the :class:`.Connection` will call the + :meth:`.Connection.close` method of that connection as soon as any + pending result rows are exhausted. This is used to supply the + "connectionless execution" behavior provided by the + :meth:`.Engine.execute` method. + + """ + + return self._connection_cls( + self, + self._wrap_pool_connect(self.pool.connect, None), + close_with_result=close_with_result, + **kwargs) + + def table_names(self, schema=None, connection=None): + """Return a list of all table names available in the database. + + :param schema: Optional, retrieve names from a non-default schema. + + :param connection: Optional, use a specified connection. Default is + the ``contextual_connect`` for this ``Engine``. + """ + + with self._optional_conn_ctx_manager(connection) as conn: + if not schema: + schema = self.dialect.default_schema_name + return self.dialect.get_table_names(conn, schema) + + def has_table(self, table_name, schema=None): + """Return True if the given backend has a table of the given name. + + .. seealso:: + + :ref:`metadata_reflection_inspector` - detailed schema inspection + using the :class:`.Inspector` interface. + + :class:`.quoted_name` - used to pass quoting information along + with a schema identifier. + + """ + return self.run_callable(self.dialect.has_table, table_name, schema) + + def _wrap_pool_connect(self, fn, connection): + dialect = self.dialect + try: + return fn() + except dialect.dbapi.Error as e: + if connection is None: + Connection._handle_dbapi_exception_noconnection( + e, dialect, self) + else: + util.reraise(*sys.exc_info()) + + def raw_connection(self, _connection=None): + """Return a "raw" DBAPI connection from the connection pool. + + The returned object is a proxied version of the DBAPI + connection object used by the underlying driver in use. + The object will have all the same behavior as the real DBAPI + connection, except that its ``close()`` method will result in the + connection being returned to the pool, rather than being closed + for real. + + This method provides direct DBAPI connection access for + special situations when the API provided by :class:`.Connection` + is not needed. When a :class:`.Connection` object is already + present, the DBAPI connection is available using + the :attr:`.Connection.connection` accessor. + + .. 
seealso:: + + :ref:`dbapi_connections` + + """ + return self._wrap_pool_connect( + self.pool.unique_connection, _connection) + + +class OptionEngine(Engine): + def __init__(self, proxied, execution_options): + self._proxied = proxied + self.url = proxied.url + self.dialect = proxied.dialect + self.logging_name = proxied.logging_name + self.echo = proxied.echo + log.instance_logger(self, echoflag=self.echo) + self.dispatch = self.dispatch._join(proxied.dispatch) + self._execution_options = proxied._execution_options + self.update_execution_options(**execution_options) + + def _get_pool(self): + return self._proxied.pool + + def _set_pool(self, pool): + self._proxied.pool = pool + + pool = property(_get_pool, _set_pool) + + def _get_has_events(self): + return self._proxied._has_events or \ + self.__dict__.get('_has_events', False) + + def _set_has_events(self, value): + self.__dict__['_has_events'] = value + + _has_events = property(_get_has_events, _set_has_events) diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/default.py b/lib/python3.4/site-packages/sqlalchemy/engine/default.py new file mode 100644 index 0000000..9798d13 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/engine/default.py @@ -0,0 +1,1026 @@ +# engine/default.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Default implementations of per-dialect sqlalchemy.engine classes. + +These are semi-private implementation classes which are only of importance +to database dialect authors; dialects will usually use the classes here +as the base class for their own corresponding classes. + +""" + +import re +import random +from . import reflection, interfaces, result +from ..sql import compiler, expression +from .. import types as sqltypes +from .. import exc, util, pool, processors +import codecs +import weakref +from .. import event + +AUTOCOMMIT_REGEXP = re.compile( + r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)', + re.I | re.UNICODE) + + +class DefaultDialect(interfaces.Dialect): + """Default implementation of Dialect""" + + statement_compiler = compiler.SQLCompiler + ddl_compiler = compiler.DDLCompiler + type_compiler = compiler.GenericTypeCompiler + preparer = compiler.IdentifierPreparer + supports_alter = True + + # the first value we'd get for an autoincrement + # column. + default_sequence_base = 1 + + # most DBAPIs happy with this for execute(). + # not cx_oracle. + execute_sequence_format = tuple + + supports_views = True + supports_sequences = False + sequences_optional = False + preexecute_autoincrement_sequences = False + postfetch_lastrowid = True + implicit_returning = False + + supports_right_nested_joins = True + + supports_native_enum = False + supports_native_boolean = False + + supports_simple_order_by_label = True + + engine_config_types = util.immutabledict([ + ('convert_unicode', util.bool_or_str('force')), + ('pool_timeout', util.asint), + ('echo', util.bool_or_str('debug')), + ('echo_pool', util.bool_or_str('debug')), + ('pool_recycle', util.asint), + ('pool_size', util.asint), + ('max_overflow', util.asint), + ('pool_threadlocal', util.asbool), + ]) + + # if the NUMERIC type + # returns decimal.Decimal. + # *not* the FLOAT type however. 
+    supports_native_decimal = False
+
+    if util.py3k:
+        supports_unicode_statements = True
+        supports_unicode_binds = True
+        returns_unicode_strings = True
+        description_encoding = None
+    else:
+        supports_unicode_statements = False
+        supports_unicode_binds = False
+        returns_unicode_strings = False
+        description_encoding = 'use_encoding'
+
+    name = 'default'
+
+    # length at which to truncate
+    # any identifier.
+    max_identifier_length = 9999
+
+    # length at which to truncate
+    # the name of an index.
+    # Usually None to indicate
+    # 'use max_identifier_length'.
+    # thanks to MySQL, sigh
+    max_index_name_length = None
+
+    supports_sane_rowcount = True
+    supports_sane_multi_rowcount = True
+    dbapi_type_map = {}
+    colspecs = {}
+    default_paramstyle = 'named'
+    supports_default_values = False
+    supports_empty_insert = True
+    supports_multivalues_insert = False
+
+    server_version_info = None
+
+    construct_arguments = None
+    """Optional set of argument specifiers for various SQLAlchemy
+    constructs, typically schema items.
+
+    To implement, establish as a series of tuples, as in::
+
+        construct_arguments = [
+            (schema.Index, {
+                "using": False,
+                "where": None,
+                "ops": None
+            })
+        ]
+
+    If the above construct is established on the Postgresql dialect,
+    the :class:`.Index` construct will now accept the keyword arguments
+    ``postgresql_using``, ``postgresql_where``, and ``postgresql_ops``.
+    Any other argument specified to the constructor of :class:`.Index`
+    which is prefixed with ``postgresql_`` will raise :class:`.ArgumentError`.
+
+    A dialect which does not include a ``construct_arguments`` member will
+    not participate in the argument validation system.  For such a dialect,
+    any argument name is accepted by all participating constructs, within
+    the namespace of arguments prefixed with that dialect name.  The rationale
+    here is so that third-party dialects that haven't yet implemented this
+    feature continue to function in the old way.
+
+    .. versionadded:: 0.9.2
+
+    .. seealso::
+
+        :class:`.DialectKWArgs` - implementing base class which consumes
+        :attr:`.DefaultDialect.construct_arguments`
+
+
+    """
+
+    # indicates symbol names are
+    # UPPERCASEd if they are case insensitive
+    # within the database.
+    # if this is True, the methods normalize_name()
+    # and denormalize_name() must be provided.
+    requires_name_normalize = False
+
+    reflection_options = ()
+
+    dbapi_exception_translation_map = util.immutabledict()
+    """mapping used in the extremely unusual case that a DBAPI's
+    published exceptions don't actually have the __name__ that they
+    are linked towards.
+
+    .. 
versionadded:: 1.0.5 + + """ + + def __init__(self, convert_unicode=False, + encoding='utf-8', paramstyle=None, dbapi=None, + implicit_returning=None, + supports_right_nested_joins=None, + case_sensitive=True, + supports_native_boolean=None, + label_length=None, **kwargs): + + if not getattr(self, 'ported_sqla_06', True): + util.warn( + "The %s dialect is not yet ported to the 0.6 format" % + self.name) + + self.convert_unicode = convert_unicode + self.encoding = encoding + self.positional = False + self._ischema = None + self.dbapi = dbapi + if paramstyle is not None: + self.paramstyle = paramstyle + elif self.dbapi is not None: + self.paramstyle = self.dbapi.paramstyle + else: + self.paramstyle = self.default_paramstyle + if implicit_returning is not None: + self.implicit_returning = implicit_returning + self.positional = self.paramstyle in ('qmark', 'format', 'numeric') + self.identifier_preparer = self.preparer(self) + self.type_compiler = self.type_compiler(self) + if supports_right_nested_joins is not None: + self.supports_right_nested_joins = supports_right_nested_joins + if supports_native_boolean is not None: + self.supports_native_boolean = supports_native_boolean + self.case_sensitive = case_sensitive + + if label_length and label_length > self.max_identifier_length: + raise exc.ArgumentError( + "Label length of %d is greater than this dialect's" + " maximum identifier length of %d" % + (label_length, self.max_identifier_length)) + self.label_length = label_length + + if self.description_encoding == 'use_encoding': + self._description_decoder = \ + processors.to_unicode_processor_factory( + encoding + ) + elif self.description_encoding is not None: + self._description_decoder = \ + processors.to_unicode_processor_factory( + self.description_encoding + ) + self._encoder = codecs.getencoder(self.encoding) + self._decoder = processors.to_unicode_processor_factory(self.encoding) + + @util.memoized_property + def _type_memos(self): + return weakref.WeakKeyDictionary() + + @property + def dialect_description(self): + return self.name + "+" + self.driver + + @classmethod + def get_pool_class(cls, url): + return getattr(cls, 'poolclass', pool.QueuePool) + + def initialize(self, connection): + try: + self.server_version_info = \ + self._get_server_version_info(connection) + except NotImplementedError: + self.server_version_info = None + try: + self.default_schema_name = \ + self._get_default_schema_name(connection) + except NotImplementedError: + self.default_schema_name = None + + try: + self.default_isolation_level = \ + self.get_isolation_level(connection.connection) + except NotImplementedError: + self.default_isolation_level = None + + self.returns_unicode_strings = self._check_unicode_returns(connection) + + if self.description_encoding is not None and \ + self._check_unicode_description(connection): + self._description_decoder = self.description_encoding = None + + self.do_rollback(connection.connection) + + def on_connect(self): + """return a callable which sets up a newly created DBAPI connection. + + This is used to set dialect-wide per-connection options such as + isolation modes, unicode modes, etc. + + If a callable is returned, it will be assembled into a pool listener + that receives the direct DBAPI connection, with all wrappers removed. + + If None is returned, no listener will be generated. 
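+
+        A minimal sketch of an override in a dialect subclass (the
+        PRAGMA statement shown is illustrative only)::
+
+            def on_connect(self):
+                def connect(dbapi_connection):
+                    cursor = dbapi_connection.cursor()
+                    cursor.execute("PRAGMA foreign_keys=ON")
+                    cursor.close()
+                return connect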
+ + """ + return None + + def _check_unicode_returns(self, connection, additional_tests=None): + if util.py2k and not self.supports_unicode_statements: + cast_to = util.binary_type + else: + cast_to = util.text_type + + if self.positional: + parameters = self.execute_sequence_format() + else: + parameters = {} + + def check_unicode(test): + statement = cast_to( + expression.select([test]).compile(dialect=self)) + try: + cursor = connection.connection.cursor() + connection._cursor_execute(cursor, statement, parameters) + row = cursor.fetchone() + cursor.close() + except exc.DBAPIError as de: + # note that _cursor_execute() will have closed the cursor + # if an exception is thrown. + util.warn("Exception attempting to " + "detect unicode returns: %r" % de) + return False + else: + return isinstance(row[0], util.text_type) + + tests = [ + # detect plain VARCHAR + expression.cast( + expression.literal_column("'test plain returns'"), + sqltypes.VARCHAR(60) + ), + # detect if there's an NVARCHAR type with different behavior + # available + expression.cast( + expression.literal_column("'test unicode returns'"), + sqltypes.Unicode(60) + ), + ] + + if additional_tests: + tests += additional_tests + + results = set([check_unicode(test) for test in tests]) + + if results.issuperset([True, False]): + return "conditional" + else: + return results == set([True]) + + def _check_unicode_description(self, connection): + # all DBAPIs on Py2K return cursor.description as encoded, + # until pypy2.1beta2 with sqlite, so let's just check it - + # it's likely others will start doing this too in Py2k. + + if util.py2k and not self.supports_unicode_statements: + cast_to = util.binary_type + else: + cast_to = util.text_type + + cursor = connection.connection.cursor() + try: + cursor.execute( + cast_to( + expression.select([ + expression.literal_column("'x'").label("some_label") + ]).compile(dialect=self) + ) + ) + return isinstance(cursor.description[0][0], util.text_type) + finally: + cursor.close() + + def type_descriptor(self, typeobj): + """Provide a database-specific :class:`.TypeEngine` object, given + the generic object which comes from the types module. + + This method looks for a dictionary called + ``colspecs`` as a class or instance-level variable, + and passes on to :func:`.types.adapt_type`. + + """ + return sqltypes.adapt_type(typeobj, self.colspecs) + + def reflecttable( + self, connection, table, include_columns, exclude_columns): + insp = reflection.Inspector.from_engine(connection) + return insp.reflecttable(table, include_columns, exclude_columns) + + def get_pk_constraint(self, conn, table_name, schema=None, **kw): + """Compatibility method, adapts the result of get_primary_keys() + for those dialects which don't implement get_pk_constraint(). 
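+
+        The adapted value is a dictionary of the form (the column name
+        shown is illustrative)::
+
+            {'constrained_columns': ['id']}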
+ + """ + return { + 'constrained_columns': + self.get_primary_keys(conn, table_name, + schema=schema, **kw) + } + + def validate_identifier(self, ident): + if len(ident) > self.max_identifier_length: + raise exc.IdentifierError( + "Identifier '%s' exceeds maximum length of %d characters" % + (ident, self.max_identifier_length) + ) + + def connect(self, *cargs, **cparams): + return self.dbapi.connect(*cargs, **cparams) + + def create_connect_args(self, url): + opts = url.translate_connect_args() + opts.update(url.query) + return [[], opts] + + def set_engine_execution_options(self, engine, opts): + if 'isolation_level' in opts: + isolation_level = opts['isolation_level'] + + @event.listens_for(engine, "engine_connect") + def set_isolation(connection, branch): + if not branch: + self._set_connection_isolation(connection, isolation_level) + + def set_connection_execution_options(self, connection, opts): + if 'isolation_level' in opts: + self._set_connection_isolation(connection, opts['isolation_level']) + + def _set_connection_isolation(self, connection, level): + if connection.in_transaction(): + util.warn( + "Connection is already established with a Transaction; " + "setting isolation_level may implicitly rollback or commit " + "the existing transaction, or have no effect until " + "next transaction") + self.set_isolation_level(connection.connection, level) + connection.connection._connection_record.\ + finalize_callback.append(self.reset_isolation_level) + + def do_begin(self, dbapi_connection): + pass + + def do_rollback(self, dbapi_connection): + dbapi_connection.rollback() + + def do_commit(self, dbapi_connection): + dbapi_connection.commit() + + def do_close(self, dbapi_connection): + dbapi_connection.close() + + def create_xid(self): + """Create a random two-phase transaction ID. + + This id will be passed to do_begin_twophase(), do_rollback_twophase(), + do_commit_twophase(). Its format is unspecified. + """ + + return "_sa_%032x" % random.randint(0, 2 ** 128) + + def do_savepoint(self, connection, name): + connection.execute(expression.SavepointClause(name)) + + def do_rollback_to_savepoint(self, connection, name): + connection.execute(expression.RollbackToSavepointClause(name)) + + def do_release_savepoint(self, connection, name): + connection.execute(expression.ReleaseSavepointClause(name)) + + def do_executemany(self, cursor, statement, parameters, context=None): + cursor.executemany(statement, parameters) + + def do_execute(self, cursor, statement, parameters, context=None): + cursor.execute(statement, parameters) + + def do_execute_no_params(self, cursor, statement, context=None): + cursor.execute(statement) + + def is_disconnect(self, e, connection, cursor): + return False + + def reset_isolation_level(self, dbapi_conn): + # default_isolation_level is read from the first connection + # after the initial set of 'isolation_level', if any, so is + # the configured default of this dialect. 
+ self.set_isolation_level(dbapi_conn, self.default_isolation_level) + + +class DefaultExecutionContext(interfaces.ExecutionContext): + isinsert = False + isupdate = False + isdelete = False + is_crud = False + is_text = False + isddl = False + executemany = False + compiled = None + statement = None + result_column_struct = None + _is_implicit_returning = False + _is_explicit_returning = False + + # a hook for SQLite's translation of + # result column names + _translate_colname = None + + @classmethod + def _init_ddl(cls, dialect, connection, dbapi_connection, compiled_ddl): + """Initialize execution context for a DDLElement construct.""" + + self = cls.__new__(cls) + self.root_connection = connection + self._dbapi_connection = dbapi_connection + self.dialect = connection.dialect + + self.compiled = compiled = compiled_ddl + self.isddl = True + + self.execution_options = compiled.statement._execution_options + if connection._execution_options: + self.execution_options = dict(self.execution_options) + self.execution_options.update(connection._execution_options) + + if not dialect.supports_unicode_statements: + self.unicode_statement = util.text_type(compiled) + self.statement = dialect._encoder(self.unicode_statement)[0] + else: + self.statement = self.unicode_statement = util.text_type(compiled) + + self.cursor = self.create_cursor() + self.compiled_parameters = [] + + if dialect.positional: + self.parameters = [dialect.execute_sequence_format()] + else: + self.parameters = [{}] + + return self + + @classmethod + def _init_compiled(cls, dialect, connection, dbapi_connection, + compiled, parameters): + """Initialize execution context for a Compiled construct.""" + + self = cls.__new__(cls) + self.root_connection = connection + self._dbapi_connection = dbapi_connection + self.dialect = connection.dialect + + self.compiled = compiled + + if not compiled.can_execute: + raise exc.ArgumentError("Not an executable clause") + + self.execution_options = compiled.statement._execution_options.union( + connection._execution_options) + + self.result_column_struct = ( + compiled._result_columns, compiled._ordered_columns) + + self.unicode_statement = util.text_type(compiled) + if not dialect.supports_unicode_statements: + self.statement = self.unicode_statement.encode( + self.dialect.encoding) + else: + self.statement = self.unicode_statement + + self.isinsert = compiled.isinsert + self.isupdate = compiled.isupdate + self.isdelete = compiled.isdelete + self.is_text = compiled.isplaintext + + if not parameters: + self.compiled_parameters = [compiled.construct_params()] + else: + self.compiled_parameters = \ + [compiled.construct_params(m, _group_number=grp) for + grp, m in enumerate(parameters)] + + self.executemany = len(parameters) > 1 + + self.cursor = self.create_cursor() + + if self.isinsert or self.isupdate or self.isdelete: + self.is_crud = True + self._is_explicit_returning = bool(compiled.statement._returning) + self._is_implicit_returning = bool( + compiled.returning and not compiled.statement._returning) + + if not self.isdelete: + if self.compiled.prefetch: + if self.executemany: + self._process_executemany_defaults() + else: + self._process_executesingle_defaults() + + processors = compiled._bind_processors + + # Convert the dictionary of bind parameter values + # into a dict or list to be sent to the DBAPI's + # execute() or executemany() method. 
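+        # For example (values illustrative): a positional DBAPI receives
+        # a list of tuples such as [(1, 'v1')], while a named-style DBAPI
+        # receives a list of dictionaries such as [{'id': 1, 'value': 'v1'}].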
+        parameters = []
+        if dialect.positional:
+            for compiled_params in self.compiled_parameters:
+                param = []
+                for key in self.compiled.positiontup:
+                    if key in processors:
+                        param.append(processors[key](compiled_params[key]))
+                    else:
+                        param.append(compiled_params[key])
+                parameters.append(dialect.execute_sequence_format(param))
+        else:
+            encode = not dialect.supports_unicode_statements
+            for compiled_params in self.compiled_parameters:
+
+                if encode:
+                    param = dict(
+                        (
+                            dialect._encoder(key)[0],
+                            processors[key](compiled_params[key])
+                            if key in processors
+                            else compiled_params[key]
+                        )
+                        for key in compiled_params
+                    )
+                else:
+                    param = dict(
+                        (
+                            key,
+                            processors[key](compiled_params[key])
+                            if key in processors
+                            else compiled_params[key]
+                        )
+                        for key in compiled_params
+                    )
+
+                parameters.append(param)
+        self.parameters = dialect.execute_sequence_format(parameters)
+
+        return self
+
+    @classmethod
+    def _init_statement(cls, dialect, connection, dbapi_connection,
+                        statement, parameters):
+        """Initialize execution context for a string SQL statement."""
+
+        self = cls.__new__(cls)
+        self.root_connection = connection
+        self._dbapi_connection = dbapi_connection
+        self.dialect = connection.dialect
+        self.is_text = True
+
+        # plain text statement
+        self.execution_options = connection._execution_options
+
+        if not parameters:
+            if self.dialect.positional:
+                self.parameters = [dialect.execute_sequence_format()]
+            else:
+                self.parameters = [{}]
+        elif isinstance(parameters[0], dialect.execute_sequence_format):
+            self.parameters = parameters
+        elif isinstance(parameters[0], dict):
+            if dialect.supports_unicode_statements:
+                self.parameters = parameters
+            else:
+                self.parameters = [
+                    dict((dialect._encoder(k)[0], d[k]) for k in d)
+                    for d in parameters
+                ] or [{}]
+        else:
+            self.parameters = [dialect.execute_sequence_format(p)
+                               for p in parameters]
+
+        self.executemany = len(parameters) > 1
+
+        if not dialect.supports_unicode_statements and \
+                isinstance(statement, util.text_type):
+            self.unicode_statement = statement
+            self.statement = dialect._encoder(statement)[0]
+        else:
+            self.statement = self.unicode_statement = statement
+
+        self.cursor = self.create_cursor()
+        return self
+
+    @classmethod
+    def _init_default(cls, dialect, connection, dbapi_connection):
+        """Initialize execution context for a ColumnDefault construct."""
+
+        self = cls.__new__(cls)
+        self.root_connection = connection
+        self._dbapi_connection = dbapi_connection
+        self.dialect = connection.dialect
+        self.execution_options = connection._execution_options
+        self.cursor = self.create_cursor()
+        return self
+
+    @util.memoized_property
+    def engine(self):
+        return self.root_connection.engine
+
+    @util.memoized_property
+    def postfetch_cols(self):
+        return self.compiled.postfetch
+
+    @util.memoized_property
+    def prefetch_cols(self):
+        return self.compiled.prefetch
+
+    @util.memoized_property
+    def returning_cols(self):
+        return self.compiled.returning
+
+    @util.memoized_property
+    def no_parameters(self):
+        return self.execution_options.get("no_parameters", False)
+
+    @util.memoized_property
+    def should_autocommit(self):
+        autocommit = self.execution_options.get('autocommit',
+                                                not self.compiled and
+                                                self.statement and
+                                                expression.PARSE_AUTOCOMMIT
+                                                or False)
+
+        if autocommit is expression.PARSE_AUTOCOMMIT:
+            return self.should_autocommit_text(self.unicode_statement)
+        else:
+            return autocommit
+
+    def _execute_scalar(self, stmt, type_):
+        """Execute a string statement on the current cursor, returning a
+        scalar result.
+ + Used to fire off sequences, default phrases, and "select lastrowid" + types of statements individually or in the context of a parent INSERT + or UPDATE statement. + + """ + + conn = self.root_connection + if isinstance(stmt, util.text_type) and \ + not self.dialect.supports_unicode_statements: + stmt = self.dialect._encoder(stmt)[0] + + if self.dialect.positional: + default_params = self.dialect.execute_sequence_format() + else: + default_params = {} + + conn._cursor_execute(self.cursor, stmt, default_params, context=self) + r = self.cursor.fetchone()[0] + if type_ is not None: + # apply type post processors to the result + proc = type_._cached_result_processor( + self.dialect, + self.cursor.description[0][1] + ) + if proc: + return proc(r) + return r + + @property + def connection(self): + return self.root_connection._branch() + + def should_autocommit_text(self, statement): + return AUTOCOMMIT_REGEXP.match(statement) + + def create_cursor(self): + return self._dbapi_connection.cursor() + + def pre_exec(self): + pass + + def post_exec(self): + pass + + def get_result_processor(self, type_, colname, coltype): + """Return a 'result processor' for a given type as present in + cursor.description. + + This has a default implementation that dialects can override + for context-sensitive result type handling. + + """ + return type_._cached_result_processor(self.dialect, coltype) + + def get_lastrowid(self): + """return self.cursor.lastrowid, or equivalent, after an INSERT. + + This may involve calling special cursor functions, + issuing a new SELECT on the cursor (or a new one), + or returning a stored value that was + calculated within post_exec(). + + This function will only be called for dialects + which support "implicit" primary key generation, + keep preexecute_autoincrement_sequences set to False, + and when no explicit id value was bound to the + statement. + + The function is called once, directly after + post_exec() and before the transaction is committed + or ResultProxy is generated. If the post_exec() + method assigns a value to `self._lastrowid`, the + value is used in place of calling get_lastrowid(). + + Note that this method is *not* equivalent to the + ``lastrowid`` method on ``ResultProxy``, which is a + direct proxy to the DBAPI ``lastrowid`` accessor + in all cases. 
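+
+        A minimal sketch of an override (illustrative only; the
+        ``SELECT @@identity`` statement is a hypothetical example, not
+        part of this class)::
+
+            class MyExecutionContext(DefaultExecutionContext):
+                def get_lastrowid(self):
+                    # fall back to an explicit SELECT when the driver
+                    # does not populate cursor.lastrowid
+                    self.cursor.execute("SELECT @@identity AS lastrowid")
+                    return self.cursor.fetchone()[0]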
+ + """ + return self.cursor.lastrowid + + def handle_dbapi_exception(self, e): + pass + + def get_result_proxy(self): + return result.ResultProxy(self) + + @property + def rowcount(self): + return self.cursor.rowcount + + def supports_sane_rowcount(self): + return self.dialect.supports_sane_rowcount + + def supports_sane_multi_rowcount(self): + return self.dialect.supports_sane_multi_rowcount + + def _setup_crud_result_proxy(self): + if self.isinsert and \ + not self.executemany: + if not self._is_implicit_returning and \ + not self.compiled.inline and \ + self.dialect.postfetch_lastrowid: + + self._setup_ins_pk_from_lastrowid() + + elif not self._is_implicit_returning: + self._setup_ins_pk_from_empty() + + result = self.get_result_proxy() + + if self.isinsert: + if self._is_implicit_returning: + row = result.fetchone() + self.returned_defaults = row + self._setup_ins_pk_from_implicit_returning(row) + result._soft_close(_autoclose_connection=False) + result._metadata = None + elif not self._is_explicit_returning: + result._soft_close(_autoclose_connection=False) + result._metadata = None + elif self.isupdate and self._is_implicit_returning: + row = result.fetchone() + self.returned_defaults = row + result._soft_close(_autoclose_connection=False) + result._metadata = None + + elif result._metadata is None: + # no results, get rowcount + # (which requires open cursor on some drivers + # such as kintersbasdb, mxodbc) + result.rowcount + result._soft_close(_autoclose_connection=False) + return result + + def _setup_ins_pk_from_lastrowid(self): + key_getter = self.compiled._key_getters_for_crud_column[2] + table = self.compiled.statement.table + compiled_params = self.compiled_parameters[0] + + lastrowid = self.get_lastrowid() + if lastrowid is not None: + autoinc_col = table._autoincrement_column + if autoinc_col is not None: + # apply type post processors to the lastrowid + proc = autoinc_col.type._cached_result_processor( + self.dialect, None) + if proc is not None: + lastrowid = proc(lastrowid) + self.inserted_primary_key = [ + lastrowid if c is autoinc_col else + compiled_params.get(key_getter(c), None) + for c in table.primary_key + ] + else: + # don't have a usable lastrowid, so + # do the same as _setup_ins_pk_from_empty + self.inserted_primary_key = [ + compiled_params.get(key_getter(c), None) + for c in table.primary_key + ] + + def _setup_ins_pk_from_empty(self): + key_getter = self.compiled._key_getters_for_crud_column[2] + table = self.compiled.statement.table + compiled_params = self.compiled_parameters[0] + self.inserted_primary_key = [ + compiled_params.get(key_getter(c), None) + for c in table.primary_key + ] + + def _setup_ins_pk_from_implicit_returning(self, row): + key_getter = self.compiled._key_getters_for_crud_column[2] + table = self.compiled.statement.table + compiled_params = self.compiled_parameters[0] + + self.inserted_primary_key = [ + row[col] if value is None else value + for col, value in [ + (col, compiled_params.get(key_getter(col), None)) + for col in table.primary_key + ] + ] + + def lastrow_has_defaults(self): + return (self.isinsert or self.isupdate) and \ + bool(self.compiled.postfetch) + + def set_input_sizes(self, translate=None, exclude_types=None): + """Given a cursor and ClauseParameters, call the appropriate + style of ``setinputsizes()`` on the cursor, using DB-API types + from the bind parameter's ``TypeEngine`` objects. + + This method only called by those dialects which require it, + currently cx_oracle. 
+ + """ + + if not hasattr(self.compiled, 'bind_names'): + return + + types = dict( + (self.compiled.bind_names[bindparam], bindparam.type) + for bindparam in self.compiled.bind_names) + + if self.dialect.positional: + inputsizes = [] + for key in self.compiled.positiontup: + typeengine = types[key] + dbtype = typeengine.dialect_impl(self.dialect).\ + get_dbapi_type(self.dialect.dbapi) + if dbtype is not None and \ + (not exclude_types or dbtype not in exclude_types): + inputsizes.append(dbtype) + try: + self.cursor.setinputsizes(*inputsizes) + except Exception as e: + self.root_connection._handle_dbapi_exception( + e, None, None, None, self) + else: + inputsizes = {} + for key in self.compiled.bind_names.values(): + typeengine = types[key] + dbtype = typeengine.dialect_impl(self.dialect).\ + get_dbapi_type(self.dialect.dbapi) + if dbtype is not None and \ + (not exclude_types or dbtype not in exclude_types): + if translate: + key = translate.get(key, key) + if not self.dialect.supports_unicode_binds: + key = self.dialect._encoder(key)[0] + inputsizes[key] = dbtype + try: + self.cursor.setinputsizes(**inputsizes) + except Exception as e: + self.root_connection._handle_dbapi_exception( + e, None, None, None, self) + + def _exec_default(self, default, type_): + if default.is_sequence: + return self.fire_sequence(default, type_) + elif default.is_callable: + return default.arg(self) + elif default.is_clause_element: + # TODO: expensive branching here should be + # pulled into _exec_scalar() + conn = self.connection + c = expression.select([default.arg]).compile(bind=conn) + return conn._execute_compiled(c, (), {}).scalar() + else: + return default.arg + + def get_insert_default(self, column): + if column.default is None: + return None + else: + return self._exec_default(column.default, column.type) + + def get_update_default(self, column): + if column.onupdate is None: + return None + else: + return self._exec_default(column.onupdate, column.type) + + def _process_executemany_defaults(self): + key_getter = self.compiled._key_getters_for_crud_column[2] + + prefetch = self.compiled.prefetch + scalar_defaults = {} + + # pre-determine scalar Python-side defaults + # to avoid many calls of get_insert_default()/ + # get_update_default() + for c in prefetch: + if self.isinsert and c.default and c.default.is_scalar: + scalar_defaults[c] = c.default.arg + elif self.isupdate and c.onupdate and c.onupdate.is_scalar: + scalar_defaults[c] = c.onupdate.arg + + for param in self.compiled_parameters: + self.current_parameters = param + for c in prefetch: + if c in scalar_defaults: + val = scalar_defaults[c] + elif self.isinsert: + val = self.get_insert_default(c) + else: + val = self.get_update_default(c) + if val is not None: + param[key_getter(c)] = val + del self.current_parameters + + def _process_executesingle_defaults(self): + key_getter = self.compiled._key_getters_for_crud_column[2] + prefetch = self.compiled.prefetch + self.current_parameters = compiled_parameters = \ + self.compiled_parameters[0] + + for c in prefetch: + if self.isinsert: + if c.default and \ + not c.default.is_sequence and c.default.is_scalar: + val = c.default.arg + else: + val = self.get_insert_default(c) + else: + val = self.get_update_default(c) + + if val is not None: + compiled_parameters[key_getter(c)] = val + del self.current_parameters + + +DefaultDialect.execution_ctx_cls = DefaultExecutionContext diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/interfaces.py 
b/lib/python3.4/site-packages/sqlalchemy/engine/interfaces.py new file mode 100644 index 0000000..1948342 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/engine/interfaces.py @@ -0,0 +1,1152 @@ +# engine/interfaces.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Define core interfaces used by the engine system.""" + +from .. import util, event + +# backwards compat +from ..sql.compiler import Compiled, TypeCompiler + + +class Dialect(object): + """Define the behavior of a specific database and DB-API combination. + + Any aspect of metadata definition, SQL query generation, + execution, result-set handling, or anything else which varies + between databases is defined under the general category of the + Dialect. The Dialect acts as a factory for other + database-specific object implementations including + ExecutionContext, Compiled, DefaultGenerator, and TypeEngine. + + All Dialects implement the following attributes: + + name + identifying name for the dialect from a DBAPI-neutral point of view + (i.e. 'sqlite') + + driver + identifying name for the dialect's DBAPI + + positional + True if the paramstyle for this Dialect is positional. + + paramstyle + the paramstyle to be used (some DB-APIs support multiple + paramstyles). + + convert_unicode + True if Unicode conversion should be applied to all ``str`` + types. + + encoding + type of encoding to use for unicode, usually defaults to + 'utf-8'. + + statement_compiler + a :class:`.Compiled` class used to compile SQL statements + + ddl_compiler + a :class:`.Compiled` class used to compile DDL statements + + server_version_info + a tuple containing a version number for the DB backend in use. + This value is only available for supporting dialects, and is + typically populated during the initial connection to the database. + + default_schema_name + the name of the default schema. This value is only available for + supporting dialects, and is typically populated during the + initial connection to the database. + + execution_ctx_cls + a :class:`.ExecutionContext` class used to handle statement execution + + execute_sequence_format + either the 'tuple' or 'list' type, depending on what cursor.execute() + accepts for the second argument (they vary). + + preparer + a :class:`~sqlalchemy.sql.compiler.IdentifierPreparer` class used to + quote identifiers. + + supports_alter + ``True`` if the database supports ``ALTER TABLE``. + + max_identifier_length + The maximum length of identifier names. + + supports_unicode_statements + Indicate whether the DB-API can receive SQL statements as Python + unicode strings + + supports_unicode_binds + Indicate whether the DB-API can receive string bind parameters + as Python unicode strings + + supports_sane_rowcount + Indicate whether the dialect properly implements rowcount for + ``UPDATE`` and ``DELETE`` statements. + + supports_sane_multi_rowcount + Indicate whether the dialect properly implements rowcount for + ``UPDATE`` and ``DELETE`` statements when executed via + executemany. + + preexecute_autoincrement_sequences + True if 'implicit' primary key functions must be executed separately + in order to get their value. This is currently oriented towards + Postgresql. 
+ + implicit_returning + use RETURNING or equivalent during INSERT execution in order to load + newly generated primary keys and other column defaults in one execution, + which are then available via inserted_primary_key. + If an insert statement has returning() specified explicitly, + the "implicit" functionality is not used and inserted_primary_key + will not be available. + + dbapi_type_map + A mapping of DB-API type objects present in this Dialect's + DB-API implementation mapped to TypeEngine implementations used + by the dialect. + + This is used to apply types to result sets based on the DB-API + types present in cursor.description; it only takes effect for + result sets against textual statements where no explicit + typemap was present. + + colspecs + A dictionary of TypeEngine classes from sqlalchemy.types mapped + to subclasses that are specific to the dialect class. This + dictionary is class-level only and is not accessed from the + dialect instance itself. + + supports_default_values + Indicates if the construct ``INSERT INTO tablename DEFAULT + VALUES`` is supported + + supports_sequences + Indicates if the dialect supports CREATE SEQUENCE or similar. + + sequences_optional + If True, indicates if the "optional" flag on the Sequence() construct + should signal to not generate a CREATE SEQUENCE. Applies only to + dialects that support sequences. Currently used only to allow Postgresql + SERIAL to be used on a column that specifies Sequence() for usage on + other backends. + + supports_native_enum + Indicates if the dialect supports a native ENUM construct. + This will prevent types.Enum from generating a CHECK + constraint when that type is used. + + supports_native_boolean + Indicates if the dialect supports a native boolean construct. + This will prevent types.Boolean from generating a CHECK + constraint when that type is used. + + dbapi_exception_translation_map + A dictionary of names that will contain as values the names of + pep-249 exceptions ("IntegrityError", "OperationalError", etc) + keyed to alternate class names, to support the case where a + DBAPI has exception classes that aren't named as they are + referred to (e.g. IntegrityError = MyException). In the vast + majority of cases this dictionary is empty. + + .. versionadded:: 1.0.5 + + """ + + _has_events = False + + def create_connect_args(self, url): + """Build DB-API compatible connection arguments. + + Given a :class:`~sqlalchemy.engine.url.URL` object, returns a tuple + consisting of a `*args`/`**kwargs` suitable to send directly + to the dbapi's connect function. + + """ + + raise NotImplementedError() + + @classmethod + def type_descriptor(cls, typeobj): + """Transform a generic type to a dialect-specific type. + + Dialect classes will usually use the + :func:`.types.adapt_type` function in the types module to + accomplish this. + + The returned result is cached *per dialect class* so can + contain no dialect-instance state. + + """ + + raise NotImplementedError() + + def initialize(self, connection): + """Called during strategized creation of the dialect with a + connection. + + Allows dialects to configure options based on server version info or + other properties. + + The connection passed here is a SQLAlchemy Connection object, + with full capabilities. + + The initialize() method of the base dialect should be called via + super(). + + """ + + pass + + def reflecttable( + self, connection, table, include_columns, exclude_columns): + """Load table description from the database. 
+
+        Given a :class:`.Connection` and a
+        :class:`~sqlalchemy.schema.Table` object, reflect its columns and
+        properties from the database.
+
+        The implementation of this method is provided by
+        :meth:`.DefaultDialect.reflecttable`, which makes use of
+        :class:`.Inspector` to retrieve column information.
+
+        Dialects should **not** seek to implement this method, and should
+        instead implement individual schema inspection operations such as
+        :meth:`.Dialect.get_columns`, :meth:`.Dialect.get_pk_constraint`,
+        etc.
+
+        """
+
+        raise NotImplementedError()
+
+    def get_columns(self, connection, table_name, schema=None, **kw):
+        """Return information about columns in `table_name`.
+
+        Given a :class:`.Connection`, a string
+        `table_name`, and an optional string `schema`, return column
+        information as a list of dictionaries with these keys:
+
+        name
+          the column's name
+
+        type
+          [sqlalchemy.types#TypeEngine]
+
+        nullable
+          boolean
+
+        default
+          the column's default value
+
+        autoincrement
+          boolean
+
+        sequence
+          a dictionary of the form
+          {'name': str, 'start': int, 'increment': int, 'minvalue': int,
+          'maxvalue': int, 'nominvalue': bool, 'nomaxvalue': bool,
+          'cycle': bool}
+
+        Additional column attributes may be present.
+        """
+
+        raise NotImplementedError()
+
+    def get_primary_keys(self, connection, table_name, schema=None, **kw):
+        """Return information about primary keys in `table_name`.
+
+        Deprecated. This method is only called by the default
+        implementation of :meth:`.Dialect.get_pk_constraint`. Dialects should
+        instead implement the :meth:`.Dialect.get_pk_constraint` method
+        directly.
+
+        """
+
+        raise NotImplementedError()
+
+    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
+        """Return information about the primary key constraint on
+        `table_name`.
+
+        Given a :class:`.Connection`, a string
+        `table_name`, and an optional string `schema`, return primary
+        key information as a dictionary with these keys:
+
+        constrained_columns
+          a list of column names that make up the primary key
+
+        name
+          optional name of the primary key constraint.
+
+        """
+        raise NotImplementedError()
+
+    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
+        """Return information about foreign_keys in `table_name`.
+
+        Given a :class:`.Connection`, a string
+        `table_name`, and an optional string `schema`, return foreign
+        key information as a list of dicts with these keys:
+
+        name
+          the constraint's name
+
+        constrained_columns
+          a list of column names that make up the foreign key
+
+        referred_schema
+          the name of the referred schema
+
+        referred_table
+          the name of the referred table
+
+        referred_columns
+          a list of column names in the referred table that correspond to
+          constrained_columns
+        """
+
+        raise NotImplementedError()
+
+    def get_table_names(self, connection, schema=None, **kw):
+        """Return a list of table names for `schema`."""
+
+        raise NotImplementedError()
+
+    def get_temp_table_names(self, connection, schema=None, **kw):
+        """Return a list of temporary table names on the given connection,
+        if supported by the underlying backend.
+
+        """
+
+        raise NotImplementedError()
+
+    def get_view_names(self, connection, schema=None, **kw):
+        """Return a list of all view names available in the database.
+
+        schema:
+          Optional, retrieve names from a non-default schema.
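+
+        A sketch of a dialect-level implementation (illustrative only;
+        the ``information_schema`` query is hypothetical and not part
+        of this interface)::
+
+            def get_view_names(self, connection, schema=None, **kw):
+                result = connection.execute(
+                    "SELECT table_name FROM information_schema.views")
+                return [row[0] for row in result]
+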
+ """ + + raise NotImplementedError() + + def get_temp_view_names(self, connection, schema=None, **kw): + """Return a list of temporary view names on the given connection, + if supported by the underlying backend. + + """ + + raise NotImplementedError() + + def get_view_definition(self, connection, view_name, schema=None, **kw): + """Return view definition. + + Given a :class:`.Connection`, a string + `view_name`, and an optional string `schema`, return the view + definition. + """ + + raise NotImplementedError() + + def get_indexes(self, connection, table_name, schema=None, **kw): + """Return information about indexes in `table_name`. + + Given a :class:`.Connection`, a string + `table_name` and an optional string `schema`, return index + information as a list of dictionaries with these keys: + + name + the index's name + + column_names + list of column names in order + + unique + boolean + """ + + raise NotImplementedError() + + def get_unique_constraints( + self, connection, table_name, schema=None, **kw): + """Return information about unique constraints in `table_name`. + + Given a string `table_name` and an optional string `schema`, return + unique constraint information as a list of dicts with these keys: + + name + the unique constraint's name + + column_names + list of column names in order + + \**kw + other options passed to the dialect's get_unique_constraints() + method. + + .. versionadded:: 0.9.0 + + """ + + raise NotImplementedError() + + def normalize_name(self, name): + """convert the given name to lowercase if it is detected as + case insensitive. + + this method is only used if the dialect defines + requires_name_normalize=True. + + """ + raise NotImplementedError() + + def denormalize_name(self, name): + """convert the given name to a case insensitive identifier + for the backend if it is an all-lowercase name. + + this method is only used if the dialect defines + requires_name_normalize=True. + + """ + raise NotImplementedError() + + def has_table(self, connection, table_name, schema=None): + """Check the existence of a particular table in the database. + + Given a :class:`.Connection` object and a string + `table_name`, return True if the given table (possibly within + the specified `schema`) exists in the database, False + otherwise. + """ + + raise NotImplementedError() + + def has_sequence(self, connection, sequence_name, schema=None): + """Check the existence of a particular sequence in the database. + + Given a :class:`.Connection` object and a string + `sequence_name`, return True if the given sequence exists in + the database, False otherwise. + """ + + raise NotImplementedError() + + def _get_server_version_info(self, connection): + """Retrieve the server version info from the given connection. + + This is used by the default implementation to populate the + "server_version_info" attribute and is called exactly + once upon first connect. + + """ + + raise NotImplementedError() + + def _get_default_schema_name(self, connection): + """Return the string name of the currently selected schema from + the given connection. + + This is used by the default implementation to populate the + "default_schema_name" attribute and is called exactly + once upon first connect. + + """ + + raise NotImplementedError() + + def do_begin(self, dbapi_connection): + """Provide an implementation of ``connection.begin()``, given a + DB-API connection. + + The DBAPI has no dedicated "begin" method and it is expected + that transactions are implicit. 
This hook is provided for those + DBAPIs that might need additional help in this area. + + Note that :meth:`.Dialect.do_begin` is not called unless a + :class:`.Transaction` object is in use. The + :meth:`.Dialect.do_autocommit` + hook is provided for DBAPIs that need some extra commands emitted + after a commit in order to enter the next transaction, when the + SQLAlchemy :class:`.Connection` is used in its default "autocommit" + mode. + + :param dbapi_connection: a DBAPI connection, typically + proxied within a :class:`.ConnectionFairy`. + + """ + + raise NotImplementedError() + + def do_rollback(self, dbapi_connection): + """Provide an implementation of ``connection.rollback()``, given + a DB-API connection. + + :param dbapi_connection: a DBAPI connection, typically + proxied within a :class:`.ConnectionFairy`. + + """ + + raise NotImplementedError() + + def do_commit(self, dbapi_connection): + """Provide an implementation of ``connection.commit()``, given a + DB-API connection. + + :param dbapi_connection: a DBAPI connection, typically + proxied within a :class:`.ConnectionFairy`. + + """ + + raise NotImplementedError() + + def do_close(self, dbapi_connection): + """Provide an implementation of ``connection.close()``, given a DBAPI + connection. + + This hook is called by the :class:`.Pool` when a connection has been + detached from the pool, or is being returned beyond the normal + capacity of the pool. + + .. versionadded:: 0.8 + + """ + + raise NotImplementedError() + + def create_xid(self): + """Create a two-phase transaction ID. + + This id will be passed to do_begin_twophase(), + do_rollback_twophase(), do_commit_twophase(). Its format is + unspecified. + """ + + raise NotImplementedError() + + def do_savepoint(self, connection, name): + """Create a savepoint with the given name. + + :param connection: a :class:`.Connection`. + :param name: savepoint name. + + """ + + raise NotImplementedError() + + def do_rollback_to_savepoint(self, connection, name): + """Rollback a connection to the named savepoint. + + :param connection: a :class:`.Connection`. + :param name: savepoint name. + + """ + + raise NotImplementedError() + + def do_release_savepoint(self, connection, name): + """Release the named savepoint on a connection. + + :param connection: a :class:`.Connection`. + :param name: savepoint name. + """ + + raise NotImplementedError() + + def do_begin_twophase(self, connection, xid): + """Begin a two phase transaction on the given connection. + + :param connection: a :class:`.Connection`. + :param xid: xid + + """ + + raise NotImplementedError() + + def do_prepare_twophase(self, connection, xid): + """Prepare a two phase transaction on the given connection. + + :param connection: a :class:`.Connection`. + :param xid: xid + + """ + + raise NotImplementedError() + + def do_rollback_twophase(self, connection, xid, is_prepared=True, + recover=False): + """Rollback a two phase transaction on the given connection. + + :param connection: a :class:`.Connection`. + :param xid: xid + :param is_prepared: whether or not + :meth:`.TwoPhaseTransaction.prepare` was called. + :param recover: if the recover flag was passed. + + """ + + raise NotImplementedError() + + def do_commit_twophase(self, connection, xid, is_prepared=True, + recover=False): + """Commit a two phase transaction on the given connection. + + + :param connection: a :class:`.Connection`. + :param xid: xid + :param is_prepared: whether or not + :meth:`.TwoPhaseTransaction.prepare` was called. 
+        :param recover: if the recover flag was passed.
+
+        """
+
+        raise NotImplementedError()
+
+    def do_recover_twophase(self, connection):
+        """Recover list of uncommitted prepared two phase transaction
+        identifiers on the given connection.
+
+        :param connection: a :class:`.Connection`.
+
+        """
+
+        raise NotImplementedError()
+
+    def do_executemany(self, cursor, statement, parameters, context=None):
+        """Provide an implementation of ``cursor.executemany(statement,
+        parameters)``."""
+
+        raise NotImplementedError()
+
+    def do_execute(self, cursor, statement, parameters, context=None):
+        """Provide an implementation of ``cursor.execute(statement,
+        parameters)``."""
+
+        raise NotImplementedError()
+
+    def do_execute_no_params(self, cursor, statement, parameters,
+                             context=None):
+        """Provide an implementation of ``cursor.execute(statement)``.
+
+        The parameter collection should not be sent.
+
+        """
+
+        raise NotImplementedError()
+
+    def is_disconnect(self, e, connection, cursor):
+        """Return True if the given DB-API error indicates an invalid
+        connection."""
+
+        raise NotImplementedError()
+
+    def connect(self):
+        """Return a callable which sets up a newly created DBAPI connection.
+
+        The callable accepts a single argument "conn" which is the
+        DBAPI connection itself. It has no return value.
+
+        This is used to set dialect-wide per-connection options such as
+        isolation modes, unicode modes, etc.
+
+        If a callable is returned, it will be assembled into a pool listener
+        that receives the direct DBAPI connection, with all wrappers removed.
+
+        If None is returned, no listener will be generated.
+
+        """
+        return None
+
+    def reset_isolation_level(self, dbapi_conn):
+        """Given a DBAPI connection, revert its isolation to the default.
+
+        Note that this is a dialect-level method which is used as part
+        of the implementation of the :class:`.Connection` and
+        :class:`.Engine`
+        isolation level facilities; these APIs should be preferred for
+        most typical use cases.
+
+        .. seealso::
+
+            :meth:`.Connection.get_isolation_level` - view current level
+
+            :attr:`.Connection.default_isolation_level` - view default level
+
+            :paramref:`.Connection.execution_options.isolation_level` -
+            set per :class:`.Connection` isolation level
+
+            :paramref:`.create_engine.isolation_level` -
+            set per :class:`.Engine` isolation level
+
+        """
+
+        raise NotImplementedError()
+
+    def set_isolation_level(self, dbapi_conn, level):
+        """Given a DBAPI connection, set its isolation level.
+
+        Note that this is a dialect-level method which is used as part
+        of the implementation of the :class:`.Connection` and
+        :class:`.Engine`
+        isolation level facilities; these APIs should be preferred for
+        most typical use cases.
+
+        .. seealso::
+
+            :meth:`.Connection.get_isolation_level` - view current level
+
+            :attr:`.Connection.default_isolation_level` - view default level
+
+            :paramref:`.Connection.execution_options.isolation_level` -
+            set per :class:`.Connection` isolation level
+
+            :paramref:`.create_engine.isolation_level` -
+            set per :class:`.Engine` isolation level
+
+        """
+
+        raise NotImplementedError()
+
+    def get_isolation_level(self, dbapi_conn):
+        """Given a DBAPI connection, return its isolation level.
+
+        When working with a :class:`.Connection` object, the corresponding
+        DBAPI connection may be procured using the
+        :attr:`.Connection.connection` accessor.
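+
+        E.g., a sketch of calling this method directly (illustrative
+        only; ``connection`` is assumed to be an existing
+        :class:`.Connection`)::
+
+            dbapi_conn = connection.connection
+            level = connection.dialect.get_isolation_level(dbapi_conn)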
+ + Note that this is a dialect-level method which is used as part + of the implementation of the :class:`.Connection` and + :class:`.Engine` isolation level facilities; + these APIs should be preferred for most typical use cases. + + + .. seealso:: + + :meth:`.Connection.get_isolation_level` - view current level + + :attr:`.Connection.default_isolation_level` - view default level + + :paramref:`.Connection.execution_options.isolation_level` - + set per :class:`.Connection` isolation level + + :paramref:`.create_engine.isolation_level` - + set per :class:`.Engine` isolation level + + + """ + + raise NotImplementedError() + + @classmethod + def get_dialect_cls(cls, url): + """Given a URL, return the :class:`.Dialect` that will be used. + + This is a hook that allows an external plugin to provide functionality + around an existing dialect, by allowing the plugin to be loaded + from the url based on an entrypoint, and then the plugin returns + the actual dialect to be used. + + By default this just returns the cls. + + .. versionadded:: 1.0.3 + + """ + return cls + + @classmethod + def engine_created(cls, engine): + """A convenience hook called before returning the final :class:`.Engine`. + + If the dialect returned a different class from the + :meth:`.get_dialect_cls` + method, then the hook is called on both classes, first on + the dialect class returned by the :meth:`.get_dialect_cls` method and + then on the class on which the method was called. + + The hook should be used by dialects and/or wrappers to apply special + events to the engine or its components. In particular, it allows + a dialect-wrapping class to apply dialect-level events. + + .. versionadded:: 1.0.3 + + """ + pass + + +class ExecutionContext(object): + """A messenger object for a Dialect that corresponds to a single + execution. + + ExecutionContext should have these data members: + + connection + Connection object which can be freely used by default value + generators to execute SQL. This Connection should reference the + same underlying connection/transactional resources of + root_connection. + + root_connection + Connection object which is the source of this ExecutionContext. This + Connection may have close_with_result=True set, in which case it can + only be used once. + + dialect + dialect which created this ExecutionContext. + + cursor + DB-API cursor procured from the connection, + + compiled + if passed to constructor, sqlalchemy.engine.base.Compiled object + being executed, + + statement + string version of the statement to be executed. Is either + passed to the constructor, or must be created from the + sql.Compiled object by the time pre_exec() has completed. + + parameters + bind parameters passed to the execute() method. For compiled + statements, this is a dictionary or list of dictionaries. For + textual statements, it should be in a format suitable for the + dialect's paramstyle (i.e. dict or list of dicts for non + positional, list or list of lists/tuples for positional). + + isinsert + True if the statement is an INSERT. + + isupdate + True if the statement is an UPDATE. + + should_autocommit + True if the statement is a "committable" statement. + + prefetch_cols + a list of Column objects for which a client-side default + was fired off. Applies to inserts and updates. + + postfetch_cols + a list of Column objects for which a server-side default or + inline SQL expression value was fired off. Applies to inserts + and updates. 
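+
+    For example, a sketch of inspecting the context from within a
+    :meth:`.ConnectionEvents.after_cursor_execute` event (illustrative
+    only; ``engine`` is assumed to be an existing :class:`.Engine`)::
+
+        from sqlalchemy import event
+
+        @event.listens_for(engine, "after_cursor_execute")
+        def log_inserts(conn, cursor, statement, parameters,
+                        context, executemany):
+            if context.isinsert:
+                print("INSERT completed:", statement)
+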
+ """ + + exception = None + """A DBAPI-level exception that was caught when this ExecutionContext + attempted to execute a statement. + + This attribute is meaningful only within the + :meth:`.ConnectionEvents.dbapi_error` event. + + .. versionadded:: 0.9.7 + + .. seealso:: + + :attr:`.ExecutionContext.is_disconnect` + + :meth:`.ConnectionEvents.dbapi_error` + + """ + + is_disconnect = None + """Boolean flag set to True or False when a DBAPI-level exception + is caught when this ExecutionContext attempted to execute a statement. + + This attribute is meaningful only within the + :meth:`.ConnectionEvents.dbapi_error` event. + + .. versionadded:: 0.9.7 + + .. seealso:: + + :attr:`.ExecutionContext.exception` + + :meth:`.ConnectionEvents.dbapi_error` + + """ + + def create_cursor(self): + """Return a new cursor generated from this ExecutionContext's + connection. + + Some dialects may wish to change the behavior of + connection.cursor(), such as postgresql which may return a PG + "server side" cursor. + """ + + raise NotImplementedError() + + def pre_exec(self): + """Called before an execution of a compiled statement. + + If a compiled statement was passed to this ExecutionContext, + the `statement` and `parameters` datamembers must be + initialized after this statement is complete. + """ + + raise NotImplementedError() + + def post_exec(self): + """Called after the execution of a compiled statement. + + If a compiled statement was passed to this ExecutionContext, + the `last_insert_ids`, `last_inserted_params`, etc. + datamembers should be available after this method completes. + """ + + raise NotImplementedError() + + def result(self): + """Return a result object corresponding to this ExecutionContext. + + Returns a ResultProxy. + """ + + raise NotImplementedError() + + def handle_dbapi_exception(self, e): + """Receive a DBAPI exception which occurred upon execute, result + fetch, etc.""" + + raise NotImplementedError() + + def should_autocommit_text(self, statement): + """Parse the given textual statement and return True if it refers to + a "committable" statement""" + + raise NotImplementedError() + + def lastrow_has_defaults(self): + """Return True if the last INSERT or UPDATE row contained + inlined or database-side defaults. + """ + + raise NotImplementedError() + + def get_rowcount(self): + """Return the DBAPI ``cursor.rowcount`` value, or in some + cases an interpreted value. + + See :attr:`.ResultProxy.rowcount` for details on this. + + """ + + raise NotImplementedError() + + +class Connectable(object): + """Interface for an object which supports execution of SQL constructs. + + The two implementations of :class:`.Connectable` are + :class:`.Connection` and :class:`.Engine`. + + Connectable must also implement the 'dialect' member which references a + :class:`.Dialect` instance. + + """ + + def connect(self, **kwargs): + """Return a :class:`.Connection` object. + + Depending on context, this may be ``self`` if this object + is already an instance of :class:`.Connection`, or a newly + procured :class:`.Connection` if this object is an instance + of :class:`.Engine`. + + """ + + def contextual_connect(self): + """Return a :class:`.Connection` object which may be part of an ongoing + context. + + Depending on context, this may be ``self`` if this object + is already an instance of :class:`.Connection`, or a newly + procured :class:`.Connection` if this object is an instance + of :class:`.Engine`. 
+ + """ + + raise NotImplementedError() + + @util.deprecated("0.7", + "Use the create() method on the given schema " + "object directly, i.e. :meth:`.Table.create`, " + ":meth:`.Index.create`, :meth:`.MetaData.create_all`") + def create(self, entity, **kwargs): + """Emit CREATE statements for the given schema entity. + """ + + raise NotImplementedError() + + @util.deprecated("0.7", + "Use the drop() method on the given schema " + "object directly, i.e. :meth:`.Table.drop`, " + ":meth:`.Index.drop`, :meth:`.MetaData.drop_all`") + def drop(self, entity, **kwargs): + """Emit DROP statements for the given schema entity. + """ + + raise NotImplementedError() + + def execute(self, object, *multiparams, **params): + """Executes the given construct and returns a :class:`.ResultProxy`.""" + raise NotImplementedError() + + def scalar(self, object, *multiparams, **params): + """Executes and returns the first column of the first row. + + The underlying cursor is closed after execution. + """ + raise NotImplementedError() + + def _run_visitor(self, visitorcallable, element, + **kwargs): + raise NotImplementedError() + + def _execute_clauseelement(self, elem, multiparams=None, params=None): + raise NotImplementedError() + + +class ExceptionContext(object): + """Encapsulate information about an error condition in progress. + + This object exists solely to be passed to the + :meth:`.ConnectionEvents.handle_error` event, supporting an interface that + can be extended without backwards-incompatibility. + + .. versionadded:: 0.9.7 + + """ + + connection = None + """The :class:`.Connection` in use during the exception. + + This member is present, except in the case of a failure when + first connecting. + + .. seealso:: + + :attr:`.ExceptionContext.engine` + + + """ + + engine = None + """The :class:`.Engine` in use during the exception. + + This member should always be present, even in the case of a failure + when first connecting. + + .. versionadded:: 1.0.0 + + """ + + cursor = None + """The DBAPI cursor object. + + May be None. + + """ + + statement = None + """String SQL statement that was emitted directly to the DBAPI. + + May be None. + + """ + + parameters = None + """Parameter collection that was emitted directly to the DBAPI. + + May be None. + + """ + + original_exception = None + """The exception object which was caught. + + This member is always present. + + """ + + sqlalchemy_exception = None + """The :class:`sqlalchemy.exc.StatementError` which wraps the original, + and will be raised if exception handling is not circumvented by the event. + + May be None, as not all exception types are wrapped by SQLAlchemy. + For DBAPI-level exceptions that subclass the dbapi's Error class, this + field will always be present. + + """ + + chained_exception = None + """The exception that was returned by the previous handler in the + exception chain, if any. + + If present, this exception will be the one ultimately raised by + SQLAlchemy unless a subsequent handler replaces it. + + May be None. + + """ + + execution_context = None + """The :class:`.ExecutionContext` corresponding to the execution + operation in progress. + + This is present for statement execution operations, but not for + operations such as transaction begin/end. It also is not present when + the exception was raised before the :class:`.ExecutionContext` + could be constructed. 
+
+    Note that the :attr:`.ExceptionContext.statement` and
+    :attr:`.ExceptionContext.parameters` members may represent a
+    different value than that of the :class:`.ExecutionContext`,
+    potentially in the case where a
+    :meth:`.ConnectionEvents.before_cursor_execute` event or similar
+    modified the statement/parameters to be sent.
+
+    May be None.
+
+    """
+
+    is_disconnect = None
+    """Represent whether the exception that occurred represents a
+    "disconnect" condition.
+
+    This flag will always be True or False within the scope of the
+    :meth:`.ConnectionEvents.handle_error` handler.
+
+    SQLAlchemy will defer to this flag in order to determine whether or not
+    the connection should be invalidated subsequently. That is, by
+    assigning to this flag, a "disconnect" event which then results in
+    a connection and pool invalidation can be invoked or prevented.
+
+    """
+
+    invalidate_pool_on_disconnect = True
+    """Represent whether all connections in the pool should be invalidated
+    when a "disconnect" condition is in effect.
+
+    Setting this flag to False within the scope of the
+    :meth:`.ConnectionEvents.handle_error` event will have the effect such
+    that the full collection of connections in the pool will not be
+    invalidated during a disconnect; only the current connection that is the
+    subject of the error will actually be invalidated.
+
+    The purpose of this flag is for custom disconnect-handling schemes where
+    the invalidation of other connections in the pool is to be performed
+    based on other conditions, or even on a per-connection basis.
+
+    .. versionadded:: 1.0.3
+
+    """
diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/reflection.py b/lib/python3.4/site-packages/sqlalchemy/engine/reflection.py
new file mode 100644
index 0000000..98fcfa0
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/engine/reflection.py
@@ -0,0 +1,788 @@
+# engine/reflection.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Provides an abstraction for obtaining database schema information.
+
+Usage Notes:
+
+Here are some general conventions when accessing the low level inspector
+methods such as get_table_names, get_columns, etc.
+
+1. Inspector methods return lists of dicts in most cases for the following
+   reasons:
+
+   * They're both standard types that can be serialized.
+   * Using a dict instead of a tuple allows easy expansion of attributes.
+   * Using a list for the outer structure maintains order and is easy to work
+     with (e.g. list comprehension [d['name'] for d in cols]).
+
+2. Records that contain a name, such as the column name in a column record,
+   use the key 'name'. So for most return values, each record will have a
+   'name' attribute.
+"""
+
+from .. import exc, sql
+from ..sql import schema as sa_schema
+from .. import util
+from ..sql.type_api import TypeEngine
+from ..util import deprecated
+from ..util import topological
+from .. import inspection
+from .base import Connectable
+
+
+@util.decorator
+def cache(fn, self, con, *args, **kw):
+    info_cache = kw.get('info_cache', None)
+    if info_cache is None:
+        return fn(self, con, *args, **kw)
+    key = (
+        fn.__name__,
+        tuple(a for a in args if isinstance(a, util.string_types)),
+        tuple((k, v) for k, v in kw.items() if
+              isinstance(v,
+                         util.string_types + util.int_types + (float, )
+                         )
+              )
+    )
+    ret = info_cache.get(key)
+    if ret is None:
+        ret = fn(self, con, *args, **kw)
+        info_cache[key] = ret
+    return ret
+
+
+class Inspector(object):
+    """Performs database schema inspection.
+
+    The Inspector acts as a proxy to the reflection methods of the
+    :class:`~sqlalchemy.engine.interfaces.Dialect`, providing a
+    consistent interface as well as caching support for previously
+    fetched metadata.
+
+    A :class:`.Inspector` object is usually created via the
+    :func:`.inspect` function::
+
+        from sqlalchemy import inspect, create_engine
+        engine = create_engine('...')
+        insp = inspect(engine)
+
+    The inspection method above is equivalent to using the
+    :meth:`.Inspector.from_engine` method, i.e.::
+
+        engine = create_engine('...')
+        insp = Inspector.from_engine(engine)
+
+    Where above, the :class:`~sqlalchemy.engine.interfaces.Dialect` may opt
+    to return an :class:`.Inspector` subclass that provides additional
+    methods specific to the dialect's target database.
+
+    """
+
+    def __init__(self, bind):
+        """Initialize a new :class:`.Inspector`.
+
+        :param bind: a :class:`~sqlalchemy.engine.Connectable`,
+          which is typically an instance of
+          :class:`~sqlalchemy.engine.Engine` or
+          :class:`~sqlalchemy.engine.Connection`.
+
+        For a dialect-specific instance of :class:`.Inspector`, see
+        :meth:`.Inspector.from_engine`.
+
+        """
+        # this might not be a connection, it could be an engine.
+        self.bind = bind
+
+        # set the engine
+        if hasattr(bind, 'engine'):
+            self.engine = bind.engine
+        else:
+            self.engine = bind
+
+        if self.engine is bind:
+            # if engine, ensure initialized
+            bind.connect().close()
+
+        self.dialect = self.engine.dialect
+        self.info_cache = {}
+
+    @classmethod
+    def from_engine(cls, bind):
+        """Construct a new dialect-specific Inspector object from the given
+        engine or connection.
+
+        :param bind: a :class:`~sqlalchemy.engine.Connectable`,
+          which is typically an instance of
+          :class:`~sqlalchemy.engine.Engine` or
+          :class:`~sqlalchemy.engine.Connection`.
+
+        This method differs from a direct constructor call of
+        :class:`.Inspector` in that the
+        :class:`~sqlalchemy.engine.interfaces.Dialect` is given a chance to
+        provide a dialect-specific :class:`.Inspector` instance, which may
+        provide additional methods.
+
+        See the example at :class:`.Inspector`.
+
+        """
+        if hasattr(bind.dialect, 'inspector'):
+            return bind.dialect.inspector(bind)
+        return Inspector(bind)
+
+    @inspection._inspects(Connectable)
+    def _insp(bind):
+        return Inspector.from_engine(bind)
+
+    @property
+    def default_schema_name(self):
+        """Return the default schema name presented by the dialect
+        for the current engine's database user.
+
+        E.g. this is typically ``public`` for Postgresql and ``dbo``
+        for SQL Server.
+
+        """
+        return self.dialect.default_schema_name
+
+    def get_schema_names(self):
+        """Return all schema names.
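+
+        E.g. (an illustrative sketch; the schema names returned depend
+        entirely on the backend)::
+
+            insp = inspect(engine)
+            print(insp.get_schema_names())
+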
+ """ + + if hasattr(self.dialect, 'get_schema_names'): + return self.dialect.get_schema_names(self.bind, + info_cache=self.info_cache) + return [] + + def get_table_names(self, schema=None, order_by=None): + """Return all table names in referred to within a particular schema. + + The names are expected to be real tables only, not views. + Views are instead returned using the :meth:`.Inspector.get_view_names` + method. + + + :param schema: Schema name. If ``schema`` is left at ``None``, the + database's default schema is + used, else the named schema is searched. If the database does not + support named schemas, behavior is undefined if ``schema`` is not + passed as ``None``. For special quoting, use :class:`.quoted_name`. + + :param order_by: Optional, may be the string "foreign_key" to sort + the result on foreign key dependencies. Does not automatically + resolve cycles, and will raise :class:`.CircularDependencyError` + if cycles exist. + + .. deprecated:: 1.0.0 - see + :meth:`.Inspector.get_sorted_table_and_fkc_names` for a version + of this which resolves foreign key cycles between tables + automatically. + + .. versionchanged:: 0.8 the "foreign_key" sorting sorts tables + in order of dependee to dependent; that is, in creation + order, rather than in drop order. This is to maintain + consistency with similar features such as + :attr:`.MetaData.sorted_tables` and :func:`.util.sort_tables`. + + .. seealso:: + + :meth:`.Inspector.get_sorted_table_and_fkc_names` + + :attr:`.MetaData.sorted_tables` + + """ + + if hasattr(self.dialect, 'get_table_names'): + tnames = self.dialect.get_table_names( + self.bind, schema, info_cache=self.info_cache) + else: + tnames = self.engine.table_names(schema) + if order_by == 'foreign_key': + tuples = [] + for tname in tnames: + for fkey in self.get_foreign_keys(tname, schema): + if tname != fkey['referred_table']: + tuples.append((fkey['referred_table'], tname)) + tnames = list(topological.sort(tuples, tnames)) + return tnames + + def get_sorted_table_and_fkc_names(self, schema=None): + """Return dependency-sorted table and foreign key constraint names in + referred to within a particular schema. + + This will yield 2-tuples of + ``(tablename, [(tname, fkname), (tname, fkname), ...])`` + consisting of table names in CREATE order grouped with the foreign key + constraint names that are not detected as belonging to a cycle. + The final element + will be ``(None, [(tname, fkname), (tname, fkname), ..])`` + which will consist of remaining + foreign key constraint names that would require a separate CREATE + step after-the-fact, based on dependencies between tables. + + .. versionadded:: 1.0.- + + .. seealso:: + + :meth:`.Inspector.get_table_names` + + :func:`.sort_tables_and_constraints` - similar method which works + with an already-given :class:`.MetaData`. 
+ + """ + if hasattr(self.dialect, 'get_table_names'): + tnames = self.dialect.get_table_names( + self.bind, schema, info_cache=self.info_cache) + else: + tnames = self.engine.table_names(schema) + + tuples = set() + remaining_fkcs = set() + + fknames_for_table = {} + for tname in tnames: + fkeys = self.get_foreign_keys(tname, schema) + fknames_for_table[tname] = set( + [fk['name'] for fk in fkeys] + ) + for fkey in fkeys: + if tname != fkey['referred_table']: + tuples.add((fkey['referred_table'], tname)) + try: + candidate_sort = list(topological.sort(tuples, tnames)) + except exc.CircularDependencyError as err: + for edge in err.edges: + tuples.remove(edge) + remaining_fkcs.update( + (edge[1], fkc) + for fkc in fknames_for_table[edge[1]] + ) + + candidate_sort = list(topological.sort(tuples, tnames)) + return [ + (tname, fknames_for_table[tname].difference(remaining_fkcs)) + for tname in candidate_sort + ] + [(None, list(remaining_fkcs))] + + def get_temp_table_names(self): + """return a list of temporary table names for the current bind. + + This method is unsupported by most dialects; currently + only SQLite implements it. + + .. versionadded:: 1.0.0 + + """ + return self.dialect.get_temp_table_names( + self.bind, info_cache=self.info_cache) + + def get_temp_view_names(self): + """return a list of temporary view names for the current bind. + + This method is unsupported by most dialects; currently + only SQLite implements it. + + .. versionadded:: 1.0.0 + + """ + return self.dialect.get_temp_view_names( + self.bind, info_cache=self.info_cache) + + def get_table_options(self, table_name, schema=None, **kw): + """Return a dictionary of options specified when the table of the + given name was created. + + This currently includes some options that apply to MySQL tables. + + :param table_name: string name of the table. For special quoting, + use :class:`.quoted_name`. + + :param schema: string schema name; if omitted, uses the default schema + of the database connection. For special quoting, + use :class:`.quoted_name`. + + """ + if hasattr(self.dialect, 'get_table_options'): + return self.dialect.get_table_options( + self.bind, table_name, schema, + info_cache=self.info_cache, **kw) + return {} + + def get_view_names(self, schema=None): + """Return all view names in `schema`. + + :param schema: Optional, retrieve names from a non-default schema. + For special quoting, use :class:`.quoted_name`. + + """ + + return self.dialect.get_view_names(self.bind, schema, + info_cache=self.info_cache) + + def get_view_definition(self, view_name, schema=None): + """Return definition for `view_name`. + + :param schema: Optional, retrieve names from a non-default schema. + For special quoting, use :class:`.quoted_name`. + + """ + + return self.dialect.get_view_definition( + self.bind, view_name, schema, info_cache=self.info_cache) + + def get_columns(self, table_name, schema=None, **kw): + """Return information about columns in `table_name`. + + Given a string `table_name` and an optional string `schema`, return + column information as a list of dicts with these keys: + + name + the column's name + + type + :class:`~sqlalchemy.types.TypeEngine` + + nullable + boolean + + default + the column's default value + + attrs + dict containing optional column attributes + + :param table_name: string name of the table. For special quoting, + use :class:`.quoted_name`. + + :param schema: string schema name; if omitted, uses the default schema + of the database connection. 
For special quoting, + use :class:`.quoted_name`. + + """ + + col_defs = self.dialect.get_columns(self.bind, table_name, schema, + info_cache=self.info_cache, + **kw) + for col_def in col_defs: + # make this easy and only return instances for coltype + coltype = col_def['type'] + if not isinstance(coltype, TypeEngine): + col_def['type'] = coltype() + return col_defs + + @deprecated('0.7', 'Call to deprecated method get_primary_keys.' + ' Use get_pk_constraint instead.') + def get_primary_keys(self, table_name, schema=None, **kw): + """Return information about primary keys in `table_name`. + + Given a string `table_name`, and an optional string `schema`, return + primary key information as a list of column names. + """ + + return self.dialect.get_pk_constraint(self.bind, table_name, schema, + info_cache=self.info_cache, + **kw)['constrained_columns'] + + def get_pk_constraint(self, table_name, schema=None, **kw): + """Return information about primary key constraint on `table_name`. + + Given a string `table_name`, and an optional string `schema`, return + primary key information as a dictionary with these keys: + + constrained_columns + a list of column names that make up the primary key + + name + optional name of the primary key constraint. + + :param table_name: string name of the table. For special quoting, + use :class:`.quoted_name`. + + :param schema: string schema name; if omitted, uses the default schema + of the database connection. For special quoting, + use :class:`.quoted_name`. + + """ + return self.dialect.get_pk_constraint(self.bind, table_name, schema, + info_cache=self.info_cache, + **kw) + + def get_foreign_keys(self, table_name, schema=None, **kw): + """Return information about foreign_keys in `table_name`. + + Given a string `table_name`, and an optional string `schema`, return + foreign key information as a list of dicts with these keys: + + constrained_columns + a list of column names that make up the foreign key + + referred_schema + the name of the referred schema + + referred_table + the name of the referred table + + referred_columns + a list of column names in the referred table that correspond to + constrained_columns + + name + optional name of the foreign key constraint. + + :param table_name: string name of the table. For special quoting, + use :class:`.quoted_name`. + + :param schema: string schema name; if omitted, uses the default schema + of the database connection. For special quoting, + use :class:`.quoted_name`. + + """ + + return self.dialect.get_foreign_keys(self.bind, table_name, schema, + info_cache=self.info_cache, + **kw) + + def get_indexes(self, table_name, schema=None, **kw): + """Return information about indexes in `table_name`. + + Given a string `table_name` and an optional string `schema`, return + index information as a list of dicts with these keys: + + name + the index's name + + column_names + list of column names in order + + unique + boolean + + dialect_options + dict of dialect-specific index options. May not be present + for all dialects. + + .. versionadded:: 1.0.0 + + :param table_name: string name of the table. For special quoting, + use :class:`.quoted_name`. + + :param schema: string schema name; if omitted, uses the default schema + of the database connection. For special quoting, + use :class:`.quoted_name`. 
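+
+        E.g. (an illustrative sketch; ``'user'`` is a hypothetical
+        table name)::
+
+            for ix in insp.get_indexes('user'):
+                print(ix['name'], ix['column_names'], ix['unique'])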
+ + """ + + return self.dialect.get_indexes(self.bind, table_name, + schema, + info_cache=self.info_cache, **kw) + + def get_unique_constraints(self, table_name, schema=None, **kw): + """Return information about unique constraints in `table_name`. + + Given a string `table_name` and an optional string `schema`, return + unique constraint information as a list of dicts with these keys: + + name + the unique constraint's name + + column_names + list of column names in order + + :param table_name: string name of the table. For special quoting, + use :class:`.quoted_name`. + + :param schema: string schema name; if omitted, uses the default schema + of the database connection. For special quoting, + use :class:`.quoted_name`. + + .. versionadded:: 0.8.4 + + """ + + return self.dialect.get_unique_constraints( + self.bind, table_name, schema, info_cache=self.info_cache, **kw) + + def reflecttable(self, table, include_columns, exclude_columns=()): + """Given a Table object, load its internal constructs based on + introspection. + + This is the underlying method used by most dialects to produce + table reflection. Direct usage is like:: + + from sqlalchemy import create_engine, MetaData, Table + from sqlalchemy.engine import reflection + + engine = create_engine('...') + meta = MetaData() + user_table = Table('user', meta) + insp = Inspector.from_engine(engine) + insp.reflecttable(user_table, None) + + :param table: a :class:`~sqlalchemy.schema.Table` instance. + :param include_columns: a list of string column names to include + in the reflection process. If ``None``, all columns are reflected. + + """ + dialect = self.bind.dialect + + schema = table.schema + table_name = table.name + + # get table-level arguments that are specifically + # intended for reflection, e.g. oracle_resolve_synonyms. 
+ # these are unconditionally passed to related Table + # objects + reflection_options = dict( + (k, table.dialect_kwargs.get(k)) + for k in dialect.reflection_options + if k in table.dialect_kwargs + ) + + # reflect table options, like mysql_engine + tbl_opts = self.get_table_options( + table_name, schema, **table.dialect_kwargs) + if tbl_opts: + # add additional kwargs to the Table if the dialect + # returned them + table._validate_dialect_kwargs(tbl_opts) + + if util.py2k: + if isinstance(schema, str): + schema = schema.decode(dialect.encoding) + if isinstance(table_name, str): + table_name = table_name.decode(dialect.encoding) + + found_table = False + cols_by_orig_name = {} + + for col_d in self.get_columns( + table_name, schema, **table.dialect_kwargs): + found_table = True + + self._reflect_column( + table, col_d, include_columns, + exclude_columns, cols_by_orig_name) + + if not found_table: + raise exc.NoSuchTableError(table.name) + + self._reflect_pk( + table_name, schema, table, cols_by_orig_name, exclude_columns) + + self._reflect_fk( + table_name, schema, table, cols_by_orig_name, + exclude_columns, reflection_options) + + self._reflect_indexes( + table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options) + + self._reflect_unique_constraints( + table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options) + + def _reflect_column( + self, table, col_d, include_columns, + exclude_columns, cols_by_orig_name): + + orig_name = col_d['name'] + + table.dispatch.column_reflect(self, table, col_d) + + # fetch name again as column_reflect is allowed to + # change it + name = col_d['name'] + if (include_columns and name not in include_columns) \ + or (exclude_columns and name in exclude_columns): + return + + coltype = col_d['type'] + + col_kw = dict( + (k, col_d[k]) + for k in ['nullable', 'autoincrement', 'quote', 'info', 'key'] + if k in col_d + ) + + colargs = [] + if col_d.get('default') is not None: + # the "default" value is assumed to be a literal SQL + # expression, so is wrapped in text() so that no quoting + # occurs on re-issuance. + colargs.append( + sa_schema.DefaultClause( + sql.text(col_d['default']), _reflected=True + ) + ) + + if 'sequence' in col_d: + self._reflect_col_sequence(col_d, colargs) + + cols_by_orig_name[orig_name] = col = \ + sa_schema.Column(name, coltype, *colargs, **col_kw) + + if col.key in table.primary_key: + col.primary_key = True + table.append_column(col) + + def _reflect_col_sequence(self, col_d, colargs): + if 'sequence' in col_d: + # TODO: mssql and sybase are using this. 
+ seq = col_d['sequence'] + sequence = sa_schema.Sequence(seq['name'], 1, 1) + if 'start' in seq: + sequence.start = seq['start'] + if 'increment' in seq: + sequence.increment = seq['increment'] + colargs.append(sequence) + + def _reflect_pk( + self, table_name, schema, table, + cols_by_orig_name, exclude_columns): + pk_cons = self.get_pk_constraint( + table_name, schema, **table.dialect_kwargs) + if pk_cons: + pk_cols = [ + cols_by_orig_name[pk] + for pk in pk_cons['constrained_columns'] + if pk in cols_by_orig_name and pk not in exclude_columns + ] + + # update pk constraint name + table.primary_key.name = pk_cons.get('name') + + # tell the PKConstraint to re-initialize + # its column collection + table.primary_key._reload(pk_cols) + + def _reflect_fk( + self, table_name, schema, table, cols_by_orig_name, + exclude_columns, reflection_options): + fkeys = self.get_foreign_keys( + table_name, schema, **table.dialect_kwargs) + for fkey_d in fkeys: + conname = fkey_d['name'] + # look for columns by orig name in cols_by_orig_name, + # but support columns that are in-Python only as fallback + constrained_columns = [ + cols_by_orig_name[c].key + if c in cols_by_orig_name else c + for c in fkey_d['constrained_columns'] + ] + if exclude_columns and set(constrained_columns).intersection( + exclude_columns): + continue + referred_schema = fkey_d['referred_schema'] + referred_table = fkey_d['referred_table'] + referred_columns = fkey_d['referred_columns'] + refspec = [] + if referred_schema is not None: + sa_schema.Table(referred_table, table.metadata, + autoload=True, schema=referred_schema, + autoload_with=self.bind, + **reflection_options + ) + for column in referred_columns: + refspec.append(".".join( + [referred_schema, referred_table, column])) + else: + sa_schema.Table(referred_table, table.metadata, autoload=True, + autoload_with=self.bind, + **reflection_options + ) + for column in referred_columns: + refspec.append(".".join([referred_table, column])) + if 'options' in fkey_d: + options = fkey_d['options'] + else: + options = {} + table.append_constraint( + sa_schema.ForeignKeyConstraint(constrained_columns, refspec, + conname, link_to_name=True, + **options)) + + def _reflect_indexes( + self, table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options): + # Indexes + indexes = self.get_indexes(table_name, schema) + for index_d in indexes: + name = index_d['name'] + columns = index_d['column_names'] + unique = index_d['unique'] + flavor = index_d.get('type', 'index') + dialect_options = index_d.get('dialect_options', {}) + + duplicates = index_d.get('duplicates_constraint') + if include_columns and \ + not set(columns).issubset(include_columns): + util.warn( + "Omitting %s key for (%s), key covers omitted columns." 
%
+                    (flavor, ', '.join(columns)))
+                continue
+            if duplicates:
+                continue
+            # look for columns by orig name in cols_by_orig_name,
+            # but support columns that are in-Python only as fallback
+            idx_cols = []
+            for c in columns:
+                try:
+                    idx_col = cols_by_orig_name[c] \
+                        if c in cols_by_orig_name else table.c[c]
+                except KeyError:
+                    util.warn(
+                        "%s key '%s' was not located in "
+                        "columns for table '%s'" % (
+                            flavor, c, table_name
+                        ))
+                else:
+                    idx_cols.append(idx_col)
+
+            sa_schema.Index(
+                name, *idx_cols,
+                **dict(list(dialect_options.items()) + [('unique', unique)])
+            )
+
+    def _reflect_unique_constraints(
+            self, table_name, schema, table, cols_by_orig_name,
+            include_columns, exclude_columns, reflection_options):
+
+        # Unique Constraints
+        try:
+            constraints = self.get_unique_constraints(table_name, schema)
+        except NotImplementedError:
+            # optional dialect feature
+            return
+
+        for const_d in constraints:
+            conname = const_d['name']
+            columns = const_d['column_names']
+            duplicates = const_d.get('duplicates_index')
+            if include_columns and \
+                    not set(columns).issubset(include_columns):
+                util.warn(
+                    "Omitting unique constraint key for (%s), "
+                    "key covers omitted columns." %
+                    ', '.join(columns))
+                continue
+            if duplicates:
+                continue
+            # look for columns by orig name in cols_by_orig_name,
+            # but support columns that are in-Python only as fallback
+            constrained_cols = []
+            for c in columns:
+                try:
+                    constrained_col = cols_by_orig_name[c] \
+                        if c in cols_by_orig_name else table.c[c]
+                except KeyError:
+                    util.warn(
+                        "unique constraint key '%s' was not located in "
+                        "columns for table '%s'" % (c, table_name))
+                else:
+                    constrained_cols.append(constrained_col)
+            table.append_constraint(
+                sa_schema.UniqueConstraint(*constrained_cols, name=conname))
diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/result.py b/lib/python3.4/site-packages/sqlalchemy/engine/result.py
new file mode 100644
index 0000000..689382f
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/engine/result.py
@@ -0,0 +1,1273 @@
+# engine/result.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Define result set constructs including :class:`.ResultProxy`
+and :class:`.RowProxy`."""
+
+
+from .. import exc, util
+from ..sql import expression, sqltypes
+import collections
+import operator
+
+# This reconstructor is necessary so that pickles with the C extension or
+# without use the same binary format.
+try:
+    # We need a different reconstructor on the C extension so that we can
+    # add extra checks that fields have correctly been initialized by
+    # __setstate__.
+    from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor
+
+    # The extra function embedding is needed so that the
+    # reconstructor function has the same signature whether or not
+    # the extension is present.
+ def rowproxy_reconstructor(cls, state): + return safe_rowproxy_reconstructor(cls, state) +except ImportError: + def rowproxy_reconstructor(cls, state): + obj = cls.__new__(cls) + obj.__setstate__(state) + return obj + +try: + from sqlalchemy.cresultproxy import BaseRowProxy +except ImportError: + class BaseRowProxy(object): + __slots__ = ('_parent', '_row', '_processors', '_keymap') + + def __init__(self, parent, row, processors, keymap): + """RowProxy objects are constructed by ResultProxy objects.""" + + self._parent = parent + self._row = row + self._processors = processors + self._keymap = keymap + + def __reduce__(self): + return (rowproxy_reconstructor, + (self.__class__, self.__getstate__())) + + def values(self): + """Return the values represented by this RowProxy as a list.""" + return list(self) + + def __iter__(self): + for processor, value in zip(self._processors, self._row): + if processor is None: + yield value + else: + yield processor(value) + + def __len__(self): + return len(self._row) + + def __getitem__(self, key): + try: + processor, obj, index = self._keymap[key] + except KeyError: + processor, obj, index = self._parent._key_fallback(key) + except TypeError: + if isinstance(key, slice): + l = [] + for processor, value in zip(self._processors[key], + self._row[key]): + if processor is None: + l.append(value) + else: + l.append(processor(value)) + return tuple(l) + else: + raise + if index is None: + raise exc.InvalidRequestError( + "Ambiguous column name '%s' in result set! " + "try 'use_labels' option on select statement." % key) + if processor is not None: + return processor(self._row[index]) + else: + return self._row[index] + + def __getattr__(self, name): + try: + return self[name] + except KeyError as e: + raise AttributeError(e.args[0]) + + +class RowProxy(BaseRowProxy): + """Proxy values from a single cursor row. + + Mostly follows "ordered dictionary" behavior, mapping result + values to the string-based column name, the integer position of + the result in the row, as well as Column instances which can be + mapped to the original Columns that produced this result set (for + results that correspond to constructed SQL expressions). 
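+
+    As an illustrative sketch of the "ordered dictionary" behaviors
+    (assuming ``row`` was fetched from any executed statement)::
+
+        row.keys()           # list of string keys, in result-set order
+        row.items()          # list of (key, value) tuples
+        list(row)            # the values alone, in the same order
+        'user_name' in row   # key containment check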
+ """ + __slots__ = () + + def __contains__(self, key): + return self._parent._has_key(key) + + def __getstate__(self): + return { + '_parent': self._parent, + '_row': tuple(self) + } + + def __setstate__(self, state): + self._parent = parent = state['_parent'] + self._row = state['_row'] + self._processors = parent._processors + self._keymap = parent._keymap + + __hash__ = None + + def _op(self, other, op): + return op(tuple(self), tuple(other)) \ + if isinstance(other, RowProxy) \ + else op(tuple(self), other) + + def __lt__(self, other): + return self._op(other, operator.lt) + + def __le__(self, other): + return self._op(other, operator.le) + + def __ge__(self, other): + return self._op(other, operator.ge) + + def __gt__(self, other): + return self._op(other, operator.gt) + + def __eq__(self, other): + return self._op(other, operator.eq) + + def __ne__(self, other): + return self._op(other, operator.ne) + + def __repr__(self): + return repr(tuple(self)) + + def has_key(self, key): + """Return True if this RowProxy contains the given key.""" + + return self._parent._has_key(key) + + def items(self): + """Return a list of tuples, each tuple containing a key/value pair.""" + # TODO: no coverage here + return [(key, self[key]) for key in self.keys()] + + def keys(self): + """Return the list of keys as strings represented by this RowProxy.""" + + return self._parent.keys + + def iterkeys(self): + return iter(self._parent.keys) + + def itervalues(self): + return iter(self) + +try: + # Register RowProxy with Sequence, + # so sequence protocol is implemented + from collections import Sequence + Sequence.register(RowProxy) +except ImportError: + pass + + +class ResultMetaData(object): + """Handle cursor.description, applying additional info from an execution + context.""" + + def __init__(self, parent, metadata): + context = parent.context + dialect = context.dialect + typemap = dialect.dbapi_type_map + translate_colname = context._translate_colname + self.case_sensitive = case_sensitive = dialect.case_sensitive + + if context.result_column_struct: + result_columns, cols_are_ordered = context.result_column_struct + num_ctx_cols = len(result_columns) + else: + num_ctx_cols = None + + if num_ctx_cols and \ + cols_are_ordered and \ + num_ctx_cols == len(metadata): + # case 1 - SQL expression statement, number of columns + # in result matches number of cols in compiled. This is the + # vast majority case for SQL expression constructs. In this + # case we don't bother trying to parse or match up to + # the colnames in the result description. + raw = [ + ( + idx, + key, + name.lower() if not case_sensitive else name, + context.get_result_processor( + type_, key, metadata[idx][1] + ), + obj, + None + ) for idx, (key, name, obj, type_) + in enumerate(result_columns) + ] + self.keys = [ + elem[0] for elem in result_columns + ] + else: + # case 2 - raw string, or number of columns in result does + # not match number of cols in compiled. The raw string case + # is very common. The latter can happen + # when text() is used with only a partial typemap, or + # in the extremely unlikely cases where the compiled construct + # has a single element with multiple col expressions in it + # (e.g. has commas embedded) or there's some kind of statement + # that is adding extra columns. + # In all these cases we fall back to the "named" approach + # that SQLAlchemy has used up through 0.9. 
+ + if num_ctx_cols: + result_map = self._create_result_map( + result_columns, case_sensitive) + + raw = [] + self.keys = [] + untranslated = None + for idx, rec in enumerate(metadata): + colname = rec[0] + coltype = rec[1] + + if dialect.description_encoding: + colname = dialect._description_decoder(colname) + + if translate_colname: + colname, untranslated = translate_colname(colname) + + if dialect.requires_name_normalize: + colname = dialect.normalize_name(colname) + + self.keys.append(colname) + if not case_sensitive: + colname = colname.lower() + + if num_ctx_cols: + try: + ctx_rec = result_map[colname] + except KeyError: + mapped_type = typemap.get(coltype, sqltypes.NULLTYPE) + obj = None + else: + obj = ctx_rec[1] + mapped_type = ctx_rec[2] + else: + mapped_type = typemap.get(coltype, sqltypes.NULLTYPE) + obj = None + processor = context.get_result_processor( + mapped_type, colname, coltype) + + raw.append( + (idx, colname, colname, processor, obj, untranslated) + ) + + # keymap indexes by integer index... + self._keymap = dict([ + (elem[0], (elem[3], elem[4], elem[0])) + for elem in raw + ]) + + # processors in key order for certain per-row + # views like __iter__ and slices + self._processors = [elem[3] for elem in raw] + + if num_ctx_cols: + # keymap by primary string... + by_key = dict([ + (elem[2], (elem[3], elem[4], elem[0])) + for elem in raw + ]) + + # if by-primary-string dictionary smaller (or bigger?!) than + # number of columns, assume we have dupes, rewrite + # dupe records with "None" for index which results in + # ambiguous column exception when accessed. + if len(by_key) != num_ctx_cols: + seen = set() + for rec in raw: + key = rec[1] + if key in seen: + by_key[key] = (None, by_key[key][1], None) + seen.add(key) + + # update keymap with secondary "object"-based keys + self._keymap.update([ + (obj_elem, by_key[elem[2]]) + for elem in raw if elem[4] + for obj_elem in elem[4] + ]) + + # update keymap with primary string names taking + # precedence + self._keymap.update(by_key) + else: + self._keymap.update([ + (elem[2], (elem[3], elem[4], elem[0])) + for elem in raw + ]) + # update keymap with "translated" names (sqlite-only thing) + if translate_colname: + self._keymap.update([ + (elem[5], self._keymap[elem[2]]) + for elem in raw if elem[5] + ]) + + @classmethod + def _create_result_map(cls, result_columns, case_sensitive=True): + d = {} + for elem in result_columns: + key, rec = elem[0], elem[1:] + if not case_sensitive: + key = key.lower() + if key in d: + # conflicting keyname, just double up the list + # of objects. this will cause an "ambiguous name" + # error if an attempt is made by the result set to + # access. + e_name, e_obj, e_type = d[key] + d[key] = e_name, e_obj + rec[1], e_type + else: + d[key] = rec + return d + + @util.pending_deprecation("0.8", "sqlite dialect uses " + "_translate_colname() now") + def _set_keymap_synonym(self, name, origname): + """Set a synonym for the given name. + + Some dialects (SQLite at the moment) may use this to + adjust the column names that are significant within a + row. 
+
+        """
+        rec = (processor, obj, i) = self._keymap[origname if
+                                                 self.case_sensitive
+                                                 else origname.lower()]
+        if self._keymap.setdefault(name, rec) is not rec:
+            self._keymap[name] = (processor, obj, None)
+
+    def _key_fallback(self, key, raiseerr=True):
+        map = self._keymap
+        result = None
+        if isinstance(key, util.string_types):
+            result = map.get(key if self.case_sensitive else key.lower())
+        # fallback for targeting a ColumnElement to a textual expression
+        # this is a rare use case which only occurs when matching text()
+        # or column('name') constructs to ColumnElements, or after a
+        # pickle/unpickle roundtrip
+        elif isinstance(key, expression.ColumnElement):
+            if key._label and (
+                    key._label
+                    if self.case_sensitive
+                    else key._label.lower()) in map:
+                result = map[key._label
+                             if self.case_sensitive
+                             else key._label.lower()]
+            elif hasattr(key, 'name') and (
+                    key.name
+                    if self.case_sensitive
+                    else key.name.lower()) in map:
+                # match is only on name.
+                result = map[key.name
+                             if self.case_sensitive
+                             else key.name.lower()]
+            # search extra hard to make sure this
+            # isn't a column/label name overlap.
+            # this check isn't currently available if the row
+            # was unpickled.
+            if result is not None and \
+                    result[1] is not None:
+                for obj in result[1]:
+                    if key._compare_name_for_result(obj):
+                        break
+                else:
+                    result = None
+        if result is None:
+            if raiseerr:
+                raise exc.NoSuchColumnError(
+                    "Could not locate column in row for column '%s'" %
+                    expression._string_or_unprintable(key))
+            else:
+                return None
+        else:
+            map[key] = result
+        return result
+
+    def _has_key(self, key):
+        if key in self._keymap:
+            return True
+        else:
+            return self._key_fallback(key, False) is not None
+
+    def _getter(self, key):
+        if key in self._keymap:
+            processor, obj, index = self._keymap[key]
+        else:
+            ret = self._key_fallback(key, False)
+            if ret is None:
+                return None
+            processor, obj, index = ret
+
+        if index is None:
+            raise exc.InvalidRequestError(
+                "Ambiguous column name '%s' in result set! "
+                "try 'use_labels' option on select statement." % key)
+
+        return operator.itemgetter(index)
+
+    def __getstate__(self):
+        return {
+            '_pickled_keymap': dict(
+                (key, index)
+                for key, (processor, obj, index) in self._keymap.items()
+                if isinstance(key, util.string_types + util.int_types)
+            ),
+            'keys': self.keys,
+            "case_sensitive": self.case_sensitive,
+        }
+
+    def __setstate__(self, state):
+        # the row has been processed at pickling time so we don't need any
+        # processor anymore
+        self._processors = [None for _ in range(len(state['keys']))]
+        self._keymap = keymap = {}
+        for key, index in state['_pickled_keymap'].items():
+            # not preserving "obj" here, unfortunately our
+            # proxy comparison fails with the unpickle
+            keymap[key] = (None, None, index)
+        self.keys = state['keys']
+        self.case_sensitive = state['case_sensitive']
+        self._echo = False
+
+
+class ResultProxy(object):
+    """Wraps a DB-API cursor object to provide easier access to row columns.
+
+    Individual columns may be accessed by their integer position,
+    case-insensitive column name, or by ``schema.Column``
+    object. e.g.::
+
+        row = fetchone()
+
+        col1 = row[0]    # access via integer position
+
+        col2 = row['col2']   # access via name
+
+        col3 = row[mytable.c.mycol] # access via Column object.
+
+    ``ResultProxy`` also handles post-processing of result column
+    data using ``TypeEngine`` objects, which are referenced from
+    the originating SQL statement that produced this result set.
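+
+    Rows may also be consumed by iteration; as a minimal sketch (assuming
+    ``conn`` is any :class:`.Connection` and a ``user`` table exists)::
+
+        result = conn.execute("select * from user")
+        for row in result:
+            print(row['user_name'])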
+
+    """
+
+    _process_row = RowProxy
+    out_parameters = None
+    _can_close_connection = False
+    _metadata = None
+    _soft_closed = False
+    closed = False
+
+    def __init__(self, context):
+        self.context = context
+        self.dialect = context.dialect
+        self.cursor = self._saved_cursor = context.cursor
+        self.connection = context.root_connection
+        self._echo = self.connection._echo and \
+            context.engine._should_log_debug()
+        self._init_metadata()
+
+    def _getter(self, key):
+        try:
+            getter = self._metadata._getter
+        except AttributeError:
+            return self._non_result(None)
+        else:
+            return getter(key)
+
+    def _has_key(self, key):
+        try:
+            has_key = self._metadata._has_key
+        except AttributeError:
+            return self._non_result(None)
+        else:
+            return has_key(key)
+
+    def _init_metadata(self):
+        metadata = self._cursor_description()
+        if metadata is not None:
+            if self.context.compiled and \
+                    'compiled_cache' in self.context.execution_options:
+                if self.context.compiled._cached_metadata:
+                    self._metadata = self.context.compiled._cached_metadata
+                else:
+                    self._metadata = self.context.compiled._cached_metadata = \
+                        ResultMetaData(self, metadata)
+            else:
+                self._metadata = ResultMetaData(self, metadata)
+            if self._echo:
+                self.context.engine.logger.debug(
+                    "Col %r", tuple(x[0] for x in metadata))
+
+    def keys(self):
+        """Return the current set of string keys for rows."""
+        if self._metadata:
+            return self._metadata.keys
+        else:
+            return []
+
+    @util.memoized_property
+    def rowcount(self):
+        """Return the 'rowcount' for this result.
+
+        The 'rowcount' reports the number of rows *matched*
+        by the WHERE criterion of an UPDATE or DELETE statement.
+
+        .. note::
+
+           Notes regarding :attr:`.ResultProxy.rowcount`:
+
+
+           * This attribute returns the number of rows *matched*,
+             which is not necessarily the same as the number of rows
+             that were actually *modified* - an UPDATE statement, for example,
+             may have no net change on a given row if the SET values
+             given are the same as those present in the row already.
+             Such a row would be matched but not modified.
+             On backends that feature both styles, such as MySQL,
+             rowcount is configured by default to return the match
+             count in all cases.
+
+           * :attr:`.ResultProxy.rowcount` is *only* useful in conjunction
+             with an UPDATE or DELETE statement. Contrary to what the Python
+             DBAPI says, it does *not* return the
+             number of rows available from the results of a SELECT statement
+             as DBAPIs cannot support this functionality when rows are
+             unbuffered.
+
+           * :attr:`.ResultProxy.rowcount` may not be fully implemented by
+             all dialects. In particular, most DBAPIs do not support an
+             aggregate rowcount result from an executemany call.
+             The :meth:`.ResultProxy.supports_sane_rowcount` and
+             :meth:`.ResultProxy.supports_sane_multi_rowcount` methods
+             will report from the dialect if each usage is known to be
+             supported.
+
+           * Statements that use RETURNING may not return a correct
+             rowcount.
+
+        """
+        try:
+            return self.context.rowcount
+        except Exception as e:
+            self.connection._handle_dbapi_exception(
+                e, None, None, self.cursor, self.context)
+
+    @property
+    def lastrowid(self):
+        """return the 'lastrowid' accessor on the DBAPI cursor.
+
+        This is a DBAPI specific method and is only functional
+        for those backends which support it, for statements
+        where it is appropriate. Its behavior is not
+        consistent across backends.
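+
+        As an illustrative sketch (assuming ``conn`` is a
+        :class:`.Connection` and ``table`` has an integer autoincrement
+        primary key, on a DBAPI that supports the accessor)::
+
+            result = conn.execute(table.insert().values(name='some name'))
+            new_id = result.lastrowid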
+
+        Usage of this method is normally unnecessary when
+        using insert() expression constructs; the
+        :attr:`~ResultProxy.inserted_primary_key` attribute provides a
+        tuple of primary key values for a newly inserted row,
+        regardless of database backend.
+
+        """
+        try:
+            return self._saved_cursor.lastrowid
+        except Exception as e:
+            self.connection._handle_dbapi_exception(
+                e, None, None,
+                self._saved_cursor, self.context)
+
+    @property
+    def returns_rows(self):
+        """True if this :class:`.ResultProxy` returns rows.
+
+        I.e. if it is legal to call the methods
+        :meth:`~.ResultProxy.fetchone`,
+        :meth:`~.ResultProxy.fetchmany`,
+        :meth:`~.ResultProxy.fetchall`.
+
+        """
+        return self._metadata is not None
+
+    @property
+    def is_insert(self):
+        """True if this :class:`.ResultProxy` is the result
+        of executing an expression language compiled
+        :func:`.expression.insert` construct.
+
+        When True, this implies that the
+        :attr:`inserted_primary_key` attribute is accessible,
+        assuming the statement did not include
+        a user defined "returning" construct.
+
+        """
+        return self.context.isinsert
+
+    def _cursor_description(self):
+        """May be overridden by subclasses."""
+
+        return self._saved_cursor.description
+
+    def _soft_close(self, _autoclose_connection=True):
+        """Soft close this :class:`.ResultProxy`.
+
+        This releases all DBAPI cursor resources, but leaves the
+        ResultProxy "open" from a semantic perspective, meaning the
+        fetchXXX() methods will continue to return empty results.
+
+        This method is called automatically when:
+
+        * all result rows are exhausted using the fetchXXX() methods.
+        * cursor.description is None.
+
+        This method is **not public**, but is documented in order to clarify
+        the "autoclose" process used.
+
+        .. versionadded:: 1.0.0
+
+        .. seealso::
+
+            :meth:`.ResultProxy.close`
+
+
+        """
+        if self._soft_closed:
+            return
+        self._soft_closed = True
+        cursor = self.cursor
+        self.connection._safe_close_cursor(cursor)
+        if _autoclose_connection and \
+                self.connection.should_close_with_result:
+            self.connection.close()
+        self.cursor = None
+
+    def close(self):
+        """Close this ResultProxy.
+
+        This closes out the underlying DBAPI cursor corresponding
+        to the statement execution, if one is still present. Note that the
+        DBAPI cursor is automatically released when the :class:`.ResultProxy`
+        exhausts all available rows. :meth:`.ResultProxy.close` is generally
+        an optional method except in the case when discarding a
+        :class:`.ResultProxy` that still has additional rows pending for fetch.
+
+        In the case of a result that is the product of
+        :ref:`connectionless execution `,
+        the underlying :class:`.Connection` object is also closed, which
+        :term:`releases` DBAPI connection resources.
+
+        After this method is called, it is no longer valid to call upon
+        the fetch methods, which will raise a :class:`.ResourceClosedError`
+        on subsequent use.
+
+        .. versionchanged:: 1.0.0 - the :meth:`.ResultProxy.close` method
+           has been separated out from the process that releases the underlying
+           DBAPI cursor resource. The "auto close" feature of the
+           :class:`.Connection` now performs a so-called "soft close", which
+           releases the underlying DBAPI cursor, but allows the
+           :class:`.ResultProxy` to still behave as an open-but-exhausted
+           result set; the actual :meth:`.ResultProxy.close` method is never
+           called. It is still safe to discard a :class:`.ResultProxy`
+           that has been fully exhausted without calling this method.
+
+        ..
seealso:: + + :ref:`connections_toplevel` + + :meth:`.ResultProxy._soft_close` + + """ + + if not self.closed: + self._soft_close() + self.closed = True + + def __iter__(self): + while True: + row = self.fetchone() + if row is None: + return + else: + yield row + + @util.memoized_property + def inserted_primary_key(self): + """Return the primary key for the row just inserted. + + The return value is a list of scalar values + corresponding to the list of primary key columns + in the target table. + + This only applies to single row :func:`.insert` + constructs which did not explicitly specify + :meth:`.Insert.returning`. + + Note that primary key columns which specify a + server_default clause, + or otherwise do not qualify as "autoincrement" + columns (see the notes at :class:`.Column`), and were + generated using the database-side default, will + appear in this list as ``None`` unless the backend + supports "returning" and the insert statement executed + with the "implicit returning" enabled. + + Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed + statement is not a compiled expression construct + or is not an insert() construct. + + """ + + if not self.context.compiled: + raise exc.InvalidRequestError( + "Statement is not a compiled " + "expression construct.") + elif not self.context.isinsert: + raise exc.InvalidRequestError( + "Statement is not an insert() " + "expression construct.") + elif self.context._is_explicit_returning: + raise exc.InvalidRequestError( + "Can't call inserted_primary_key " + "when returning() " + "is used.") + + return self.context.inserted_primary_key + + def last_updated_params(self): + """Return the collection of updated parameters from this + execution. + + Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed + statement is not a compiled expression construct + or is not an update() construct. + + """ + if not self.context.compiled: + raise exc.InvalidRequestError( + "Statement is not a compiled " + "expression construct.") + elif not self.context.isupdate: + raise exc.InvalidRequestError( + "Statement is not an update() " + "expression construct.") + elif self.context.executemany: + return self.context.compiled_parameters + else: + return self.context.compiled_parameters[0] + + def last_inserted_params(self): + """Return the collection of inserted parameters from this + execution. + + Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed + statement is not a compiled expression construct + or is not an insert() construct. + + """ + if not self.context.compiled: + raise exc.InvalidRequestError( + "Statement is not a compiled " + "expression construct.") + elif not self.context.isinsert: + raise exc.InvalidRequestError( + "Statement is not an insert() " + "expression construct.") + elif self.context.executemany: + return self.context.compiled_parameters + else: + return self.context.compiled_parameters[0] + + @property + def returned_defaults(self): + """Return the values of default columns that were fetched using + the :meth:`.ValuesBase.return_defaults` feature. + + The value is an instance of :class:`.RowProxy`, or ``None`` + if :meth:`.ValuesBase.return_defaults` was not used or if the + backend does not support RETURNING. + + .. versionadded:: 0.9.0 + + .. seealso:: + + :meth:`.ValuesBase.return_defaults` + + """ + return self.context.returned_defaults + + def lastrow_has_defaults(self): + """Return ``lastrow_has_defaults()`` from the underlying + :class:`.ExecutionContext`. 
+ + See :class:`.ExecutionContext` for details. + + """ + + return self.context.lastrow_has_defaults() + + def postfetch_cols(self): + """Return ``postfetch_cols()`` from the underlying + :class:`.ExecutionContext`. + + See :class:`.ExecutionContext` for details. + + Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed + statement is not a compiled expression construct + or is not an insert() or update() construct. + + """ + + if not self.context.compiled: + raise exc.InvalidRequestError( + "Statement is not a compiled " + "expression construct.") + elif not self.context.isinsert and not self.context.isupdate: + raise exc.InvalidRequestError( + "Statement is not an insert() or update() " + "expression construct.") + return self.context.postfetch_cols + + def prefetch_cols(self): + """Return ``prefetch_cols()`` from the underlying + :class:`.ExecutionContext`. + + See :class:`.ExecutionContext` for details. + + Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed + statement is not a compiled expression construct + or is not an insert() or update() construct. + + """ + + if not self.context.compiled: + raise exc.InvalidRequestError( + "Statement is not a compiled " + "expression construct.") + elif not self.context.isinsert and not self.context.isupdate: + raise exc.InvalidRequestError( + "Statement is not an insert() or update() " + "expression construct.") + return self.context.prefetch_cols + + def supports_sane_rowcount(self): + """Return ``supports_sane_rowcount`` from the dialect. + + See :attr:`.ResultProxy.rowcount` for background. + + """ + + return self.dialect.supports_sane_rowcount + + def supports_sane_multi_rowcount(self): + """Return ``supports_sane_multi_rowcount`` from the dialect. + + See :attr:`.ResultProxy.rowcount` for background. + + """ + + return self.dialect.supports_sane_multi_rowcount + + def _fetchone_impl(self): + try: + return self.cursor.fetchone() + except AttributeError: + return self._non_result(None) + + def _fetchmany_impl(self, size=None): + try: + if size is None: + return self.cursor.fetchmany() + else: + return self.cursor.fetchmany(size) + except AttributeError: + return self._non_result([]) + + def _fetchall_impl(self): + try: + return self.cursor.fetchall() + except AttributeError: + return self._non_result([]) + + def _non_result(self, default): + if self._metadata is None: + raise exc.ResourceClosedError( + "This result object does not return rows. " + "It has been closed automatically.", + ) + elif self.closed: + raise exc.ResourceClosedError("This result object is closed.") + else: + return default + + def process_rows(self, rows): + process_row = self._process_row + metadata = self._metadata + keymap = metadata._keymap + processors = metadata._processors + if self._echo: + log = self.context.engine.logger.debug + l = [] + for row in rows: + log("Row %r", row) + l.append(process_row(metadata, row, processors, keymap)) + return l + else: + return [process_row(metadata, row, processors, keymap) + for row in rows] + + def fetchall(self): + """Fetch all rows, just like DB-API ``cursor.fetchall()``. + + After all rows have been exhausted, the underlying DBAPI + cursor resource is released, and the object may be safely + discarded. + + Subsequent calls to :meth:`.ResultProxy.fetchall` will return + an empty list. After the :meth:`.ResultProxy.close` method is + called, the method will raise :class:`.ResourceClosedError`. + + .. 
versionchanged:: 1.0.0 - Added "soft close" behavior which
+           allows the result to be used in an "exhausted" state prior to
+           calling the :meth:`.ResultProxy.close` method.
+
+        """
+
+        try:
+            l = self.process_rows(self._fetchall_impl())
+            self._soft_close()
+            return l
+        except Exception as e:
+            self.connection._handle_dbapi_exception(
+                e, None, None,
+                self.cursor, self.context)
+
+    def fetchmany(self, size=None):
+        """Fetch many rows, just like DB-API
+        ``cursor.fetchmany(size=cursor.arraysize)``.
+
+        After all rows have been exhausted, the underlying DBAPI
+        cursor resource is released, and the object may be safely
+        discarded.
+
+        Calls to :meth:`.ResultProxy.fetchmany` after all rows have been
+        exhausted will return
+        an empty list. After the :meth:`.ResultProxy.close` method is
+        called, the method will raise :class:`.ResourceClosedError`.
+
+        .. versionchanged:: 1.0.0 - Added "soft close" behavior which
+           allows the result to be used in an "exhausted" state prior to
+           calling the :meth:`.ResultProxy.close` method.
+
+        """
+
+        try:
+            l = self.process_rows(self._fetchmany_impl(size))
+            if len(l) == 0:
+                self._soft_close()
+            return l
+        except Exception as e:
+            self.connection._handle_dbapi_exception(
+                e, None, None,
+                self.cursor, self.context)
+
+    def fetchone(self):
+        """Fetch one row, just like DB-API ``cursor.fetchone()``.
+
+        After all rows have been exhausted, the underlying DBAPI
+        cursor resource is released, and the object may be safely
+        discarded.
+
+        Calls to :meth:`.ResultProxy.fetchone` after all rows have
+        been exhausted will return ``None``.
+        After the :meth:`.ResultProxy.close` method is
+        called, the method will raise :class:`.ResourceClosedError`.
+
+        .. versionchanged:: 1.0.0 - Added "soft close" behavior which
+           allows the result to be used in an "exhausted" state prior to
+           calling the :meth:`.ResultProxy.close` method.
+
+        """
+        try:
+            row = self._fetchone_impl()
+            if row is not None:
+                return self.process_rows([row])[0]
+            else:
+                self._soft_close()
+                return None
+        except Exception as e:
+            self.connection._handle_dbapi_exception(
+                e, None, None,
+                self.cursor, self.context)
+
+    def first(self):
+        """Fetch the first row and then close the result set unconditionally.
+
+        Returns None if no row is present.
+
+        After calling this method, the object is fully closed,
+        e.g. the :meth:`.ResultProxy.close` method will have been called.
+
+        """
+        if self._metadata is None:
+            return self._non_result(None)
+
+        try:
+            row = self._fetchone_impl()
+        except Exception as e:
+            self.connection._handle_dbapi_exception(
+                e, None, None,
+                self.cursor, self.context)
+
+        try:
+            if row is not None:
+                return self.process_rows([row])[0]
+            else:
+                return None
+        finally:
+            self.close()
+
+    def scalar(self):
+        """Fetch the first column of the first row, and close the result set.
+
+        Returns None if no row is present.
+
+        After calling this method, the object is fully closed,
+        e.g. the :meth:`.ResultProxy.close` method will have been called.
+
+        """
+        row = self.first()
+        if row is not None:
+            return row[0]
+        else:
+            return None
+
+
+class BufferedRowResultProxy(ResultProxy):
+    """A ResultProxy with row buffering behavior.
+
+    ``ResultProxy`` that buffers the contents of a selection of rows
+    before ``fetchone()`` is called. This is to allow the results of
+    ``cursor.description`` to be available immediately, when
+    interfacing with a DB-API that requires rows to be consumed before
+    this information is available (currently psycopg2, when used with
+    server-side cursors).
+ + The pre-fetching behavior fetches only one row initially, and then + grows its buffer size by a fixed amount with each successive need + for additional rows up to a size of 1000. + + The size argument is configurable using the ``max_row_buffer`` + execution option:: + + with psycopg2_engine.connect() as conn: + + result = conn.execution_options( + stream_results=True, max_row_buffer=50 + ).execute("select * from table") + + .. versionadded:: 1.0.6 Added the ``max_row_buffer`` option. + + .. seealso:: + + :ref:`psycopg2_execution_options` + """ + + def _init_metadata(self): + self._max_row_buffer = self.context.execution_options.get( + 'max_row_buffer', None) + self.__buffer_rows() + super(BufferedRowResultProxy, self)._init_metadata() + + # this is a "growth chart" for the buffering of rows. + # each successive __buffer_rows call will use the next + # value in the list for the buffer size until the max + # is reached + size_growth = { + 1: 5, + 5: 10, + 10: 20, + 20: 50, + 50: 100, + 100: 250, + 250: 500, + 500: 1000 + } + + def __buffer_rows(self): + if self.cursor is None: + return + size = getattr(self, '_bufsize', 1) + self.__rowbuffer = collections.deque(self.cursor.fetchmany(size)) + self._bufsize = self.size_growth.get(size, size) + if self._max_row_buffer is not None: + self._bufsize = min(self._max_row_buffer, self._bufsize) + + def _soft_close(self, **kw): + self.__rowbuffer.clear() + super(BufferedRowResultProxy, self)._soft_close(**kw) + + def _fetchone_impl(self): + if self.cursor is None: + return self._non_result(None) + if not self.__rowbuffer: + self.__buffer_rows() + if not self.__rowbuffer: + return None + return self.__rowbuffer.popleft() + + def _fetchmany_impl(self, size=None): + if size is None: + return self._fetchall_impl() + result = [] + for x in range(0, size): + row = self._fetchone_impl() + if row is None: + break + result.append(row) + return result + + def _fetchall_impl(self): + if self.cursor is None: + return self._non_result([]) + self.__rowbuffer.extend(self.cursor.fetchall()) + ret = self.__rowbuffer + self.__rowbuffer = collections.deque() + return ret + + +class FullyBufferedResultProxy(ResultProxy): + """A result proxy that buffers rows fully upon creation. + + Used for operations where a result is to be delivered + after the database conversation can not be continued, + such as MSSQL INSERT...OUTPUT after an autocommit. 
+
+    """
+
+    def _init_metadata(self):
+        super(FullyBufferedResultProxy, self)._init_metadata()
+        self.__rowbuffer = self._buffer_rows()
+
+    def _buffer_rows(self):
+        return collections.deque(self.cursor.fetchall())
+
+    def _soft_close(self, **kw):
+        self.__rowbuffer.clear()
+        super(FullyBufferedResultProxy, self)._soft_close(**kw)
+
+    def _fetchone_impl(self):
+        if self.__rowbuffer:
+            return self.__rowbuffer.popleft()
+        else:
+            return self._non_result(None)
+
+    def _fetchmany_impl(self, size=None):
+        if size is None:
+            return self._fetchall_impl()
+        result = []
+        for x in range(0, size):
+            row = self._fetchone_impl()
+            if row is None:
+                break
+            result.append(row)
+        return result
+
+    def _fetchall_impl(self):
+        if not self.cursor:
+            return self._non_result([])
+        ret = self.__rowbuffer
+        self.__rowbuffer = collections.deque()
+        return ret
+
+
+class BufferedColumnRow(RowProxy):
+    def __init__(self, parent, row, processors, keymap):
+        # preprocess row
+        row = list(row)
+        # this is a tad faster than using enumerate
+        index = 0
+        for processor in parent._orig_processors:
+            if processor is not None:
+                row[index] = processor(row[index])
+            index += 1
+        row = tuple(row)
+        super(BufferedColumnRow, self).__init__(parent, row,
+                                                processors, keymap)
+
+
+class BufferedColumnResultProxy(ResultProxy):
+    """A ResultProxy with column buffering behavior.
+
+    ``ResultProxy`` that loads all columns into memory each time
+    fetchone() is called. If fetchmany() or fetchall() are called,
+    the full grid of results is fetched. This is to operate with
+    databases where result rows contain "live" results that fall out
+    of scope unless explicitly fetched. Currently this includes
+    cx_Oracle LOB objects.
+
+    """
+
+    _process_row = BufferedColumnRow
+
+    def _init_metadata(self):
+        super(BufferedColumnResultProxy, self)._init_metadata()
+        metadata = self._metadata
+        # orig_processors will be used to preprocess each row when they are
+        # constructed.
+        metadata._orig_processors = metadata._processors
+        # replace all of the type processors with None processors.
+        metadata._processors = [None for _ in range(len(metadata.keys))]
+        keymap = {}
+        for k, (func, obj, index) in metadata._keymap.items():
+            keymap[k] = (None, obj, index)
+        self._metadata._keymap = keymap
+
+    def fetchall(self):
+        # can't call cursor.fetchall(), since rows must be
+        # fully processed before requesting more from the DBAPI.
+        l = []
+        while True:
+            row = self.fetchone()
+            if row is None:
+                break
+            l.append(row)
+        return l
+
+    def fetchmany(self, size=None):
+        # can't call cursor.fetchmany(), since rows must be
+        # fully processed before requesting more from the DBAPI.
+        if size is None:
+            return self.fetchall()
+        l = []
+        for i in range(size):
+            row = self.fetchone()
+            if row is None:
+                break
+            l.append(row)
+        return l
diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/strategies.py b/lib/python3.4/site-packages/sqlalchemy/engine/strategies.py
new file mode 100644
index 0000000..2a018f8
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/engine/strategies.py
@@ -0,0 +1,262 @@
+# engine/strategies.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Strategies for creating new instances of Engine types.
+
+These are semi-private implementation classes which provide the
+underlying behavior for the "strategy" keyword argument available on
+:func:`~sqlalchemy.engine.create_engine`.
Current available options are +``plain``, ``threadlocal``, and ``mock``. + +New strategies can be added via new ``EngineStrategy`` classes. +""" + +from operator import attrgetter + +from sqlalchemy.engine import base, threadlocal, url +from sqlalchemy import util, exc, event +from sqlalchemy import pool as poollib + +strategies = {} + + +class EngineStrategy(object): + """An adaptor that processes input arguments and produces an Engine. + + Provides a ``create`` method that receives input arguments and + produces an instance of base.Engine or a subclass. + + """ + + def __init__(self): + strategies[self.name] = self + + def create(self, *args, **kwargs): + """Given arguments, returns a new Engine instance.""" + + raise NotImplementedError() + + +class DefaultEngineStrategy(EngineStrategy): + """Base class for built-in strategies.""" + + def create(self, name_or_url, **kwargs): + # create url.URL object + u = url.make_url(name_or_url) + + entrypoint = u._get_entrypoint() + dialect_cls = entrypoint.get_dialect_cls(u) + + if kwargs.pop('_coerce_config', False): + def pop_kwarg(key, default=None): + value = kwargs.pop(key, default) + if key in dialect_cls.engine_config_types: + value = dialect_cls.engine_config_types[key](value) + return value + else: + pop_kwarg = kwargs.pop + + dialect_args = {} + # consume dialect arguments from kwargs + for k in util.get_cls_kwargs(dialect_cls): + if k in kwargs: + dialect_args[k] = pop_kwarg(k) + + dbapi = kwargs.pop('module', None) + if dbapi is None: + dbapi_args = {} + for k in util.get_func_kwargs(dialect_cls.dbapi): + if k in kwargs: + dbapi_args[k] = pop_kwarg(k) + dbapi = dialect_cls.dbapi(**dbapi_args) + + dialect_args['dbapi'] = dbapi + + # create dialect + dialect = dialect_cls(**dialect_args) + + # assemble connection arguments + (cargs, cparams) = dialect.create_connect_args(u) + cparams.update(pop_kwarg('connect_args', {})) + cargs = list(cargs) # allow mutability + + # look for existing pool or create + pool = pop_kwarg('pool', None) + if pool is None: + def connect(connection_record=None): + if dialect._has_events: + for fn in dialect.dispatch.do_connect: + connection = fn( + dialect, connection_record, cargs, cparams) + if connection is not None: + return connection + return dialect.connect(*cargs, **cparams) + + creator = pop_kwarg('creator', connect) + + poolclass = pop_kwarg('poolclass', None) + if poolclass is None: + poolclass = dialect_cls.get_pool_class(u) + pool_args = {} + + # consume pool arguments from kwargs, translating a few of + # the arguments + translate = {'logging_name': 'pool_logging_name', + 'echo': 'echo_pool', + 'timeout': 'pool_timeout', + 'recycle': 'pool_recycle', + 'events': 'pool_events', + 'use_threadlocal': 'pool_threadlocal', + 'reset_on_return': 'pool_reset_on_return'} + for k in util.get_cls_kwargs(poolclass): + tk = translate.get(k, k) + if tk in kwargs: + pool_args[k] = pop_kwarg(tk) + pool = poolclass(creator, **pool_args) + else: + if isinstance(pool, poollib._DBProxy): + pool = pool.get_pool(*cargs, **cparams) + else: + pool = pool + + # create engine. + engineclass = self.engine_cls + engine_args = {} + for k in util.get_cls_kwargs(engineclass): + if k in kwargs: + engine_args[k] = pop_kwarg(k) + + _initialize = kwargs.pop('_initialize', True) + + # all kwargs should be consumed + if kwargs: + raise TypeError( + "Invalid argument(s) %s sent to create_engine(), " + "using configuration %s/%s/%s. Please check that the " + "keyword arguments are appropriate for this combination " + "of components." 
% (','.join("'%s'" % k for k in kwargs), + dialect.__class__.__name__, + pool.__class__.__name__, + engineclass.__name__)) + + engine = engineclass(pool, dialect, u, **engine_args) + + if _initialize: + do_on_connect = dialect.on_connect() + if do_on_connect: + def on_connect(dbapi_connection, connection_record): + conn = getattr( + dbapi_connection, '_sqla_unwrap', dbapi_connection) + if conn is None: + return + do_on_connect(conn) + + event.listen(pool, 'first_connect', on_connect) + event.listen(pool, 'connect', on_connect) + + def first_connect(dbapi_connection, connection_record): + c = base.Connection(engine, connection=dbapi_connection, + _has_events=False) + c._execution_options = util.immutabledict() + dialect.initialize(c) + event.listen(pool, 'first_connect', first_connect, once=True) + + dialect_cls.engine_created(engine) + if entrypoint is not dialect_cls: + entrypoint.engine_created(engine) + + return engine + + +class PlainEngineStrategy(DefaultEngineStrategy): + """Strategy for configuring a regular Engine.""" + + name = 'plain' + engine_cls = base.Engine + +PlainEngineStrategy() + + +class ThreadLocalEngineStrategy(DefaultEngineStrategy): + """Strategy for configuring an Engine with threadlocal behavior.""" + + name = 'threadlocal' + engine_cls = threadlocal.TLEngine + +ThreadLocalEngineStrategy() + + +class MockEngineStrategy(EngineStrategy): + """Strategy for configuring an Engine-like object with mocked execution. + + Produces a single mock Connectable object which dispatches + statement execution to a passed-in function. + + """ + + name = 'mock' + + def create(self, name_or_url, executor, **kwargs): + # create url.URL object + u = url.make_url(name_or_url) + + dialect_cls = u.get_dialect() + + dialect_args = {} + # consume dialect arguments from kwargs + for k in util.get_cls_kwargs(dialect_cls): + if k in kwargs: + dialect_args[k] = kwargs.pop(k) + + # create dialect + dialect = dialect_cls(**dialect_args) + + return MockEngineStrategy.MockConnection(dialect, executor) + + class MockConnection(base.Connectable): + def __init__(self, dialect, execute): + self._dialect = dialect + self.execute = execute + + engine = property(lambda s: s) + dialect = property(attrgetter('_dialect')) + name = property(lambda s: s._dialect.name) + + def contextual_connect(self, **kwargs): + return self + + def execution_options(self, **kw): + return self + + def compiler(self, statement, parameters, **kwargs): + return self._dialect.compiler( + statement, parameters, engine=self, **kwargs) + + def create(self, entity, **kwargs): + kwargs['checkfirst'] = False + from sqlalchemy.engine import ddl + + ddl.SchemaGenerator( + self.dialect, self, **kwargs).traverse_single(entity) + + def drop(self, entity, **kwargs): + kwargs['checkfirst'] = False + from sqlalchemy.engine import ddl + ddl.SchemaDropper( + self.dialect, self, **kwargs).traverse_single(entity) + + def _run_visitor(self, visitorcallable, element, + connection=None, + **kwargs): + kwargs['checkfirst'] = False + visitorcallable(self.dialect, self, + **kwargs).traverse_single(element) + + def execute(self, object, *multiparams, **params): + raise NotImplementedError() + +MockEngineStrategy() diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/threadlocal.py b/lib/python3.4/site-packages/sqlalchemy/engine/threadlocal.py new file mode 100644 index 0000000..505d1fa --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/engine/threadlocal.py @@ -0,0 +1,138 @@ +# engine/threadlocal.py +# Copyright (C) 2005-2016 the SQLAlchemy 
authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Provides a thread-local transactional wrapper around the root Engine class. + +The ``threadlocal`` module is invoked when using the +``strategy="threadlocal"`` flag with :func:`~sqlalchemy.engine.create_engine`. +This module is semi-private and is invoked automatically when the threadlocal +engine strategy is used. +""" + +from .. import util +from . import base +import weakref + + +class TLConnection(base.Connection): + + def __init__(self, *arg, **kw): + super(TLConnection, self).__init__(*arg, **kw) + self.__opencount = 0 + + def _increment_connect(self): + self.__opencount += 1 + return self + + def close(self): + if self.__opencount == 1: + base.Connection.close(self) + self.__opencount -= 1 + + def _force_close(self): + self.__opencount = 0 + base.Connection.close(self) + + +class TLEngine(base.Engine): + """An Engine that includes support for thread-local managed + transactions. + + """ + _tl_connection_cls = TLConnection + + def __init__(self, *args, **kwargs): + super(TLEngine, self).__init__(*args, **kwargs) + self._connections = util.threading.local() + + def contextual_connect(self, **kw): + if not hasattr(self._connections, 'conn'): + connection = None + else: + connection = self._connections.conn() + + if connection is None or connection.closed: + # guards against pool-level reapers, if desired. + # or not connection.connection.is_valid: + connection = self._tl_connection_cls( + self, + self._wrap_pool_connect( + self.pool.connect, connection), + **kw) + self._connections.conn = weakref.ref(connection) + + return connection._increment_connect() + + def begin_twophase(self, xid=None): + if not hasattr(self._connections, 'trans'): + self._connections.trans = [] + self._connections.trans.append( + self.contextual_connect().begin_twophase(xid=xid)) + return self + + def begin_nested(self): + if not hasattr(self._connections, 'trans'): + self._connections.trans = [] + self._connections.trans.append( + self.contextual_connect().begin_nested()) + return self + + def begin(self): + if not hasattr(self._connections, 'trans'): + self._connections.trans = [] + self._connections.trans.append(self.contextual_connect().begin()) + return self + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + if type is None: + self.commit() + else: + self.rollback() + + def prepare(self): + if not hasattr(self._connections, 'trans') or \ + not self._connections.trans: + return + self._connections.trans[-1].prepare() + + def commit(self): + if not hasattr(self._connections, 'trans') or \ + not self._connections.trans: + return + trans = self._connections.trans.pop(-1) + trans.commit() + + def rollback(self): + if not hasattr(self._connections, 'trans') or \ + not self._connections.trans: + return + trans = self._connections.trans.pop(-1) + trans.rollback() + + def dispose(self): + self._connections = util.threading.local() + super(TLEngine, self).dispose() + + @property + def closed(self): + return not hasattr(self._connections, 'conn') or \ + self._connections.conn() is None or \ + self._connections.conn().closed + + def close(self): + if not self.closed: + self.contextual_connect().close() + connection = self._connections.conn() + connection._force_close() + del self._connections.conn + self._connections.trans = [] + + def __repr__(self): + return 'TLEngine(%s)' % str(self.url) diff --git 
a/lib/python3.4/site-packages/sqlalchemy/engine/url.py b/lib/python3.4/site-packages/sqlalchemy/engine/url.py
new file mode 100644
index 0000000..3cc2f35
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/engine/url.py
@@ -0,0 +1,253 @@
+# engine/url.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Provides the :class:`~sqlalchemy.engine.url.URL` class which encapsulates
+information about a database connection specification.
+
+The URL object is created automatically when
+:func:`~sqlalchemy.engine.create_engine` is called with a string
+argument; alternatively, the URL is a public-facing construct which can
+be used directly and is also accepted directly by ``create_engine()``.
+"""
+
+import re
+from .. import exc, util
+from . import Dialect
+from ..dialects import registry
+
+
+class URL(object):
+    """
+    Represent the components of a URL used to connect to a database.
+
+    This object is suitable to be passed directly to a
+    :func:`~sqlalchemy.create_engine` call. The fields of the URL are parsed
+    from a string by the :func:`.make_url` function. The string
+    format of the URL is an RFC-1738-style string.
+
+    All initialization parameters are available as public attributes.
+
+    :param drivername: the name of the database backend.
+      This name will correspond to a module in sqlalchemy/databases
+      or a third party plug-in.
+
+    :param username: The user name.
+
+    :param password: database password.
+
+    :param host: The name of the host.
+
+    :param port: The port number.
+
+    :param database: The database name.
+
+    :param query: A dictionary of options to be passed to the
+      dialect and/or the DBAPI upon connect.
+
+    """
+
+    def __init__(self, drivername, username=None, password=None,
+                 host=None, port=None, database=None, query=None):
+        self.drivername = drivername
+        self.username = username
+        self.password = password
+        self.host = host
+        if port is not None:
+            self.port = int(port)
+        else:
+            self.port = None
+        self.database = database
+        self.query = query or {}
+
+    def __to_string__(self, hide_password=True):
+        s = self.drivername + "://"
+        if self.username is not None:
+            s += _rfc_1738_quote(self.username)
+            if self.password is not None:
+                s += ':' + ('***' if hide_password
+                            else _rfc_1738_quote(self.password))
+            s += "@"
+        if self.host is not None:
+            if ':' in self.host:
+                s += "[%s]" % self.host
+            else:
+                s += self.host
+        if self.port is not None:
+            s += ':' + str(self.port)
+        if self.database is not None:
+            s += '/' + self.database
+        if self.query:
+            keys = list(self.query)
+            keys.sort()
+            s += '?'
+ "&".join("%s=%s" % (k, self.query[k]) for k in keys) + return s + + def __str__(self): + return self.__to_string__(hide_password=False) + + def __repr__(self): + return self.__to_string__() + + def __hash__(self): + return hash(str(self)) + + def __eq__(self, other): + return \ + isinstance(other, URL) and \ + self.drivername == other.drivername and \ + self.username == other.username and \ + self.password == other.password and \ + self.host == other.host and \ + self.database == other.database and \ + self.query == other.query + + def get_backend_name(self): + if '+' not in self.drivername: + return self.drivername + else: + return self.drivername.split('+')[0] + + def get_driver_name(self): + if '+' not in self.drivername: + return self.get_dialect().driver + else: + return self.drivername.split('+')[1] + + def _get_entrypoint(self): + """Return the "entry point" dialect class. + + This is normally the dialect itself except in the case when the + returned class implements the get_dialect_cls() method. + + """ + if '+' not in self.drivername: + name = self.drivername + else: + name = self.drivername.replace('+', '.') + cls = registry.load(name) + # check for legacy dialects that + # would return a module with 'dialect' as the + # actual class + if hasattr(cls, 'dialect') and \ + isinstance(cls.dialect, type) and \ + issubclass(cls.dialect, Dialect): + return cls.dialect + else: + return cls + + def get_dialect(self): + """Return the SQLAlchemy database dialect class corresponding + to this URL's driver name. + """ + entrypoint = self._get_entrypoint() + dialect_cls = entrypoint.get_dialect_cls(self) + return dialect_cls + + def translate_connect_args(self, names=[], **kw): + """Translate url attributes into a dictionary of connection arguments. + + Returns attributes of this url (`host`, `database`, `username`, + `password`, `port`) as a plain dictionary. The attribute names are + used as the keys by default. Unset or false attributes are omitted + from the final dictionary. + + :param \**kw: Optional, alternate key names for url attributes. + + :param names: Deprecated. Same purpose as the keyword-based alternate + names, but correlates the name to the original positionally. + """ + + translated = {} + attribute_names = ['host', 'database', 'username', 'password', 'port'] + for sname in attribute_names: + if names: + name = names.pop(0) + elif sname in kw: + name = kw[sname] + else: + name = sname + if name is not None and getattr(self, sname, False): + translated[name] = getattr(self, sname) + return translated + + +def make_url(name_or_url): + """Given a string or unicode instance, produce a new URL instance. + + The given string is parsed according to the RFC 1738 spec. If an + existing URL object is passed, just returns the object. + """ + + if isinstance(name_or_url, util.string_types): + return _parse_rfc1738_args(name_or_url) + else: + return name_or_url + + +def _parse_rfc1738_args(name): + pattern = re.compile(r''' + (?P[\w\+]+):// + (?: + (?P[^:/]*) + (?::(?P.*))? + @)? + (?: + (?: + \[(?P[^/]+)\] | + (?P[^/:]+) + )? + (?::(?P[^/]*))? + )? + (?:/(?P.*))? 
+ ''', re.X) + + m = pattern.match(name) + if m is not None: + components = m.groupdict() + if components['database'] is not None: + tokens = components['database'].split('?', 2) + components['database'] = tokens[0] + query = ( + len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None + if util.py2k and query is not None: + query = dict((k.encode('ascii'), query[k]) for k in query) + else: + query = None + components['query'] = query + + if components['username'] is not None: + components['username'] = _rfc_1738_unquote(components['username']) + + if components['password'] is not None: + components['password'] = _rfc_1738_unquote(components['password']) + + ipv4host = components.pop('ipv4host') + ipv6host = components.pop('ipv6host') + components['host'] = ipv4host or ipv6host + name = components.pop('name') + return URL(name, **components) + else: + raise exc.ArgumentError( + "Could not parse rfc1738 URL from string '%s'" % name) + + +def _rfc_1738_quote(text): + return re.sub(r'[:@/]', lambda m: "%%%X" % ord(m.group(0)), text) + + +def _rfc_1738_unquote(text): + return util.unquote(text) + + +def _parse_keyvalue_args(name): + m = re.match(r'(\w+)://(.*)', name) + if m is not None: + (name, args) = m.group(1, 2) + opts = dict(util.parse_qsl(args)) + return URL(name, *opts) + else: + return None diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/util.py b/lib/python3.4/site-packages/sqlalchemy/engine/util.py new file mode 100644 index 0000000..d28d870 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/engine/util.py @@ -0,0 +1,74 @@ +# engine/util.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from .. import util + + +def connection_memoize(key): + """Decorator, memoize a function in a connection.info stash. + + Only applicable to functions which take no arguments other than a + connection. The memo will be stored in ``connection.info[key]``. + """ + + @util.decorator + def decorated(fn, self, connection): + connection = connection.connect() + try: + return connection.info[key] + except KeyError: + connection.info[key] = val = fn(self, connection) + return val + + return decorated + + +def py_fallback(): + def _distill_params(multiparams, params): + """Given arguments from the calling form *multiparams, **params, + return a list of bind parameter structures, usually a list of + dictionaries. + + In the case of 'raw' execution which accepts positional parameters, + it may be a list of tuples or lists. 
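+
+        A few illustrative calls (a behavior sketch derived from the
+        branches below)::
+
+            _distill_params((), {"id": 1})          # -> [{"id": 1}]
+            _distill_params(({"id": 1},), {})       # -> [{"id": 1}]
+            _distill_params(([{"id": 1}, {"id": 2}],), {})
+                                            # -> [{"id": 1}, {"id": 2}]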
+ + """ + + if not multiparams: + if params: + return [params] + else: + return [] + elif len(multiparams) == 1: + zero = multiparams[0] + if isinstance(zero, (list, tuple)): + if not zero or hasattr(zero[0], '__iter__') and \ + not hasattr(zero[0], 'strip'): + # execute(stmt, [{}, {}, {}, ...]) + # execute(stmt, [(), (), (), ...]) + return zero + else: + # execute(stmt, ("value", "value")) + return [zero] + elif hasattr(zero, 'keys'): + # execute(stmt, {"key":"value"}) + return [zero] + else: + # execute(stmt, "value") + return [[zero]] + else: + if hasattr(multiparams[0], '__iter__') and \ + not hasattr(multiparams[0], 'strip'): + return multiparams + else: + return [multiparams] + + return locals() +try: + from sqlalchemy.cutils import _distill_params +except ImportError: + globals().update(py_fallback()) diff --git a/lib/python3.4/site-packages/sqlalchemy/event/__init__.py b/lib/python3.4/site-packages/sqlalchemy/event/__init__.py new file mode 100644 index 0000000..dddb924 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/event/__init__.py @@ -0,0 +1,11 @@ +# event/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from .api import CANCEL, NO_RETVAL, listen, listens_for, remove, contains +from .base import Events, dispatcher +from .attr import RefCollection +from .legacy import _legacy_signature diff --git a/lib/python3.4/site-packages/sqlalchemy/event/api.py b/lib/python3.4/site-packages/sqlalchemy/event/api.py new file mode 100644 index 0000000..0af48df --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/event/api.py @@ -0,0 +1,188 @@ +# event/api.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Public API functions for the event system. + +""" +from __future__ import absolute_import + +from .. import util, exc +from .base import _registrars +from .registry import _EventKey + +CANCEL = util.symbol('CANCEL') +NO_RETVAL = util.symbol('NO_RETVAL') + + +def _event_key(target, identifier, fn): + for evt_cls in _registrars[identifier]: + tgt = evt_cls._accept_with(target) + if tgt is not None: + return _EventKey(target, identifier, fn, tgt) + else: + raise exc.InvalidRequestError("No such event '%s' for target '%s'" % + (identifier, target)) + + +def listen(target, identifier, fn, *args, **kw): + """Register a listener function for the given target. + + e.g.:: + + from sqlalchemy import event + from sqlalchemy.schema import UniqueConstraint + + def unique_constraint_name(const, table): + const.name = "uq_%s_%s" % ( + table.name, + list(const.columns)[0].name + ) + event.listen( + UniqueConstraint, + "after_parent_attach", + unique_constraint_name) + + + A given function can also be invoked for only the first invocation + of the event using the ``once`` argument:: + + def on_config(): + do_config() + + event.listen(Mapper, "before_configure", on_config, once=True) + + .. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen` + and :func:`.event.listens_for`. + + .. note:: + + The :func:`.listen` function cannot be called at the same time + that the target event is being run. This has implications + for thread safety, and also means an event cannot be added + from inside the listener function for itself. 
The list of + events to be run are present inside of a mutable collection + that can't be changed during iteration. + + Event registration and removal is not intended to be a "high + velocity" operation; it is a configurational operation. For + systems that need to quickly associate and deassociate with + events at high scale, use a mutable structure that is handled + from inside of a single listener. + + .. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now + used as the container for the list of events, which explicitly + disallows collection mutation while the collection is being + iterated. + + .. seealso:: + + :func:`.listens_for` + + :func:`.remove` + + """ + + _event_key(target, identifier, fn).listen(*args, **kw) + + +def listens_for(target, identifier, *args, **kw): + """Decorate a function as a listener for the given target + identifier. + + e.g.:: + + from sqlalchemy import event + from sqlalchemy.schema import UniqueConstraint + + @event.listens_for(UniqueConstraint, "after_parent_attach") + def unique_constraint_name(const, table): + const.name = "uq_%s_%s" % ( + table.name, + list(const.columns)[0].name + ) + + A given function can also be invoked for only the first invocation + of the event using the ``once`` argument:: + + @event.listens_for(Mapper, "before_configure", once=True) + def on_config(): + do_config() + + + .. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen` + and :func:`.event.listens_for`. + + .. seealso:: + + :func:`.listen` - general description of event listening + + """ + def decorate(fn): + listen(target, identifier, fn, *args, **kw) + return fn + return decorate + + +def remove(target, identifier, fn): + """Remove an event listener. + + The arguments here should match exactly those which were sent to + :func:`.listen`; all the event registration which proceeded as a result + of this call will be reverted by calling :func:`.remove` with the same + arguments. + + e.g.:: + + # if a function was registered like this... + @event.listens_for(SomeMappedClass, "before_insert", propagate=True) + def my_listener_function(*arg): + pass + + # ... it's removed like this + event.remove(SomeMappedClass, "before_insert", my_listener_function) + + Above, the listener function associated with ``SomeMappedClass`` was also + propagated to subclasses of ``SomeMappedClass``; the :func:`.remove` + function will revert all of these operations. + + .. versionadded:: 0.9.0 + + .. note:: + + The :func:`.remove` function cannot be called at the same time + that the target event is being run. This has implications + for thread safety, and also means an event cannot be removed + from inside the listener function for itself. The list of + events to be run are present inside of a mutable collection + that can't be changed during iteration. + + Event registration and removal is not intended to be a "high + velocity" operation; it is a configurational operation. For + systems that need to quickly associate and deassociate with + events at high scale, use a mutable structure that is handled + from inside of a single listener. + + .. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now + used as the container for the list of events, which explicitly + disallows collection mutation while the collection is being + iterated. + + .. seealso:: + + :func:`.listen` + + """ + _event_key(target, identifier, fn).remove() + + +def contains(target, identifier, fn): + """Return True if the given target/ident/fn is set up to listen. + + .. 
versionadded:: 0.9.0 + + """ + + return _event_key(target, identifier, fn).contains() diff --git a/lib/python3.4/site-packages/sqlalchemy/event/attr.py b/lib/python3.4/site-packages/sqlalchemy/event/attr.py new file mode 100644 index 0000000..1494013 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/event/attr.py @@ -0,0 +1,373 @@ +# event/attr.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Attribute implementation for _Dispatch classes. + +The various listener targets for a particular event class are represented +as attributes, which refer to collections of listeners to be fired off. +These collections can exist at the class level as well as at the instance +level. An event is fired off using code like this:: + + some_object.dispatch.first_connect(arg1, arg2) + +Above, ``some_object.dispatch`` would be an instance of ``_Dispatch`` and +``first_connect`` is typically an instance of ``_ListenerCollection`` +if event listeners are present, or ``_EmptyListener`` if none are present. + +The attribute mechanics here spend effort trying to ensure listener functions +are available with a minimum of function call overhead, that unnecessary +objects aren't created (i.e. many empty per-instance listener collections), +as well as that everything is garbage collectable when owning references are +lost. Other features such as "propagation" of listener functions across +many ``_Dispatch`` instances, "joining" of multiple ``_Dispatch`` instances, +as well as support for subclass propagation (e.g. events assigned to +``Pool`` vs. ``QueuePool``) are all implemented here. + +""" + +from __future__ import absolute_import, with_statement + +from .. import util +from ..util import threading +from . import registry +from . import legacy +from itertools import chain +import weakref +import collections + + +class RefCollection(util.MemoizedSlots): + __slots__ = 'ref', + + def _memoized_attr_ref(self): + return weakref.ref(self, registry._collection_gced) + + +class _ClsLevelDispatch(RefCollection): + """Class-level events on :class:`._Dispatch` classes.""" + + __slots__ = ('name', 'arg_names', 'has_kw', + 'legacy_signatures', '_clslevel', '__weakref__') + + def __init__(self, parent_dispatch_cls, fn): + self.name = fn.__name__ + argspec = util.inspect_getargspec(fn) + self.arg_names = argspec.args[1:] + self.has_kw = bool(argspec.keywords) + self.legacy_signatures = list(reversed( + sorted( + getattr(fn, '_legacy_signatures', []), + key=lambda s: s[0] + ) + )) + fn.__doc__ = legacy._augment_fn_docs(self, parent_dispatch_cls, fn) + + self._clslevel = weakref.WeakKeyDictionary() + + def _adjust_fn_spec(self, fn, named): + if named: + fn = self._wrap_fn_for_kw(fn) + if self.legacy_signatures: + try: + argspec = util.get_callable_argspec(fn, no_self=True) + except TypeError: + pass + else: + fn = legacy._wrap_fn_for_legacy(self, fn, argspec) + return fn + + def _wrap_fn_for_kw(self, fn): + def wrap_kw(*args, **kw): + argdict = dict(zip(self.arg_names, args)) + argdict.update(kw) + return fn(**argdict) + return wrap_kw + + def insert(self, event_key, propagate): + target = event_key.dispatch_target + assert isinstance(target, type), \ + "Class-level Event targets must be classes." 
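+        # walk the target class and all of its existing subclasses
+        # breadth-first, so that a class-level listener also takes effect
+        # for subclasses (e.g. a listener on Pool applies to QueuePool);
+        # update_subclass() pre-populates a not-yet-seen subclass with
+        # the listeners of its parent classes.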
+ stack = [target] + while stack: + cls = stack.pop(0) + stack.extend(cls.__subclasses__()) + if cls is not target and cls not in self._clslevel: + self.update_subclass(cls) + else: + if cls not in self._clslevel: + self._clslevel[cls] = collections.deque() + self._clslevel[cls].appendleft(event_key._listen_fn) + registry._stored_in_collection(event_key, self) + + def append(self, event_key, propagate): + target = event_key.dispatch_target + assert isinstance(target, type), \ + "Class-level Event targets must be classes." + + stack = [target] + while stack: + cls = stack.pop(0) + stack.extend(cls.__subclasses__()) + if cls is not target and cls not in self._clslevel: + self.update_subclass(cls) + else: + if cls not in self._clslevel: + self._clslevel[cls] = collections.deque() + self._clslevel[cls].append(event_key._listen_fn) + registry._stored_in_collection(event_key, self) + + def update_subclass(self, target): + if target not in self._clslevel: + self._clslevel[target] = collections.deque() + clslevel = self._clslevel[target] + for cls in target.__mro__[1:]: + if cls in self._clslevel: + clslevel.extend([ + fn for fn + in self._clslevel[cls] + if fn not in clslevel + ]) + + def remove(self, event_key): + target = event_key.dispatch_target + stack = [target] + while stack: + cls = stack.pop(0) + stack.extend(cls.__subclasses__()) + if cls in self._clslevel: + self._clslevel[cls].remove(event_key._listen_fn) + registry._removed_from_collection(event_key, self) + + def clear(self): + """Clear all class level listeners""" + + to_clear = set() + for dispatcher in self._clslevel.values(): + to_clear.update(dispatcher) + dispatcher.clear() + registry._clear(self, to_clear) + + def for_modify(self, obj): + """Return an event collection which can be modified. + + For _ClsLevelDispatch at the class level of + a dispatcher, this returns self. + + """ + return self + + +class _InstanceLevelDispatch(RefCollection): + __slots__ = () + + def _adjust_fn_spec(self, fn, named): + return self.parent._adjust_fn_spec(fn, named) + + +class _EmptyListener(_InstanceLevelDispatch): + """Serves as a proxy interface to the events + served by a _ClsLevelDispatch, when there are no + instance-level events present. + + Is replaced by _ListenerCollection when instance-level + events are added. + + """ + + propagate = frozenset() + listeners = () + + __slots__ = 'parent', 'parent_listeners', 'name' + + def __init__(self, parent, target_cls): + if target_cls not in parent._clslevel: + parent.update_subclass(target_cls) + self.parent = parent # _ClsLevelDispatch + self.parent_listeners = parent._clslevel[target_cls] + self.name = parent.name + + def for_modify(self, obj): + """Return an event collection which can be modified. + + For _EmptyListener at the instance level of + a dispatcher, this generates a new + _ListenerCollection, applies it to the instance, + and returns it. 
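+
+        The new collection replaces this ``_EmptyListener`` as the
+        instance-level attribute, so subsequent listen and dispatch
+        operations on the instance use the mutable collection directly.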
+ + """ + result = _ListenerCollection(self.parent, obj._instance_cls) + if getattr(obj, self.name) is self: + setattr(obj, self.name, result) + else: + assert isinstance(getattr(obj, self.name), _JoinedListener) + return result + + def _needs_modify(self, *args, **kw): + raise NotImplementedError("need to call for_modify()") + + exec_once = insert = append = remove = clear = _needs_modify + + def __call__(self, *args, **kw): + """Execute this event.""" + + for fn in self.parent_listeners: + fn(*args, **kw) + + def __len__(self): + return len(self.parent_listeners) + + def __iter__(self): + return iter(self.parent_listeners) + + def __bool__(self): + return bool(self.parent_listeners) + + __nonzero__ = __bool__ + + +class _CompoundListener(_InstanceLevelDispatch): + __slots__ = '_exec_once_mutex', '_exec_once' + + def _memoized_attr__exec_once_mutex(self): + return threading.Lock() + + def exec_once(self, *args, **kw): + """Execute this event, but only if it has not been + executed already for this collection.""" + + if not self._exec_once: + with self._exec_once_mutex: + if not self._exec_once: + try: + self(*args, **kw) + finally: + self._exec_once = True + + def __call__(self, *args, **kw): + """Execute this event.""" + + for fn in self.parent_listeners: + fn(*args, **kw) + for fn in self.listeners: + fn(*args, **kw) + + def __len__(self): + return len(self.parent_listeners) + len(self.listeners) + + def __iter__(self): + return chain(self.parent_listeners, self.listeners) + + def __bool__(self): + return bool(self.listeners or self.parent_listeners) + + __nonzero__ = __bool__ + + +class _ListenerCollection(_CompoundListener): + """Instance-level attributes on instances of :class:`._Dispatch`. + + Represents a collection of listeners. + + As of 0.7.9, _ListenerCollection is only first + created via the _EmptyListener.for_modify() method. + + """ + + __slots__ = ( + 'parent_listeners', 'parent', 'name', 'listeners', + 'propagate', '__weakref__') + + def __init__(self, parent, target_cls): + if target_cls not in parent._clslevel: + parent.update_subclass(target_cls) + self._exec_once = False + self.parent_listeners = parent._clslevel[target_cls] + self.parent = parent + self.name = parent.name + self.listeners = collections.deque() + self.propagate = set() + + def for_modify(self, obj): + """Return an event collection which can be modified. + + For _ListenerCollection at the instance level of + a dispatcher, this returns self. 
+ + """ + return self + + def _update(self, other, only_propagate=True): + """Populate from the listeners in another :class:`_Dispatch` + object.""" + + existing_listeners = self.listeners + existing_listener_set = set(existing_listeners) + self.propagate.update(other.propagate) + other_listeners = [l for l + in other.listeners + if l not in existing_listener_set + and not only_propagate or l in self.propagate + ] + + existing_listeners.extend(other_listeners) + + to_associate = other.propagate.union(other_listeners) + registry._stored_in_collection_multi(self, other, to_associate) + + def insert(self, event_key, propagate): + if event_key.prepend_to_list(self, self.listeners): + if propagate: + self.propagate.add(event_key._listen_fn) + + def append(self, event_key, propagate): + if event_key.append_to_list(self, self.listeners): + if propagate: + self.propagate.add(event_key._listen_fn) + + def remove(self, event_key): + self.listeners.remove(event_key._listen_fn) + self.propagate.discard(event_key._listen_fn) + registry._removed_from_collection(event_key, self) + + def clear(self): + registry._clear(self, self.listeners) + self.propagate.clear() + self.listeners.clear() + + +class _JoinedListener(_CompoundListener): + __slots__ = 'parent', 'name', 'local', 'parent_listeners' + + def __init__(self, parent, name, local): + self._exec_once = False + self.parent = parent + self.name = name + self.local = local + self.parent_listeners = self.local + + @property + def listeners(self): + return getattr(self.parent, self.name) + + def _adjust_fn_spec(self, fn, named): + return self.local._adjust_fn_spec(fn, named) + + def for_modify(self, obj): + self.local = self.parent_listeners = self.local.for_modify(obj) + return self + + def insert(self, event_key, propagate): + self.local.insert(event_key, propagate) + + def append(self, event_key, propagate): + self.local.append(event_key, propagate) + + def remove(self, event_key): + self.local.remove(event_key) + + def clear(self): + raise NotImplementedError() diff --git a/lib/python3.4/site-packages/sqlalchemy/event/base.py b/lib/python3.4/site-packages/sqlalchemy/event/base.py new file mode 100644 index 0000000..81ef5d8 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/event/base.py @@ -0,0 +1,289 @@ +# event/base.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Base implementation classes. + +The public-facing ``Events`` serves as the base class for an event interface; +its public attributes represent different kinds of events. These attributes +are mirrored onto a ``_Dispatch`` class, which serves as a container for +collections of listener functions. These collections are represented both +at the class level of a particular ``_Dispatch`` class as well as within +instances of ``_Dispatch``. + +""" +from __future__ import absolute_import + +import weakref + +from .. import util +from .attr import _JoinedListener, \ + _EmptyListener, _ClsLevelDispatch + +_registrars = util.defaultdict(list) + + +def _is_event_name(name): + return not name.startswith('_') and name != 'dispatch' + + +class _UnpickleDispatch(object): + """Serializable callable that re-generates an instance of + :class:`_Dispatch` given a particular :class:`.Events` subclass. 
+ + """ + + def __call__(self, _instance_cls): + for cls in _instance_cls.__mro__: + if 'dispatch' in cls.__dict__: + return cls.__dict__['dispatch'].\ + dispatch_cls._for_class(_instance_cls) + else: + raise AttributeError("No class with a 'dispatch' member present.") + + +class _Dispatch(object): + """Mirror the event listening definitions of an Events class with + listener collections. + + Classes which define a "dispatch" member will return a + non-instantiated :class:`._Dispatch` subclass when the member + is accessed at the class level. When the "dispatch" member is + accessed at the instance level of its owner, an instance + of the :class:`._Dispatch` class is returned. + + A :class:`._Dispatch` class is generated for each :class:`.Events` + class defined, by the :func:`._create_dispatcher_class` function. + The original :class:`.Events` classes remain untouched. + This decouples the construction of :class:`.Events` subclasses from + the implementation used by the event internals, and allows + inspecting tools like Sphinx to work in an unsurprising + way against the public API. + + """ + + # in one ORM edge case, an attribute is added to _Dispatch, + # so __dict__ is used in just that case and potentially others. + __slots__ = '_parent', '_instance_cls', '__dict__', '_empty_listeners' + + _empty_listener_reg = weakref.WeakKeyDictionary() + + def __init__(self, parent, instance_cls=None): + self._parent = parent + self._instance_cls = instance_cls + if instance_cls: + try: + self._empty_listeners = self._empty_listener_reg[instance_cls] + except KeyError: + self._empty_listeners = \ + self._empty_listener_reg[instance_cls] = dict( + (ls.name, _EmptyListener(ls, instance_cls)) + for ls in parent._event_descriptors + ) + else: + self._empty_listeners = {} + + def __getattr__(self, name): + # assign EmptyListeners as attributes on demand + # to reduce startup time for new dispatch objects + try: + ls = self._empty_listeners[name] + except KeyError: + raise AttributeError(name) + else: + setattr(self, ls.name, ls) + return ls + + @property + def _event_descriptors(self): + for k in self._event_names: + yield getattr(self, k) + + def _for_class(self, instance_cls): + return self.__class__(self, instance_cls) + + def _for_instance(self, instance): + instance_cls = instance.__class__ + return self._for_class(instance_cls) + + @property + def _listen(self): + return self._events._listen + + def _join(self, other): + """Create a 'join' of this :class:`._Dispatch` and another. + + This new dispatcher will dispatch events to both + :class:`._Dispatch` objects. 
+ + """ + if '_joined_dispatch_cls' not in self.__class__.__dict__: + cls = type( + "Joined%s" % self.__class__.__name__, + (_JoinedDispatcher, ), {'__slots__': self._event_names} + ) + + self.__class__._joined_dispatch_cls = cls + return self._joined_dispatch_cls(self, other) + + def __reduce__(self): + return _UnpickleDispatch(), (self._instance_cls, ) + + def _update(self, other, only_propagate=True): + """Populate from the listeners in another :class:`_Dispatch` + object.""" + for ls in other._event_descriptors: + if isinstance(ls, _EmptyListener): + continue + getattr(self, ls.name).\ + for_modify(self)._update(ls, only_propagate=only_propagate) + + def _clear(self): + for ls in self._event_descriptors: + ls.for_modify(self).clear() + + +class _EventMeta(type): + """Intercept new Event subclasses and create + associated _Dispatch classes.""" + + def __init__(cls, classname, bases, dict_): + _create_dispatcher_class(cls, classname, bases, dict_) + return type.__init__(cls, classname, bases, dict_) + + +def _create_dispatcher_class(cls, classname, bases, dict_): + """Create a :class:`._Dispatch` class corresponding to an + :class:`.Events` class.""" + + # there's all kinds of ways to do this, + # i.e. make a Dispatch class that shares the '_listen' method + # of the Event class, this is the straight monkeypatch. + if hasattr(cls, 'dispatch'): + dispatch_base = cls.dispatch.__class__ + else: + dispatch_base = _Dispatch + + event_names = [k for k in dict_ if _is_event_name(k)] + dispatch_cls = type("%sDispatch" % classname, + (dispatch_base, ), {'__slots__': event_names}) + + dispatch_cls._event_names = event_names + + dispatch_inst = cls._set_dispatch(cls, dispatch_cls) + for k in dispatch_cls._event_names: + setattr(dispatch_inst, k, _ClsLevelDispatch(cls, dict_[k])) + _registrars[k].append(cls) + + for super_ in dispatch_cls.__bases__: + if issubclass(super_, _Dispatch) and super_ is not _Dispatch: + for ls in super_._events.dispatch._event_descriptors: + setattr(dispatch_inst, ls.name, ls) + dispatch_cls._event_names.append(ls.name) + + if getattr(cls, '_dispatch_target', None): + cls._dispatch_target.dispatch = dispatcher(cls) + + +def _remove_dispatcher(cls): + for k in cls.dispatch._event_names: + _registrars[k].remove(cls) + if not _registrars[k]: + del _registrars[k] + + +class Events(util.with_metaclass(_EventMeta, object)): + """Define event listening functions for a particular target type.""" + + @staticmethod + def _set_dispatch(cls, dispatch_cls): + # this allows an Events subclass to define additional utility + # methods made available to the target via + # "self.dispatch._events." + # @staticemethod to allow easy "super" calls while in a metaclass + # constructor. + cls.dispatch = dispatch_cls(None) + dispatch_cls._events = cls + return cls.dispatch + + @classmethod + def _accept_with(cls, target): + # Mapper, ClassManager, Session override this to + # also accept classes, scoped_sessions, sessionmakers, etc. 
+ if hasattr(target, 'dispatch') and ( + + isinstance(target.dispatch, cls.dispatch.__class__) or + + + ( + isinstance(target.dispatch, type) and + isinstance(target.dispatch, cls.dispatch.__class__) + ) or + + ( + isinstance(target.dispatch, _JoinedDispatcher) and + isinstance(target.dispatch.parent, cls.dispatch.__class__) + ) + + + ): + return target + else: + return None + + @classmethod + def _listen(cls, event_key, propagate=False, insert=False, named=False): + event_key.base_listen(propagate=propagate, insert=insert, named=named) + + @classmethod + def _remove(cls, event_key): + event_key.remove() + + @classmethod + def _clear(cls): + cls.dispatch._clear() + + +class _JoinedDispatcher(object): + """Represent a connection between two _Dispatch objects.""" + + __slots__ = 'local', 'parent', '_instance_cls' + + def __init__(self, local, parent): + self.local = local + self.parent = parent + self._instance_cls = self.local._instance_cls + + def __getattr__(self, name): + # assign _JoinedListeners as attributes on demand + # to reduce startup time for new dispatch objects + ls = getattr(self.local, name) + jl = _JoinedListener(self.parent, ls.name, ls) + setattr(self, ls.name, jl) + return jl + + @property + def _listen(self): + return self.parent._listen + + +class dispatcher(object): + """Descriptor used by target classes to + deliver the _Dispatch class at the class level + and produce new _Dispatch instances for target + instances. + + """ + + def __init__(self, events): + self.dispatch_cls = events.dispatch + self.events = events + + def __get__(self, obj, cls): + if obj is None: + return self.dispatch_cls + obj.__dict__['dispatch'] = disp = self.dispatch_cls._for_instance(obj) + return disp diff --git a/lib/python3.4/site-packages/sqlalchemy/event/legacy.py b/lib/python3.4/site-packages/sqlalchemy/event/legacy.py new file mode 100644 index 0000000..b359bf4 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/event/legacy.py @@ -0,0 +1,169 @@ +# event/legacy.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Routines to handle adaption of legacy call signatures, +generation of deprecation notes and docstrings. + +""" + +from .. 
import util + + +def _legacy_signature(since, argnames, converter=None): + def leg(fn): + if not hasattr(fn, '_legacy_signatures'): + fn._legacy_signatures = [] + fn._legacy_signatures.append((since, argnames, converter)) + return fn + return leg + + +def _wrap_fn_for_legacy(dispatch_collection, fn, argspec): + for since, argnames, conv in dispatch_collection.legacy_signatures: + if argnames[-1] == "**kw": + has_kw = True + argnames = argnames[0:-1] + else: + has_kw = False + + if len(argnames) == len(argspec.args) \ + and has_kw is bool(argspec.keywords): + + if conv: + assert not has_kw + + def wrap_leg(*args): + return fn(*conv(*args)) + else: + def wrap_leg(*args, **kw): + argdict = dict(zip(dispatch_collection.arg_names, args)) + args = [argdict[name] for name in argnames] + if has_kw: + return fn(*args, **kw) + else: + return fn(*args) + return wrap_leg + else: + return fn + + +def _indent(text, indent): + return "\n".join( + indent + line + for line in text.split("\n") + ) + + +def _standard_listen_example(dispatch_collection, sample_target, fn): + example_kw_arg = _indent( + "\n".join( + "%(arg)s = kw['%(arg)s']" % {"arg": arg} + for arg in dispatch_collection.arg_names[0:2] + ), + " ") + if dispatch_collection.legacy_signatures: + current_since = max(since for since, args, conv + in dispatch_collection.legacy_signatures) + else: + current_since = None + text = ( + "from sqlalchemy import event\n\n" + "# standard decorator style%(current_since)s\n" + "@event.listens_for(%(sample_target)s, '%(event_name)s')\n" + "def receive_%(event_name)s(" + "%(named_event_arguments)s%(has_kw_arguments)s):\n" + " \"listen for the '%(event_name)s' event\"\n" + "\n # ... (event handling logic) ...\n" + ) + + if len(dispatch_collection.arg_names) > 3: + text += ( + + "\n# named argument style (new in 0.9)\n" + "@event.listens_for(" + "%(sample_target)s, '%(event_name)s', named=True)\n" + "def receive_%(event_name)s(**kw):\n" + " \"listen for the '%(event_name)s' event\"\n" + "%(example_kw_arg)s\n" + "\n # ... (event handling logic) ...\n" + ) + + text %= { + "current_since": " (arguments as of %s)" % + current_since if current_since else "", + "event_name": fn.__name__, + "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "", + "named_event_arguments": ", ".join(dispatch_collection.arg_names), + "example_kw_arg": example_kw_arg, + "sample_target": sample_target + } + return text + + +def _legacy_listen_examples(dispatch_collection, sample_target, fn): + text = "" + for since, args, conv in dispatch_collection.legacy_signatures: + text += ( + "\n# legacy calling style (pre-%(since)s)\n" + "@event.listens_for(%(sample_target)s, '%(event_name)s')\n" + "def receive_%(event_name)s(" + "%(named_event_arguments)s%(has_kw_arguments)s):\n" + " \"listen for the '%(event_name)s' event\"\n" + "\n # ... (event handling logic) ...\n" % { + "since": since, + "event_name": fn.__name__, + "has_kw_arguments": " **kw" + if dispatch_collection.has_kw else "", + "named_event_arguments": ", ".join(args), + "sample_target": sample_target + } + ) + return text + + +def _version_signature_changes(dispatch_collection): + since, args, conv = dispatch_collection.legacy_signatures[0] + return ( + "\n.. versionchanged:: %(since)s\n" + " The ``%(event_name)s`` event now accepts the \n" + " arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n" + " Listener functions which accept the previous argument \n" + " signature(s) listed above will be automatically \n" + " adapted to the new signature." 
% { + "since": since, + "event_name": dispatch_collection.name, + "named_event_arguments": ", ".join(dispatch_collection.arg_names), + "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "" + } + ) + + +def _augment_fn_docs(dispatch_collection, parent_dispatch_cls, fn): + header = ".. container:: event_signatures\n\n"\ + " Example argument forms::\n"\ + "\n" + + sample_target = getattr(parent_dispatch_cls, "_target_class_doc", "obj") + text = ( + header + + _indent( + _standard_listen_example( + dispatch_collection, sample_target, fn), + " " * 8) + ) + if dispatch_collection.legacy_signatures: + text += _indent( + _legacy_listen_examples( + dispatch_collection, sample_target, fn), + " " * 8) + + text += _version_signature_changes(dispatch_collection) + + return util.inject_docstring_text(fn.__doc__, + text, + 1 + ) diff --git a/lib/python3.4/site-packages/sqlalchemy/event/registry.py b/lib/python3.4/site-packages/sqlalchemy/event/registry.py new file mode 100644 index 0000000..e1e9262 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/event/registry.py @@ -0,0 +1,262 @@ +# event/registry.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Provides managed registration services on behalf of :func:`.listen` +arguments. + +By "managed registration", we mean that event listening functions and +other objects can be added to various collections in such a way that their +membership in all those collections can be revoked at once, based on +an equivalent :class:`._EventKey`. + +""" + +from __future__ import absolute_import + +import weakref +import collections +import types +from .. 
import exc, util + + +_key_to_collection = collections.defaultdict(dict) +""" +Given an original listen() argument, can locate all +listener collections and the listener fn contained + +(target, identifier, fn) -> { + ref(listenercollection) -> ref(listener_fn) + ref(listenercollection) -> ref(listener_fn) + ref(listenercollection) -> ref(listener_fn) + } +""" + +_collection_to_key = collections.defaultdict(dict) +""" +Given a _ListenerCollection or _ClsLevelListener, can locate +all the original listen() arguments and the listener fn contained + +ref(listenercollection) -> { + ref(listener_fn) -> (target, identifier, fn), + ref(listener_fn) -> (target, identifier, fn), + ref(listener_fn) -> (target, identifier, fn), + } +""" + + +def _collection_gced(ref): + # defaultdict, so can't get a KeyError + if not _collection_to_key or ref not in _collection_to_key: + return + listener_to_key = _collection_to_key.pop(ref) + for key in listener_to_key.values(): + if key in _key_to_collection: + # defaultdict, so can't get a KeyError + dispatch_reg = _key_to_collection[key] + dispatch_reg.pop(ref) + if not dispatch_reg: + _key_to_collection.pop(key) + + +def _stored_in_collection(event_key, owner): + key = event_key._key + + dispatch_reg = _key_to_collection[key] + + owner_ref = owner.ref + listen_ref = weakref.ref(event_key._listen_fn) + + if owner_ref in dispatch_reg: + return False + + dispatch_reg[owner_ref] = listen_ref + + listener_to_key = _collection_to_key[owner_ref] + listener_to_key[listen_ref] = key + + return True + + +def _removed_from_collection(event_key, owner): + key = event_key._key + + dispatch_reg = _key_to_collection[key] + + listen_ref = weakref.ref(event_key._listen_fn) + + owner_ref = owner.ref + dispatch_reg.pop(owner_ref, None) + if not dispatch_reg: + del _key_to_collection[key] + + if owner_ref in _collection_to_key: + listener_to_key = _collection_to_key[owner_ref] + listener_to_key.pop(listen_ref) + + +def _stored_in_collection_multi(newowner, oldowner, elements): + if not elements: + return + + oldowner = oldowner.ref + newowner = newowner.ref + + old_listener_to_key = _collection_to_key[oldowner] + new_listener_to_key = _collection_to_key[newowner] + + for listen_fn in elements: + listen_ref = weakref.ref(listen_fn) + key = old_listener_to_key[listen_ref] + dispatch_reg = _key_to_collection[key] + if newowner in dispatch_reg: + assert dispatch_reg[newowner] == listen_ref + else: + dispatch_reg[newowner] = listen_ref + + new_listener_to_key[listen_ref] = key + + +def _clear(owner, elements): + if not elements: + return + + owner = owner.ref + listener_to_key = _collection_to_key[owner] + for listen_fn in elements: + listen_ref = weakref.ref(listen_fn) + key = listener_to_key[listen_ref] + dispatch_reg = _key_to_collection[key] + dispatch_reg.pop(owner, None) + + if not dispatch_reg: + del _key_to_collection[key] + + +class _EventKey(object): + """Represent :func:`.listen` arguments. 
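+
+    Keys are deduplicated on ``(id(target), identifier, fn)``; for bound
+    methods, ``fn`` is keyed on ``(id(fn.__func__), id(fn.__self__))`` so
+    that re-registering the same method compares equal.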
+ """ + + __slots__ = ( + 'target', 'identifier', 'fn', 'fn_key', 'fn_wrap', 'dispatch_target' + ) + + def __init__(self, target, identifier, + fn, dispatch_target, _fn_wrap=None): + self.target = target + self.identifier = identifier + self.fn = fn + if isinstance(fn, types.MethodType): + self.fn_key = id(fn.__func__), id(fn.__self__) + else: + self.fn_key = id(fn) + self.fn_wrap = _fn_wrap + self.dispatch_target = dispatch_target + + @property + def _key(self): + return (id(self.target), self.identifier, self.fn_key) + + def with_wrapper(self, fn_wrap): + if fn_wrap is self._listen_fn: + return self + else: + return _EventKey( + self.target, + self.identifier, + self.fn, + self.dispatch_target, + _fn_wrap=fn_wrap + ) + + def with_dispatch_target(self, dispatch_target): + if dispatch_target is self.dispatch_target: + return self + else: + return _EventKey( + self.target, + self.identifier, + self.fn, + dispatch_target, + _fn_wrap=self.fn_wrap + ) + + def listen(self, *args, **kw): + once = kw.pop("once", False) + named = kw.pop("named", False) + + target, identifier, fn = \ + self.dispatch_target, self.identifier, self._listen_fn + + dispatch_collection = getattr(target.dispatch, identifier) + + adjusted_fn = dispatch_collection._adjust_fn_spec(fn, named) + + self = self.with_wrapper(adjusted_fn) + + if once: + self.with_wrapper( + util.only_once(self._listen_fn)).listen(*args, **kw) + else: + self.dispatch_target.dispatch._listen(self, *args, **kw) + + def remove(self): + key = self._key + + if key not in _key_to_collection: + raise exc.InvalidRequestError( + "No listeners found for event %s / %r / %s " % + (self.target, self.identifier, self.fn) + ) + dispatch_reg = _key_to_collection.pop(key) + + for collection_ref, listener_ref in dispatch_reg.items(): + collection = collection_ref() + listener_fn = listener_ref() + if collection is not None and listener_fn is not None: + collection.remove(self.with_wrapper(listener_fn)) + + def contains(self): + """Return True if this event key is registered to listen. + """ + return self._key in _key_to_collection + + def base_listen(self, propagate=False, insert=False, + named=False): + + target, identifier, fn = \ + self.dispatch_target, self.identifier, self._listen_fn + + dispatch_collection = getattr(target.dispatch, identifier) + + if insert: + dispatch_collection.\ + for_modify(target.dispatch).insert(self, propagate) + else: + dispatch_collection.\ + for_modify(target.dispatch).append(self, propagate) + + @property + def _listen_fn(self): + return self.fn_wrap or self.fn + + def append_to_list(self, owner, list_): + if _stored_in_collection(self, owner): + list_.append(self._listen_fn) + return True + else: + return False + + def remove_from_list(self, owner, list_): + _removed_from_collection(self, owner) + list_.remove(self._listen_fn) + + def prepend_to_list(self, owner, list_): + if _stored_in_collection(self, owner): + list_.appendleft(self._listen_fn) + return True + else: + return False diff --git a/lib/python3.4/site-packages/sqlalchemy/events.py b/lib/python3.4/site-packages/sqlalchemy/events.py new file mode 100644 index 0000000..1abef26 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/events.py @@ -0,0 +1,1101 @@ +# sqlalchemy/events.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Core event interfaces.""" + +from . 
import event, exc
+from .pool import Pool
+from .engine import Connectable, Engine, Dialect
+from .sql.base import SchemaEventTarget
+
+
+class DDLEvents(event.Events):
+    """
+    Define event listeners for schema objects,
+    that is, :class:`.SchemaItem` and other :class:`.SchemaEventTarget`
+    subclasses, including :class:`.MetaData`, :class:`.Table`,
+    :class:`.Column`.
+
+    :class:`.MetaData` and :class:`.Table` support events
+    specifically regarding when CREATE and DROP
+    DDL is emitted to the database.
+
+    Attachment events are also provided to customize
+    behavior whenever a child schema element is associated
+    with a parent, such as when a :class:`.Column` is associated
+    with its :class:`.Table`, when a :class:`.ForeignKeyConstraint`
+    is associated with a :class:`.Table`, etc.
+
+    Example using the ``after_create`` event::
+
+        from sqlalchemy import event
+        from sqlalchemy import Table, Column, MetaData, Integer
+
+        m = MetaData()
+        some_table = Table('some_table', m, Column('data', Integer))
+
+        def after_create(target, connection, **kw):
+            connection.execute("ALTER TABLE %s SET name=foo_%s" %
+                               (target.name, target.name))
+
+        event.listen(some_table, "after_create", after_create)
+
+    DDL events integrate closely with the
+    :class:`.DDL` class and the :class:`.DDLElement` hierarchy
+    of DDL clause constructs, which are themselves appropriate
+    as listener callables::
+
+        from sqlalchemy import DDL
+        event.listen(
+            some_table,
+            "after_create",
+            DDL("ALTER TABLE %(table)s SET name=foo_%(table)s")
+        )
+
+    The methods here define the name of an event as well
+    as the names of members that are passed to listener
+    functions.
+
+    See also:
+
+        :ref:`event_toplevel`
+
+        :class:`.DDLElement`
+
+        :class:`.DDL`
+
+        :ref:`schema_ddl_sequences`
+
+    """
+
+    _target_class_doc = "SomeSchemaClassOrObject"
+    _dispatch_target = SchemaEventTarget
+
+    def before_create(self, target, connection, **kw):
+        """Called before CREATE statements are emitted.
+
+        :param target: the :class:`.MetaData` or :class:`.Table`
+         object which is the target of the event.
+        :param connection: the :class:`.Connection` where the
+         CREATE statement or statements will be emitted.
+        :param \**kw: additional keyword arguments relevant
+         to the event.  The contents of this dictionary
+         may vary across releases, and include the
+         list of tables being generated for a metadata-level
+         event, the checkfirst flag, and other
+         elements used by internal events.
+
+        """
+
+    def after_create(self, target, connection, **kw):
+        """Called after CREATE statements are emitted.
+
+        :param target: the :class:`.MetaData` or :class:`.Table`
+         object which is the target of the event.
+        :param connection: the :class:`.Connection` where the
+         CREATE statement or statements have been emitted.
+        :param \**kw: additional keyword arguments relevant
+         to the event.  The contents of this dictionary
+         may vary across releases, and include the
+         list of tables being generated for a metadata-level
+         event, the checkfirst flag, and other
+         elements used by internal events.
+
+        """
+
+    def before_drop(self, target, connection, **kw):
+        """Called before DROP statements are emitted.
+
+        :param target: the :class:`.MetaData` or :class:`.Table`
+         object which is the target of the event.
+        :param connection: the :class:`.Connection` where the
+         DROP statement or statements will be emitted.
+        :param \**kw: additional keyword arguments relevant
+         to the event.
The contents of this dictionary + may vary across releases, and include the + list of tables being generated for a metadata-level + event, the checkfirst flag, and other + elements used by internal events. + + """ + + def after_drop(self, target, connection, **kw): + """Called after DROP statements are emitted. + + :param target: the :class:`.MetaData` or :class:`.Table` + object which is the target of the event. + :param connection: the :class:`.Connection` where the + DROP statement or statements have been emitted. + :param \**kw: additional keyword arguments relevant + to the event. The contents of this dictionary + may vary across releases, and include the + list of tables being generated for a metadata-level + event, the checkfirst flag, and other + elements used by internal events. + + """ + + def before_parent_attach(self, target, parent): + """Called before a :class:`.SchemaItem` is associated with + a parent :class:`.SchemaItem`. + + :param target: the target object + :param parent: the parent to which the target is being attached. + + :func:`.event.listen` also accepts a modifier for this event: + + :param propagate=False: When True, the listener function will + be established for any copies made of the target object, + i.e. those copies that are generated when + :meth:`.Table.tometadata` is used. + + """ + + def after_parent_attach(self, target, parent): + """Called after a :class:`.SchemaItem` is associated with + a parent :class:`.SchemaItem`. + + :param target: the target object + :param parent: the parent to which the target is being attached. + + :func:`.event.listen` also accepts a modifier for this event: + + :param propagate=False: When True, the listener function will + be established for any copies made of the target object, + i.e. those copies that are generated when + :meth:`.Table.tometadata` is used. + + """ + + def column_reflect(self, inspector, table, column_info): + """Called for each unit of 'column info' retrieved when + a :class:`.Table` is being reflected. + + The dictionary of column information as returned by the + dialect is passed, and can be modified. The dictionary + is that returned in each element of the list returned + by :meth:`.reflection.Inspector.get_columns`. + + The event is called before any action is taken against + this dictionary, and the contents can be modified. + The :class:`.Column` specific arguments ``info``, ``key``, + and ``quote`` can also be added to the dictionary and + will be passed to the constructor of :class:`.Column`. + + Note that this event is only meaningful if either + associated with the :class:`.Table` class across the + board, e.g.:: + + from sqlalchemy.schema import Table + from sqlalchemy import event + + def listen_for_reflect(inspector, table, column_info): + "receive a column_reflect event" + # ... + + event.listen( + Table, + 'column_reflect', + listen_for_reflect) + + ...or with a specific :class:`.Table` instance using + the ``listeners`` argument:: + + def listen_for_reflect(inspector, table, column_info): + "receive a column_reflect event" + # ... + + t = Table( + 'sometable', + autoload=True, + listeners=[ + ('column_reflect', listen_for_reflect) + ]) + + This because the reflection process initiated by ``autoload=True`` + completes within the scope of the constructor for :class:`.Table`. + + """ + + +class PoolEvents(event.Events): + """Available events for :class:`.Pool`. + + The methods here define the name of an event as well + as the names of members that are passed to listener + functions. 
+ + e.g.:: + + from sqlalchemy import event + + def my_on_checkout(dbapi_conn, connection_rec, connection_proxy): + "handle an on checkout event" + + event.listen(Pool, 'checkout', my_on_checkout) + + In addition to accepting the :class:`.Pool` class and + :class:`.Pool` instances, :class:`.PoolEvents` also accepts + :class:`.Engine` objects and the :class:`.Engine` class as + targets, which will be resolved to the ``.pool`` attribute of the + given engine or the :class:`.Pool` class:: + + engine = create_engine("postgresql://scott:tiger@localhost/test") + + # will associate with engine.pool + event.listen(engine, 'checkout', my_on_checkout) + + """ + + _target_class_doc = "SomeEngineOrPool" + _dispatch_target = Pool + + @classmethod + def _accept_with(cls, target): + if isinstance(target, type): + if issubclass(target, Engine): + return Pool + elif issubclass(target, Pool): + return target + elif isinstance(target, Engine): + return target.pool + else: + return target + + def connect(self, dbapi_connection, connection_record): + """Called at the moment a particular DBAPI connection is first + created for a given :class:`.Pool`. + + This event allows one to capture the point directly after which + the DBAPI module-level ``.connect()`` method has been used in order + to produce a new DBAPI connection. + + :param dbapi_connection: a DBAPI connection. + + :param connection_record: the :class:`._ConnectionRecord` managing the + DBAPI connection. + + """ + + def first_connect(self, dbapi_connection, connection_record): + """Called exactly once for the first time a DBAPI connection is + checked out from a particular :class:`.Pool`. + + The rationale for :meth:`.PoolEvents.first_connect` is to determine + information about a particular series of database connections based + on the settings used for all connections. Since a particular + :class:`.Pool` refers to a single "creator" function (which in terms + of a :class:`.Engine` refers to the URL and connection options used), + it is typically valid to make observations about a single connection + that can be safely assumed to be valid about all subsequent + connections, such as the database version, the server and client + encoding settings, collation settings, and many others. + + :param dbapi_connection: a DBAPI connection. + + :param connection_record: the :class:`._ConnectionRecord` managing the + DBAPI connection. + + """ + + def checkout(self, dbapi_connection, connection_record, connection_proxy): + """Called when a connection is retrieved from the Pool. + + :param dbapi_connection: a DBAPI connection. + + :param connection_record: the :class:`._ConnectionRecord` managing the + DBAPI connection. + + :param connection_proxy: the :class:`._ConnectionFairy` object which + will proxy the public interface of the DBAPI connection for the + lifespan of the checkout. + + If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the current + connection will be disposed and a fresh connection retrieved. + Processing of all checkout listeners will abort and restart + using the new connection. + + .. seealso:: :meth:`.ConnectionEvents.engine_connect` - a similar event + which occurs upon creation of a new :class:`.Connection`. + + """ + + def checkin(self, dbapi_connection, connection_record): + """Called when a connection returns to the pool. + + Note that the connection may be closed, and may be None if the + connection has been invalidated. ``checkin`` will not be called + for detached connections. (They do not return to the pool.) 
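+
+        A minimal listener sketch (illustrative; the logger name here is
+        an assumption, not part of the API)::
+
+            import logging
+
+            from sqlalchemy import event
+            from sqlalchemy.pool import Pool
+
+            log = logging.getLogger("pool.audit")  # hypothetical name
+
+            @event.listens_for(Pool, "checkin")
+            def on_checkin(dbapi_connection, connection_record):
+                # dbapi_connection may be None if it was invalidated
+                log.debug("returned to pool: %r", dbapi_connection)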
+
+        :param dbapi_connection: a DBAPI connection.
+
+        :param connection_record: the :class:`._ConnectionRecord` managing
+         the DBAPI connection.
+
+        """
+
+    def reset(self, dbapi_connection, connection_record):
+        """Called before the "reset" action occurs for a pooled connection.
+
+        This event represents
+        when the ``rollback()`` method is called on the DBAPI connection
+        before it is returned to the pool.  The behavior of "reset" can
+        be controlled, or disabled entirely, using the ``reset_on_return``
+        pool argument.
+
+
+        The :meth:`.PoolEvents.reset` event is usually followed by the
+        :meth:`.PoolEvents.checkin` event, except in those
+        cases where the connection is discarded immediately after reset.
+
+        :param dbapi_connection: a DBAPI connection.
+
+        :param connection_record: the :class:`._ConnectionRecord` managing
+         the DBAPI connection.
+
+        .. versionadded:: 0.8
+
+        .. seealso::
+
+            :meth:`.ConnectionEvents.rollback`
+
+            :meth:`.ConnectionEvents.commit`
+
+        """
+
+    def invalidate(self, dbapi_connection, connection_record, exception):
+        """Called when a DBAPI connection is to be "invalidated".
+
+        This event is called any time the
+        :meth:`._ConnectionRecord.invalidate` method is invoked, either
+        from API usage or via "auto-invalidation", without the ``soft``
+        flag.
+
+        The event occurs before a final attempt to call ``.close()`` on the
+        connection occurs.
+
+        :param dbapi_connection: a DBAPI connection.
+
+        :param connection_record: the :class:`._ConnectionRecord` managing
+         the DBAPI connection.
+
+        :param exception: the exception object corresponding to the reason
+         for this invalidation, if any.  May be ``None``.
+
+        .. versionadded:: 0.9.2 Added support for connection invalidation
+           listening.
+
+        .. seealso::
+
+            :ref:`pool_connection_invalidation`
+
+        """
+
+    def soft_invalidate(self, dbapi_connection, connection_record, exception):
+        """Called when a DBAPI connection is to be "soft invalidated".
+
+        This event is called any time the
+        :meth:`._ConnectionRecord.invalidate` method is invoked with the
+        ``soft`` flag.
+
+        Soft invalidation refers to when the connection record that tracks
+        this connection will force a reconnect after the current connection
+        is checked in.  It does not actively close the dbapi_connection
+        at the point at which it is called.
+
+        .. versionadded:: 1.0.3
+
+        """
+
+
+class ConnectionEvents(event.Events):
+    """Available events for :class:`.Connectable`, which includes
+    :class:`.Connection` and :class:`.Engine`.
+
+    The methods here define the name of an event as well as the names of
+    members that are passed to listener functions.
+ + An event listener can be associated with any :class:`.Connectable` + class or instance, such as an :class:`.Engine`, e.g.:: + + from sqlalchemy import event, create_engine + + def before_cursor_execute(conn, cursor, statement, parameters, context, + executemany): + log.info("Received statement: %s" % statement) + + engine = create_engine('postgresql://scott:tiger@localhost/test') + event.listen(engine, "before_cursor_execute", before_cursor_execute) + + or with a specific :class:`.Connection`:: + + with engine.begin() as conn: + @event.listens_for(conn, 'before_cursor_execute') + def before_cursor_execute(conn, cursor, statement, parameters, + context, executemany): + log.info("Received statement: %s" % statement) + + When the methods are called with a `statement` parameter, such as in + :meth:`.after_cursor_execute`, :meth:`.before_cursor_execute` and + :meth:`.dbapi_error`, the statement is the exact SQL string that was + prepared for transmission to the DBAPI ``cursor`` in the connection's + :class:`.Dialect`. + + The :meth:`.before_execute` and :meth:`.before_cursor_execute` + events can also be established with the ``retval=True`` flag, which + allows modification of the statement and parameters to be sent + to the database. The :meth:`.before_cursor_execute` event is + particularly useful here to add ad-hoc string transformations, such + as comments, to all executions:: + + from sqlalchemy.engine import Engine + from sqlalchemy import event + + @event.listens_for(Engine, "before_cursor_execute", retval=True) + def comment_sql_calls(conn, cursor, statement, parameters, + context, executemany): + statement = statement + " -- some comment" + return statement, parameters + + .. note:: :class:`.ConnectionEvents` can be established on any + combination of :class:`.Engine`, :class:`.Connection`, as well + as instances of each of those classes. Events across all + four scopes will fire off for a given instance of + :class:`.Connection`. However, for performance reasons, the + :class:`.Connection` object determines at instantiation time + whether or not its parent :class:`.Engine` has event listeners + established. Event listeners added to the :class:`.Engine` + class or to an instance of :class:`.Engine` *after* the instantiation + of a dependent :class:`.Connection` instance will usually + *not* be available on that :class:`.Connection` instance. The newly + added listeners will instead take effect for :class:`.Connection` + instances created subsequent to those event listeners being + established on the parent :class:`.Engine` class or instance. + + :param retval=False: Applies to the :meth:`.before_execute` and + :meth:`.before_cursor_execute` events only. When True, the + user-defined event function must have a return value, which + is a tuple of parameters that replace the given statement + and parameters. See those methods for a description of + specific return arguments. + + .. versionchanged:: 0.8 :class:`.ConnectionEvents` can now be associated + with any :class:`.Connectable` including :class:`.Connection`, + in addition to the existing support for :class:`.Engine`. 
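+
+    As a sketch of the ``retval`` flag described above, the same listener
+    may also be registered with :func:`.event.listen` directly; ``engine``
+    and ``comment_sql_calls`` are assumed to be defined as in the earlier
+    example::
+
+        from sqlalchemy import event
+
+        event.listen(
+            engine, "before_cursor_execute",
+            comment_sql_calls, retval=True)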
+
+    """
+
+    _target_class_doc = "SomeEngine"
+    _dispatch_target = Connectable
+
+    @classmethod
+    def _listen(cls, event_key, retval=False):
+        target, identifier, fn = \
+            event_key.dispatch_target, event_key.identifier, \
+            event_key._listen_fn
+
+        target._has_events = True
+
+        if not retval:
+            if identifier == 'before_execute':
+                orig_fn = fn
+
+                def wrap_before_execute(conn, clauseelement,
+                                        multiparams, params):
+                    orig_fn(conn, clauseelement, multiparams, params)
+                    return clauseelement, multiparams, params
+                fn = wrap_before_execute
+            elif identifier == 'before_cursor_execute':
+                orig_fn = fn
+
+                def wrap_before_cursor_execute(conn, cursor, statement,
+                                               parameters, context,
+                                               executemany):
+                    orig_fn(conn, cursor, statement,
+                            parameters, context, executemany)
+                    return statement, parameters
+                fn = wrap_before_cursor_execute
+        elif retval and \
+                identifier not in ('before_execute',
+                                   'before_cursor_execute', 'handle_error'):
+            raise exc.ArgumentError(
+                "Only the 'before_execute', "
+                "'before_cursor_execute' and 'handle_error' engine "
+                "event listeners accept the 'retval=True' "
+                "argument.")
+        event_key.with_wrapper(fn).base_listen()
+
+    def before_execute(self, conn, clauseelement, multiparams, params):
+        """Intercept high level execute() events, receiving uncompiled
+        SQL constructs and other objects prior to rendering into SQL.
+
+        This event is good for debugging SQL compilation issues as well
+        as early manipulation of the parameters being sent to the database,
+        as the parameter lists will be in a consistent format here.
+
+        This event can be optionally established with the ``retval=True``
+        flag.  The ``clauseelement``, ``multiparams``, and ``params``
+        arguments should be returned as a three-tuple in this case::
+
+            @event.listens_for(Engine, "before_execute", retval=True)
+            def before_execute(conn, clauseelement, multiparams, params):
+                # do something with clauseelement, multiparams, params
+                return clauseelement, multiparams, params
+
+        :param conn: :class:`.Connection` object
+        :param clauseelement: SQL expression construct, :class:`.Compiled`
+         instance, or string statement passed to :meth:`.Connection.execute`.
+        :param multiparams: Multiple parameter sets, a list of dictionaries.
+        :param params: Single parameter set, a single dictionary.
+
+        See also:
+
+        :meth:`.before_cursor_execute`
+
+        """
+
+    def after_execute(self, conn, clauseelement, multiparams, params, result):
+        """Intercept high level execute() events after execute.
+
+        :param conn: :class:`.Connection` object
+        :param clauseelement: SQL expression construct, :class:`.Compiled`
+         instance, or string statement passed to :meth:`.Connection.execute`.
+        :param multiparams: Multiple parameter sets, a list of dictionaries.
+        :param params: Single parameter set, a single dictionary.
+        :param result: :class:`.ResultProxy` generated by the execution.
+
+        """
+
+    def before_cursor_execute(self, conn, cursor, statement,
+                              parameters, context, executemany):
+        """Intercept low-level cursor execute() events before execution,
+        receiving the string SQL statement and DBAPI-specific parameter
+        list to be invoked against a cursor.
+
+        This event is a good choice for logging as well as late modifications
+        to the SQL string.  It's less ideal for parameter modifications except
+        for those which are specific to a target backend.
+
+        This event can be optionally established with the ``retval=True``
+        flag.  The ``statement`` and ``parameters`` arguments should be
+        returned as a two-tuple in this case::
+
+            @event.listens_for(Engine, "before_cursor_execute", retval=True)
+            def before_cursor_execute(conn, cursor, statement,
+                    parameters, context, executemany):
+                # do something with statement, parameters
+                return statement, parameters
+
+        See the example at :class:`.ConnectionEvents`.
+
+        :param conn: :class:`.Connection` object
+        :param cursor: DBAPI cursor object
+        :param statement: string SQL statement, as to be passed to the DBAPI
+        :param parameters: Dictionary, tuple, or list of parameters being
+         passed to the ``execute()`` or ``executemany()`` method of the
+         DBAPI ``cursor``.  In some cases may be ``None``.
+        :param context: :class:`.ExecutionContext` object in use.  May
+         be ``None``.
+        :param executemany: boolean, if ``True``, this is an ``executemany()``
+         call, if ``False``, this is an ``execute()`` call.
+
+        See also:
+
+        :meth:`.before_execute`
+
+        :meth:`.after_cursor_execute`
+
+        """
+
+    def after_cursor_execute(self, conn, cursor, statement,
+                             parameters, context, executemany):
+        """Intercept low-level cursor execute() events after execution.
+
+        :param conn: :class:`.Connection` object
+        :param cursor: DBAPI cursor object.  Will have results pending
+         if the statement was a SELECT, but these should not be consumed
+         as they will be needed by the :class:`.ResultProxy`.
+        :param statement: string SQL statement, as passed to the DBAPI
+        :param parameters: Dictionary, tuple, or list of parameters being
+         passed to the ``execute()`` or ``executemany()`` method of the
+         DBAPI ``cursor``.  In some cases may be ``None``.
+        :param context: :class:`.ExecutionContext` object in use.  May
+         be ``None``.
+        :param executemany: boolean, if ``True``, this is an ``executemany()``
+         call, if ``False``, this is an ``execute()`` call.
+
+        """
+
+    def dbapi_error(self, conn, cursor, statement, parameters,
+                    context, exception):
+        """Intercept a raw DBAPI error.
+
+        This event is called with the DBAPI exception instance
+        received from the DBAPI itself, *before* SQLAlchemy wraps the
+        exception with its own exception wrappers, and before any
+        other operations are performed on the DBAPI cursor; the
+        existing transaction remains in effect as well as any state
+        on the cursor.
+
+        The use case here is to inject low-level exception handling
+        into an :class:`.Engine`, typically for logging and
+        debugging purposes.
+
+        .. warning::
+
+            Code should **not** modify
+            any state or throw any exceptions here as this will
+            interfere with SQLAlchemy's cleanup and error handling
+            routines.  For exception modification, please refer to the
+            new :meth:`.ConnectionEvents.handle_error` event.
+
+        Subsequent to this hook, SQLAlchemy may attempt any
+        number of operations on the connection/cursor, including
+        closing the cursor, rolling back the transaction in the
+        case of connectionless execution, and disposing of the entire
+        connection pool if a "disconnect" was detected.  The
+        exception is then wrapped in a SQLAlchemy DBAPI exception
+        wrapper and re-thrown.
+
+        :param conn: :class:`.Connection` object
+        :param cursor: DBAPI cursor object
+        :param statement: string SQL statement, as passed to the DBAPI
+        :param parameters: Dictionary, tuple, or list of parameters being
+         passed to the ``execute()`` or ``executemany()`` method of the
+         DBAPI ``cursor``.  In some cases may be ``None``.
+        :param context: :class:`.ExecutionContext` object in use.  May
+         be ``None``.
+        :param exception: The **unwrapped** exception emitted directly from
+         the DBAPI.  The class here is specific to the DBAPI module in use.
+
+        .. deprecated:: 0.9.7 - replaced by
+            :meth:`.ConnectionEvents.handle_error`
+
+        """
+
+    def handle_error(self, exception_context):
+        """Intercept all exceptions processed by the :class:`.Connection`.
+
+        This includes all exceptions emitted by the DBAPI as well as
+        within SQLAlchemy's statement invocation process, including
+        encoding errors and other statement validation errors.  Other areas
+        in which the event is invoked include transaction begin and end,
+        result row fetching, and cursor creation.
+
+        Note that :meth:`.handle_error` may support new kinds of exceptions
+        and new calling scenarios at *any time*.  Code which uses this
+        event must expect new calling patterns to be present in minor
+        releases.
+
+        To support the wide variety of members that correspond to an
+        exception, as well as to allow extensibility of the event without
+        backwards incompatibility, the sole argument received is an instance
+        of :class:`.ExceptionContext`.  This object contains data members
+        representing detail about the exception.
+
+        Use cases supported by this hook include:
+
+        * read-only, low-level exception handling for logging and
+          debugging purposes
+        * exception re-writing
+
+        The hook is called while the cursor from the failed operation
+        (if any) is still open and accessible.  Special cleanup operations
+        can be called on this cursor; SQLAlchemy will attempt to close
+        this cursor subsequent to this hook being invoked.  If the connection
+        is in "autocommit" mode, the transaction also remains open within
+        the scope of this hook; the rollback of the per-statement transaction
+        also occurs after the hook is called.
+
+        The user-defined event handler has two options for replacing
+        the SQLAlchemy-constructed exception with one that is user
+        defined.  It can either raise this new exception directly, in
+        which case all further event listeners are bypassed and the
+        exception will be raised, after appropriate cleanup has taken
+        place::
+
+            @event.listens_for(Engine, "handle_error")
+            def handle_exception(context):
+                if isinstance(context.original_exception,
+                        psycopg2.OperationalError) and \\
+                        "failed" in str(context.original_exception):
+                    raise MySpecialException("failed operation")
+
+        .. warning::  Because the :meth:`.ConnectionEvents.handle_error`
+           event specifically provides for exceptions to be re-thrown as
+           the ultimate exception raised by the failed statement,
+           **stack traces will be misleading** if the user-defined event
+           handler itself fails and throws an unexpected exception;
+           the stack trace may not illustrate the actual code line that
+           failed!  It is advised to code carefully here and use
+           logging and/or inline debugging if unexpected exceptions are
+           occurring.
+
+        Alternatively, a "chained" style of event handling can be
+        used by configuring the handler with the ``retval=True``
+        modifier and returning the new exception instance from the
+        function.  In this case, event handling will continue onto the
+        next handler.
The "chained" exception is available using + :attr:`.ExceptionContext.chained_exception`:: + + @event.listens_for(Engine, "handle_error", retval=True) + def handle_exception(context): + if context.chained_exception is not None and \\ + "special" in context.chained_exception.message: + return MySpecialException("failed", + cause=context.chained_exception) + + Handlers that return ``None`` may remain within this chain; the + last non-``None`` return value is the one that continues to be + passed to the next handler. + + When a custom exception is raised or returned, SQLAlchemy raises + this new exception as-is, it is not wrapped by any SQLAlchemy + object. If the exception is not a subclass of + :class:`sqlalchemy.exc.StatementError`, + certain features may not be available; currently this includes + the ORM's feature of adding a detail hint about "autoflush" to + exceptions raised within the autoflush process. + + :param context: an :class:`.ExceptionContext` object. See this + class for details on all available members. + + .. versionadded:: 0.9.7 Added the + :meth:`.ConnectionEvents.handle_error` hook. + + .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is now + invoked when an :class:`.Engine` fails during the initial + call to :meth:`.Engine.connect`, as well as when a + :class:`.Connection` object encounters an error during a + reconnect operation. + + .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is + not fired off when a dialect makes use of the + ``skip_user_error_events`` execution option. This is used + by dialects which intend to catch SQLAlchemy-specific exceptions + within specific operations, such as when the MySQL dialect detects + a table not present within the ``has_table()`` dialect method. + Prior to 1.0.0, code which implements :meth:`.handle_error` needs + to ensure that exceptions thrown in these scenarios are re-raised + without modification. + + """ + + def engine_connect(self, conn, branch): + """Intercept the creation of a new :class:`.Connection`. + + This event is called typically as the direct result of calling + the :meth:`.Engine.connect` method. + + It differs from the :meth:`.PoolEvents.connect` method, which + refers to the actual connection to a database at the DBAPI level; + a DBAPI connection may be pooled and reused for many operations. + In contrast, this event refers only to the production of a higher level + :class:`.Connection` wrapper around such a DBAPI connection. + + It also differs from the :meth:`.PoolEvents.checkout` event + in that it is specific to the :class:`.Connection` object, not the + DBAPI connection that :meth:`.PoolEvents.checkout` deals with, although + this DBAPI connection is available here via the + :attr:`.Connection.connection` attribute. But note there can in fact + be multiple :meth:`.PoolEvents.checkout` events within the lifespan + of a single :class:`.Connection` object, if that :class:`.Connection` + is invalidated and re-established. There can also be multiple + :class:`.Connection` objects generated for the same already-checked-out + DBAPI connection, in the case that a "branch" of a :class:`.Connection` + is produced. + + :param conn: :class:`.Connection` object. + :param branch: if True, this is a "branch" of an existing + :class:`.Connection`. A branch is generated within the course + of a statement execution to invoke supplemental statements, most + typically to pre-execute a SELECT of a default value for the purposes + of an INSERT statement. + + .. versionadded:: 0.9.0 + + .. 
+
+            :ref:`pool_disconnects_pessimistic` - illustrates how to use
+            :meth:`.ConnectionEvents.engine_connect`
+            to transparently ensure pooled connections are connected to the
+            database.
+
+            :meth:`.PoolEvents.checkout` - the lower-level pool checkout
+            event for an individual DBAPI connection
+
+            :meth:`.ConnectionEvents.set_connection_execution_options`
+            - a copy of a :class:`.Connection` is also made when the
+            :meth:`.Connection.execution_options` method is called.
+
+        """
+
+    def set_connection_execution_options(self, conn, opts):
+        """Intercept when the :meth:`.Connection.execution_options`
+        method is called.
+
+        This method is called after the new :class:`.Connection` has been
+        produced, with the newly updated execution options collection, but
+        before the :class:`.Dialect` has acted upon any of those new options.
+
+        Note that this method is not called when a new :class:`.Connection`
+        is produced which is inheriting execution options from its parent
+        :class:`.Engine`; to intercept this condition, use the
+        :meth:`.ConnectionEvents.engine_connect` event.
+
+        :param conn: The newly copied :class:`.Connection` object
+
+        :param opts: dictionary of options that were passed to the
+         :meth:`.Connection.execution_options` method.
+
+        .. versionadded:: 0.9.0
+
+        .. seealso::
+
+            :meth:`.ConnectionEvents.set_engine_execution_options` - event
+            which is called when :meth:`.Engine.execution_options` is called.
+
+        """
+
+    def set_engine_execution_options(self, engine, opts):
+        """Intercept when the :meth:`.Engine.execution_options`
+        method is called.
+
+        The :meth:`.Engine.execution_options` method produces a shallow
+        copy of the :class:`.Engine` which stores the new options.  That new
+        :class:`.Engine` is passed here.  A particular application of this
+        method is to add a :meth:`.ConnectionEvents.engine_connect` event
+        handler to the given :class:`.Engine` which will perform some per-
+        :class:`.Connection` task specific to these execution options.
+
+        :param engine: The newly copied :class:`.Engine` object
+
+        :param opts: dictionary of options that were passed to the
+         :meth:`.Engine.execution_options` method.
+
+        .. versionadded:: 0.9.0
+
+        .. seealso::
+
+            :meth:`.ConnectionEvents.set_connection_execution_options` -
+            event which is called when :meth:`.Connection.execution_options`
+            is called.
+
+        """
+
+    def engine_disposed(self, engine):
+        """Intercept when the :meth:`.Engine.dispose` method is called.
+
+        The :meth:`.Engine.dispose` method instructs the engine to
+        "dispose" of its connection pool (e.g. :class:`.Pool`) and to
+        replace it with a new one.  Disposing of the old pool has the
+        effect that existing checked-in connections are closed.  The new
+        pool does not establish any new connections until it is first used.
+
+        This event can be used to indicate that resources related to the
+        :class:`.Engine` should also be cleaned up, keeping in mind that the
+        :class:`.Engine` can still be used for new requests in which case
+        it re-acquires connection resources.
+
+        .. versionadded:: 1.0.5
+
+        """
+
+    def begin(self, conn):
+        """Intercept begin() events.
+
+        :param conn: :class:`.Connection` object
+
+        """
+
+    def rollback(self, conn):
+        """Intercept rollback() events, as initiated by a
+        :class:`.Transaction`.
+
+        Note that the :class:`.Pool` also "auto-rolls back"
+        a DBAPI connection upon checkin, if the ``reset_on_return``
+        flag is set to its default value of ``'rollback'``.
+        To intercept this rollback, use the :meth:`.PoolEvents.reset` hook.
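+
+        A minimal sketch of a listener on this event; ``engine`` is
+        assumed to be an :class:`.Engine` created elsewhere::
+
+            from sqlalchemy import event
+
+            @event.listens_for(engine, "rollback")
+            def receive_rollback(conn):
+                print("ROLLBACK issued on %s" % conn)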
+ + :param conn: :class:`.Connection` object + + .. seealso:: + + :meth:`.PoolEvents.reset` + + """ + + def commit(self, conn): + """Intercept commit() events, as initiated by a + :class:`.Transaction`. + + Note that the :class:`.Pool` may also "auto-commit" + a DBAPI connection upon checkin, if the ``reset_on_return`` + flag is set to the value ``'commit'``. To intercept this + commit, use the :meth:`.PoolEvents.reset` hook. + + :param conn: :class:`.Connection` object + """ + + def savepoint(self, conn, name): + """Intercept savepoint() events. + + :param conn: :class:`.Connection` object + :param name: specified name used for the savepoint. + + """ + + def rollback_savepoint(self, conn, name, context): + """Intercept rollback_savepoint() events. + + :param conn: :class:`.Connection` object + :param name: specified name used for the savepoint. + :param context: :class:`.ExecutionContext` in use. May be ``None``. + + """ + + def release_savepoint(self, conn, name, context): + """Intercept release_savepoint() events. + + :param conn: :class:`.Connection` object + :param name: specified name used for the savepoint. + :param context: :class:`.ExecutionContext` in use. May be ``None``. + + """ + + def begin_twophase(self, conn, xid): + """Intercept begin_twophase() events. + + :param conn: :class:`.Connection` object + :param xid: two-phase XID identifier + + """ + + def prepare_twophase(self, conn, xid): + """Intercept prepare_twophase() events. + + :param conn: :class:`.Connection` object + :param xid: two-phase XID identifier + """ + + def rollback_twophase(self, conn, xid, is_prepared): + """Intercept rollback_twophase() events. + + :param conn: :class:`.Connection` object + :param xid: two-phase XID identifier + :param is_prepared: boolean, indicates if + :meth:`.TwoPhaseTransaction.prepare` was called. + + """ + + def commit_twophase(self, conn, xid, is_prepared): + """Intercept commit_twophase() events. + + :param conn: :class:`.Connection` object + :param xid: two-phase XID identifier + :param is_prepared: boolean, indicates if + :meth:`.TwoPhaseTransaction.prepare` was called. + + """ + + +class DialectEvents(event.Events): + """event interface for execution-replacement functions. + + These events allow direct instrumentation and replacement + of key dialect functions which interact with the DBAPI. + + .. note:: + + :class:`.DialectEvents` hooks should be considered **semi-public** + and experimental. + These hooks are not for general use and are only for those situations + where intricate re-statement of DBAPI mechanics must be injected onto + an existing dialect. For general-use statement-interception events, + please use the :class:`.ConnectionEvents` interface. + + .. seealso:: + + :meth:`.ConnectionEvents.before_cursor_execute` + + :meth:`.ConnectionEvents.before_execute` + + :meth:`.ConnectionEvents.after_cursor_execute` + + :meth:`.ConnectionEvents.after_execute` + + + .. 
versionadded:: 0.9.4 + + """ + + _target_class_doc = "SomeEngine" + _dispatch_target = Dialect + + @classmethod + def _listen(cls, event_key, retval=False): + target, identifier, fn = \ + event_key.dispatch_target, event_key.identifier, event_key.fn + + target._has_events = True + event_key.base_listen() + + @classmethod + def _accept_with(cls, target): + if isinstance(target, type): + if issubclass(target, Engine): + return Dialect + elif issubclass(target, Dialect): + return target + elif isinstance(target, Engine): + return target.dialect + else: + return target + + def do_connect(self, dialect, conn_rec, cargs, cparams): + """Receive connection arguments before a connection is made. + + Return a DBAPI connection to halt further events from invoking; + the returned connection will be used. + + Alternatively, the event can manipulate the cargs and/or cparams + collections; cargs will always be a Python list that can be mutated + in-place and cparams a Python dictionary. Return None to + allow control to pass to the next event handler and ultimately + to allow the dialect to connect normally, given the updated + arguments. + + .. versionadded:: 1.0.3 + + """ + + def do_executemany(self, cursor, statement, parameters, context): + """Receive a cursor to have executemany() called. + + Return the value True to halt further events from invoking, + and to indicate that the cursor execution has already taken + place within the event handler. + + """ + + def do_execute_no_params(self, cursor, statement, context): + """Receive a cursor to have execute() with no parameters called. + + Return the value True to halt further events from invoking, + and to indicate that the cursor execution has already taken + place within the event handler. + + """ + + def do_execute(self, cursor, statement, parameters, context): + """Receive a cursor to have execute() called. + + Return the value True to halt further events from invoking, + and to indicate that the cursor execution has already taken + place within the event handler. + + """ diff --git a/lib/python3.4/site-packages/sqlalchemy/exc.py b/lib/python3.4/site-packages/sqlalchemy/exc.py new file mode 100644 index 0000000..2729842 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/exc.py @@ -0,0 +1,374 @@ +# sqlalchemy/exc.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Exceptions used with SQLAlchemy. + +The base exception class is :exc:`.SQLAlchemyError`. Exceptions which are +raised as a result of DBAPI exceptions are all subclasses of +:exc:`.DBAPIError`. + +""" + + +class SQLAlchemyError(Exception): + """Generic error class.""" + + +class ArgumentError(SQLAlchemyError): + """Raised when an invalid or conflicting function argument is supplied. + + This error generally corresponds to construction time state errors. 
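+
+    For example, as seen in the engine events module above, requesting
+    ``retval=True`` for an event that does not support it raises this
+    error (a sketch; ``engine`` and ``my_fn`` are assumed to be defined
+    elsewhere)::
+
+        from sqlalchemy import event
+
+        # only before_execute, before_cursor_execute and handle_error
+        # accept retval=True; this call raises ArgumentError
+        event.listen(engine, "after_cursor_execute", my_fn, retval=True)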
+ + """ + + +class NoSuchModuleError(ArgumentError): + """Raised when a dynamically-loaded module (usually a database dialect) + of a particular name cannot be located.""" + + +class NoForeignKeysError(ArgumentError): + """Raised when no foreign keys can be located between two selectables + during a join.""" + + +class AmbiguousForeignKeysError(ArgumentError): + """Raised when more than one foreign key matching can be located + between two selectables during a join.""" + + +class CircularDependencyError(SQLAlchemyError): + """Raised by topological sorts when a circular dependency is detected. + + There are two scenarios where this error occurs: + + * In a Session flush operation, if two objects are mutually dependent + on each other, they can not be inserted or deleted via INSERT or + DELETE statements alone; an UPDATE will be needed to post-associate + or pre-deassociate one of the foreign key constrained values. + The ``post_update`` flag described at :ref:`post_update` can resolve + this cycle. + * In a :attr:`.MetaData.sorted_tables` operation, two :class:`.ForeignKey` + or :class:`.ForeignKeyConstraint` objects mutually refer to each + other. Apply the ``use_alter=True`` flag to one or both, + see :ref:`use_alter`. + + """ + def __init__(self, message, cycles, edges, msg=None): + if msg is None: + message += " (%s)" % ", ".join(repr(s) for s in cycles) + else: + message = msg + SQLAlchemyError.__init__(self, message) + self.cycles = cycles + self.edges = edges + + def __reduce__(self): + return self.__class__, (None, self.cycles, + self.edges, self.args[0]) + + +class CompileError(SQLAlchemyError): + """Raised when an error occurs during SQL compilation""" + + +class UnsupportedCompilationError(CompileError): + """Raised when an operation is not supported by the given compiler. + + + .. versionadded:: 0.8.3 + + """ + + def __init__(self, compiler, element_type): + super(UnsupportedCompilationError, self).__init__( + "Compiler %r can't render element of type %s" % + (compiler, element_type)) + + +class IdentifierError(SQLAlchemyError): + """Raised when a schema name is beyond the max character limit""" + + +class DisconnectionError(SQLAlchemyError): + """A disconnect is detected on a raw DB-API connection. + + This error is raised and consumed internally by a connection pool. It can + be raised by the :meth:`.PoolEvents.checkout` event so that the host pool + forces a retry; the exception will be caught three times in a row before + the pool gives up and raises :class:`~sqlalchemy.exc.InvalidRequestError` + regarding the connection attempt. + + """ + + +class TimeoutError(SQLAlchemyError): + """Raised when a connection pool times out on getting a connection.""" + + +class InvalidRequestError(SQLAlchemyError): + """SQLAlchemy was asked to do something it can't do. + + This error generally corresponds to runtime state errors. 
+ + """ + + +class NoInspectionAvailable(InvalidRequestError): + """A subject passed to :func:`sqlalchemy.inspection.inspect` produced + no context for inspection.""" + + +class ResourceClosedError(InvalidRequestError): + """An operation was requested from a connection, cursor, or other + object that's in a closed state.""" + + +class NoSuchColumnError(KeyError, InvalidRequestError): + """A nonexistent column is requested from a ``RowProxy``.""" + + +class NoReferenceError(InvalidRequestError): + """Raised by ``ForeignKey`` to indicate a reference cannot be resolved.""" + + +class NoReferencedTableError(NoReferenceError): + """Raised by ``ForeignKey`` when the referred ``Table`` cannot be + located. + + """ + def __init__(self, message, tname): + NoReferenceError.__init__(self, message) + self.table_name = tname + + def __reduce__(self): + return self.__class__, (self.args[0], self.table_name) + + +class NoReferencedColumnError(NoReferenceError): + """Raised by ``ForeignKey`` when the referred ``Column`` cannot be + located. + + """ + def __init__(self, message, tname, cname): + NoReferenceError.__init__(self, message) + self.table_name = tname + self.column_name = cname + + def __reduce__(self): + return self.__class__, (self.args[0], self.table_name, + self.column_name) + + +class NoSuchTableError(InvalidRequestError): + """Table does not exist or is not visible to a connection.""" + + +class UnboundExecutionError(InvalidRequestError): + """SQL was attempted without a database connection to execute it on.""" + + +class DontWrapMixin(object): + """A mixin class which, when applied to a user-defined Exception class, + will not be wrapped inside of :exc:`.StatementError` if the error is + emitted within the process of executing a statement. + + E.g.:: + + from sqlalchemy.exc import DontWrapMixin + + class MyCustomException(Exception, DontWrapMixin): + pass + + class MySpecialType(TypeDecorator): + impl = String + + def process_bind_param(self, value, dialect): + if value == 'invalid': + raise MyCustomException("invalid!") + + """ + +# Moved to orm.exc; compatibility definition installed by orm import until 0.6 +UnmappedColumnError = None + + +class StatementError(SQLAlchemyError): + """An error occurred during execution of a SQL statement. + + :class:`StatementError` wraps the exception raised + during execution, and features :attr:`.statement` + and :attr:`.params` attributes which supply context regarding + the specifics of the statement which had an issue. + + The wrapped exception object is available in + the :attr:`.orig` attribute. 
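+
+    A typical pattern is to catch the wrapper and inspect these
+    attributes; a minimal sketch, assuming ``conn`` and ``stmt`` exist::
+
+        from sqlalchemy.exc import StatementError
+
+        try:
+            conn.execute(stmt)
+        except StatementError as err:
+            print(err.statement)  # the failed SQL string
+            print(err.params)     # its parameters
+            print(err.orig)       # the underlying DBAPI exception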
+ + """ + + statement = None + """The string SQL statement being invoked when this exception occurred.""" + + params = None + """The parameter list being used when this exception occurred.""" + + orig = None + """The DBAPI exception object.""" + + def __init__(self, message, statement, params, orig): + SQLAlchemyError.__init__(self, message) + self.statement = statement + self.params = params + self.orig = orig + self.detail = [] + + def add_detail(self, msg): + self.detail.append(msg) + + def __reduce__(self): + return self.__class__, (self.args[0], self.statement, + self.params, self.orig) + + def __str__(self): + from sqlalchemy.sql import util + + details = [SQLAlchemyError.__str__(self)] + if self.statement: + details.append("[SQL: %r]" % self.statement) + if self.params: + params_repr = util._repr_params(self.params, 10) + details.append("[parameters: %r]" % params_repr) + return ' '.join([ + "(%s)" % det for det in self.detail + ] + details) + + def __unicode__(self): + return self.__str__() + + +class DBAPIError(StatementError): + """Raised when the execution of a database operation fails. + + Wraps exceptions raised by the DB-API underlying the + database operation. Driver-specific implementations of the standard + DB-API exception types are wrapped by matching sub-types of SQLAlchemy's + :class:`DBAPIError` when possible. DB-API's ``Error`` type maps to + :class:`DBAPIError` in SQLAlchemy, otherwise the names are identical. Note + that there is no guarantee that different DB-API implementations will + raise the same exception type for any given error condition. + + :class:`DBAPIError` features :attr:`~.StatementError.statement` + and :attr:`~.StatementError.params` attributes which supply context + regarding the specifics of the statement which had an issue, for the + typical case when the error was raised within the context of + emitting a SQL statement. + + The wrapped exception object is available in the + :attr:`~.StatementError.orig` attribute. Its type and properties are + DB-API implementation specific. + + """ + + @classmethod + def instance(cls, statement, params, + orig, dbapi_base_err, + connection_invalidated=False, + dialect=None): + # Don't ever wrap these, just return them directly as if + # DBAPIError didn't exist. + if (isinstance(orig, BaseException) and + not isinstance(orig, Exception)) or \ + isinstance(orig, DontWrapMixin): + return orig + + if orig is not None: + # not a DBAPI error, statement is present. 
+ # raise a StatementError + if not isinstance(orig, dbapi_base_err) and statement: + return StatementError( + "(%s.%s) %s" % + (orig.__class__.__module__, orig.__class__.__name__, + orig), + statement, params, orig + ) + + glob = globals() + for super_ in orig.__class__.__mro__: + name = super_.__name__ + if dialect: + name = dialect.dbapi_exception_translation_map.get( + name, name) + if name in glob and issubclass(glob[name], DBAPIError): + cls = glob[name] + break + + return cls(statement, params, orig, connection_invalidated) + + def __reduce__(self): + return self.__class__, (self.statement, self.params, + self.orig, self.connection_invalidated) + + def __init__(self, statement, params, orig, connection_invalidated=False): + try: + text = str(orig) + except Exception as e: + text = 'Error in str() of DB-API-generated exception: ' + str(e) + StatementError.__init__( + self, + '(%s.%s) %s' % ( + orig.__class__.__module__, orig.__class__.__name__, text, ), + statement, + params, + orig + ) + self.connection_invalidated = connection_invalidated + + +class InterfaceError(DBAPIError): + """Wraps a DB-API InterfaceError.""" + + +class DatabaseError(DBAPIError): + """Wraps a DB-API DatabaseError.""" + + +class DataError(DatabaseError): + """Wraps a DB-API DataError.""" + + +class OperationalError(DatabaseError): + """Wraps a DB-API OperationalError.""" + + +class IntegrityError(DatabaseError): + """Wraps a DB-API IntegrityError.""" + + +class InternalError(DatabaseError): + """Wraps a DB-API InternalError.""" + + +class ProgrammingError(DatabaseError): + """Wraps a DB-API ProgrammingError.""" + + +class NotSupportedError(DatabaseError): + """Wraps a DB-API NotSupportedError.""" + + +# Warnings + +class SADeprecationWarning(DeprecationWarning): + """Issued once per usage of a deprecated API.""" + + +class SAPendingDeprecationWarning(PendingDeprecationWarning): + """Issued once per usage of a deprecated API.""" + + +class SAWarning(RuntimeWarning): + """Issued at runtime.""" diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/__init__.py b/lib/python3.4/site-packages/sqlalchemy/ext/__init__.py new file mode 100644 index 0000000..1c8a59a --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/ext/__init__.py @@ -0,0 +1,11 @@ +# ext/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from .. import util as _sa_util + +_sa_util.dependencies.resolve_all("sqlalchemy.ext") + diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/associationproxy.py b/lib/python3.4/site-packages/sqlalchemy/ext/associationproxy.py new file mode 100644 index 0000000..fdc44f3 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/ext/associationproxy.py @@ -0,0 +1,1068 @@ +# ext/associationproxy.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Contain the ``AssociationProxy`` class. + +The ``AssociationProxy`` is a Python property object which provides +transparent proxied access to the endpoint of an association object. + +See the example ``examples/association/proxied_association.py``. + +""" +import itertools +import operator +import weakref +from .. 
import exc, orm, util
+from ..orm import collections, interfaces
+from ..sql import not_, or_
+
+
+def association_proxy(target_collection, attr, **kw):
+    """Return a Python property implementing a view of a target
+    attribute which references an attribute on members of the
+    target.
+
+    The returned value is an instance of :class:`.AssociationProxy`.
+
+    Implements a Python property representing a relationship as a collection
+    of simpler values, or a scalar value.  The proxied property will mimic
+    the collection type of the target (list, dict or set), or, in the case of
+    a one to one relationship, a simple scalar value.
+
+    :param target_collection: Name of the attribute we'll proxy to.
+      This attribute is typically mapped by
+      :func:`~sqlalchemy.orm.relationship` to link to a target collection,
+      but can also be a many-to-one or non-scalar relationship.
+
+    :param attr: Attribute on the associated instance or instances we'll
+      proxy for.
+
+      For example, given a target collection of [obj1, obj2], a list created
+      by this proxy property would look like [getattr(obj1, *attr*),
+      getattr(obj2, *attr*)]
+
+      If the relationship is one-to-one or otherwise uselist=False, then
+      simply: getattr(obj, *attr*)
+
+    :param creator: optional.
+
+      When new items are added to this proxied collection, new instances of
+      the class collected by the target collection will be created.  For
+      list and set collections, the target class constructor will be called
+      with the 'value' for the new instance.  For dict types, two arguments
+      are passed: key and value.
+
+      If you want to construct instances differently, supply a *creator*
+      function that takes arguments as above and returns instances.
+
+      For scalar relationships, creator() will be called if the target is
+      None.  If the target is present, set operations are proxied to
+      setattr() on the associated object.
+
+      If you have an associated object with multiple attributes, you may set
+      up multiple association proxies mapping to different attributes.  See
+      the unit tests for examples, and for examples of how creator()
+      functions can be used to construct the scalar relationship on-demand
+      in this situation.
+
+    :param \*\*kw: Passes along any other keyword arguments to
+      :class:`.AssociationProxy`.
+
+    """
+    return AssociationProxy(target_collection, attr, **kw)
+
+
+ASSOCIATION_PROXY = util.symbol('ASSOCIATION_PROXY')
+"""Symbol indicating an :class:`InspectionAttr` that's
+   of type :class:`.AssociationProxy`.
+
+   Is assigned to the :attr:`.InspectionAttr.extension_type`
+   attribute.
+
+"""
+
+
+class AssociationProxy(interfaces.InspectionAttrInfo):
+    """A descriptor that presents a read/write view of an object attribute."""
+
+    is_attribute = False
+    extension_type = ASSOCIATION_PROXY
+
+    def __init__(self, target_collection, attr, creator=None,
+                 getset_factory=None, proxy_factory=None,
+                 proxy_bulk_set=None, info=None):
+        """Construct a new :class:`.AssociationProxy`.
+
+        The :func:`.association_proxy` function is provided as the usual
+        entrypoint here, though :class:`.AssociationProxy` can be
+        instantiated and/or subclassed directly.
+
+        :param target_collection: Name of the collection we'll proxy to,
+          usually created with :func:`.relationship`.
+
+        :param attr: Attribute on the collected instances we'll proxy
+          for.  For example, given a target collection of [obj1, obj2], a
+          list created by this proxy property would look like
+          [getattr(obj1, attr), getattr(obj2, attr)]
+
+        :param creator: Optional.
When new items are added to this proxied + collection, new instances of the class collected by the target + collection will be created. For list and set collections, the + target class constructor will be called with the 'value' for the + new instance. For dict types, two arguments are passed: + key and value. + + If you want to construct instances differently, supply a 'creator' + function that takes arguments as above and returns instances. + + :param getset_factory: Optional. Proxied attribute access is + automatically handled by routines that get and set values based on + the `attr` argument for this proxy. + + If you would like to customize this behavior, you may supply a + `getset_factory` callable that produces a tuple of `getter` and + `setter` functions. The factory is called with two arguments, the + abstract type of the underlying collection and this proxy instance. + + :param proxy_factory: Optional. The type of collection to emulate is + determined by sniffing the target collection. If your collection + type can't be determined by duck typing or you'd like to use a + different collection implementation, you may supply a factory + function to produce those collections. Only applicable to + non-scalar relationships. + + :param proxy_bulk_set: Optional, use with proxy_factory. See + the _set() method for details. + + :param info: optional, will be assigned to + :attr:`.AssociationProxy.info` if present. + + .. versionadded:: 1.0.9 + + """ + self.target_collection = target_collection + self.value_attr = attr + self.creator = creator + self.getset_factory = getset_factory + self.proxy_factory = proxy_factory + self.proxy_bulk_set = proxy_bulk_set + + self.owning_class = None + self.key = '_%s_%s_%s' % ( + type(self).__name__, target_collection, id(self)) + self.collection_class = None + if info: + self.info = info + + @property + def remote_attr(self): + """The 'remote' :class:`.MapperProperty` referenced by this + :class:`.AssociationProxy`. + + .. versionadded:: 0.7.3 + + See also: + + :attr:`.AssociationProxy.attr` + + :attr:`.AssociationProxy.local_attr` + + """ + return getattr(self.target_class, self.value_attr) + + @property + def local_attr(self): + """The 'local' :class:`.MapperProperty` referenced by this + :class:`.AssociationProxy`. + + .. versionadded:: 0.7.3 + + See also: + + :attr:`.AssociationProxy.attr` + + :attr:`.AssociationProxy.remote_attr` + + """ + return getattr(self.owning_class, self.target_collection) + + @property + def attr(self): + """Return a tuple of ``(local_attr, remote_attr)``. + + This attribute is convenient when specifying a join + using :meth:`.Query.join` across two relationships:: + + sess.query(Parent).join(*Parent.proxied.attr) + + .. versionadded:: 0.7.3 + + See also: + + :attr:`.AssociationProxy.local_attr` + + :attr:`.AssociationProxy.remote_attr` + + """ + return (self.local_attr, self.remote_attr) + + def _get_property(self): + return (orm.class_mapper(self.owning_class). + get_property(self.target_collection)) + + @util.memoized_property + def target_class(self): + """The intermediary class handled by this :class:`.AssociationProxy`. + + Intercepted append/set/assignment events will result + in the generation of new instances of this class. 
+ + """ + return self._get_property().mapper.class_ + + @util.memoized_property + def scalar(self): + """Return ``True`` if this :class:`.AssociationProxy` proxies a scalar + relationship on the local side.""" + + scalar = not self._get_property().uselist + if scalar: + self._initialize_scalar_accessors() + return scalar + + @util.memoized_property + def _value_is_scalar(self): + return not self._get_property().\ + mapper.get_property(self.value_attr).uselist + + @util.memoized_property + def _target_is_object(self): + return getattr(self.target_class, self.value_attr).impl.uses_objects + + def __get__(self, obj, class_): + if self.owning_class is None: + self.owning_class = class_ and class_ or type(obj) + if obj is None: + return self + + if self.scalar: + target = getattr(obj, self.target_collection) + return self._scalar_get(target) + else: + try: + # If the owning instance is reborn (orm session resurrect, + # etc.), refresh the proxy cache. + creator_id, proxy = getattr(obj, self.key) + if id(obj) == creator_id: + return proxy + except AttributeError: + pass + proxy = self._new(_lazy_collection(obj, self.target_collection)) + setattr(obj, self.key, (id(obj), proxy)) + return proxy + + def __set__(self, obj, values): + if self.owning_class is None: + self.owning_class = type(obj) + + if self.scalar: + creator = self.creator and self.creator or self.target_class + target = getattr(obj, self.target_collection) + if target is None: + setattr(obj, self.target_collection, creator(values)) + else: + self._scalar_set(target, values) + else: + proxy = self.__get__(obj, None) + if proxy is not values: + proxy.clear() + self._set(proxy, values) + + def __delete__(self, obj): + if self.owning_class is None: + self.owning_class = type(obj) + delattr(obj, self.key) + + def _initialize_scalar_accessors(self): + if self.getset_factory: + get, set = self.getset_factory(None, self) + else: + get, set = self._default_getset(None) + self._scalar_get, self._scalar_set = get, set + + def _default_getset(self, collection_class): + attr = self.value_attr + _getter = operator.attrgetter(attr) + getter = lambda target: _getter(target) if target is not None else None + if collection_class is dict: + setter = lambda o, k, v: setattr(o, attr, v) + else: + setter = lambda o, v: setattr(o, attr, v) + return getter, setter + + def _new(self, lazy_collection): + creator = self.creator and self.creator or self.target_class + self.collection_class = util.duck_type_collection(lazy_collection()) + + if self.proxy_factory: + return self.proxy_factory( + lazy_collection, creator, self.value_attr, self) + + if self.getset_factory: + getter, setter = self.getset_factory(self.collection_class, self) + else: + getter, setter = self._default_getset(self.collection_class) + + if self.collection_class is list: + return _AssociationList( + lazy_collection, creator, getter, setter, self) + elif self.collection_class is dict: + return _AssociationDict( + lazy_collection, creator, getter, setter, self) + elif self.collection_class is set: + return _AssociationSet( + lazy_collection, creator, getter, setter, self) + else: + raise exc.ArgumentError( + 'could not guess which interface to use for ' + 'collection_class "%s" backing "%s"; specify a ' + 'proxy_factory and proxy_bulk_set manually' % + (self.collection_class.__name__, self.target_collection)) + + def _inflate(self, proxy): + creator = self.creator and self.creator or self.target_class + + if self.getset_factory: + getter, setter = self.getset_factory(self.collection_class, 
self) + else: + getter, setter = self._default_getset(self.collection_class) + + proxy.creator = creator + proxy.getter = getter + proxy.setter = setter + + def _set(self, proxy, values): + if self.proxy_bulk_set: + self.proxy_bulk_set(proxy, values) + elif self.collection_class is list: + proxy.extend(values) + elif self.collection_class is dict: + proxy.update(values) + elif self.collection_class is set: + proxy.update(values) + else: + raise exc.ArgumentError( + 'no proxy_bulk_set supplied for custom ' + 'collection_class implementation') + + @property + def _comparator(self): + return self._get_property().comparator + + def any(self, criterion=None, **kwargs): + """Produce a proxied 'any' expression using EXISTS. + + This expression will be a composed product + using the :meth:`.RelationshipProperty.Comparator.any` + and/or :meth:`.RelationshipProperty.Comparator.has` + operators of the underlying proxied attributes. + + """ + if self._target_is_object: + if self._value_is_scalar: + value_expr = getattr( + self.target_class, self.value_attr).has( + criterion, **kwargs) + else: + value_expr = getattr( + self.target_class, self.value_attr).any( + criterion, **kwargs) + else: + value_expr = criterion + + # check _value_is_scalar here, otherwise + # we're scalar->scalar - call .any() so that + # the "can't call any() on a scalar" msg is raised. + if self.scalar and not self._value_is_scalar: + return self._comparator.has( + value_expr + ) + else: + return self._comparator.any( + value_expr + ) + + def has(self, criterion=None, **kwargs): + """Produce a proxied 'has' expression using EXISTS. + + This expression will be a composed product + using the :meth:`.RelationshipProperty.Comparator.any` + and/or :meth:`.RelationshipProperty.Comparator.has` + operators of the underlying proxied attributes. + + """ + + if self._target_is_object: + return self._comparator.has( + getattr(self.target_class, self.value_attr). + has(criterion, **kwargs) + ) + else: + if criterion is not None or kwargs: + raise exc.ArgumentError( + "Non-empty has() not allowed for " + "column-targeted association proxy; use ==") + return self._comparator.has() + + def contains(self, obj): + """Produce a proxied 'contains' expression using EXISTS. + + This expression will be a composed product + using the :meth:`.RelationshipProperty.Comparator.any` + , :meth:`.RelationshipProperty.Comparator.has`, + and/or :meth:`.RelationshipProperty.Comparator.contains` + operators of the underlying proxied attributes. + """ + + if self.scalar and not self._value_is_scalar: + return self._comparator.has( + getattr(self.target_class, self.value_attr).contains(obj) + ) + else: + return self._comparator.any(**{self.value_attr: obj}) + + def __eq__(self, obj): + # note the has() here will fail for collections; eq_() + # is only allowed with a scalar. + if obj is None: + return or_( + self._comparator.has(**{self.value_attr: obj}), + self._comparator == None + ) + else: + return self._comparator.has(**{self.value_attr: obj}) + + def __ne__(self, obj): + # note the has() here will fail for collections; eq_() + # is only allowed with a scalar. 
+ return self._comparator.has( + getattr(self.target_class, self.value_attr) != obj) + + +class _lazy_collection(object): + def __init__(self, obj, target): + self.ref = weakref.ref(obj) + self.target = target + + def __call__(self): + obj = self.ref() + if obj is None: + raise exc.InvalidRequestError( + "stale association proxy, parent object has gone out of " + "scope") + return getattr(obj, self.target) + + def __getstate__(self): + return {'obj': self.ref(), 'target': self.target} + + def __setstate__(self, state): + self.ref = weakref.ref(state['obj']) + self.target = state['target'] + + +class _AssociationCollection(object): + def __init__(self, lazy_collection, creator, getter, setter, parent): + """Constructs an _AssociationCollection. + + This will always be a subclass of either _AssociationList, + _AssociationSet, or _AssociationDict. + + lazy_collection + A callable returning a list-based collection of entities (usually an + object attribute managed by a SQLAlchemy relationship()) + + creator + A function that creates new target entities. Given one parameter: + value. This assertion is assumed:: + + obj = creator(somevalue) + assert getter(obj) == somevalue + + getter + A function. Given an associated object, return the 'value'. + + setter + A function. Given an associated object and a value, store that + value on the object. + + """ + self.lazy_collection = lazy_collection + self.creator = creator + self.getter = getter + self.setter = setter + self.parent = parent + + col = property(lambda self: self.lazy_collection()) + + def __len__(self): + return len(self.col) + + def __bool__(self): + return bool(self.col) + + __nonzero__ = __bool__ + + def __getstate__(self): + return {'parent': self.parent, 'lazy_collection': self.lazy_collection} + + def __setstate__(self, state): + self.parent = state['parent'] + self.lazy_collection = state['lazy_collection'] + self.parent._inflate(self) + + +class _AssociationList(_AssociationCollection): + """Generic, converting, list-to-list proxy.""" + + def _create(self, value): + return self.creator(value) + + def _get(self, object): + return self.getter(object) + + def _set(self, object, value): + return self.setter(object, value) + + def __getitem__(self, index): + if not isinstance(index, slice): + return self._get(self.col[index]) + else: + return [self._get(member) for member in self.col[index]] + + def __setitem__(self, index, value): + if not isinstance(index, slice): + self._set(self.col[index], value) + else: + if index.stop is None: + stop = len(self) + elif index.stop < 0: + stop = len(self) + index.stop + else: + stop = index.stop + step = index.step or 1 + + start = index.start or 0 + rng = list(range(index.start or 0, stop, step)) + if step == 1: + for i in rng: + del self[start] + i = start + for item in value: + self.insert(i, item) + i += 1 + else: + if len(value) != len(rng): + raise ValueError( + "attempt to assign sequence of size %s to " + "extended slice of size %s" % (len(value), + len(rng))) + for i, item in zip(rng, value): + self._set(self.col[i], item) + + def __delitem__(self, index): + del self.col[index] + + def __contains__(self, value): + for member in self.col: + # testlib.pragma exempt:__eq__ + if self._get(member) == value: + return True + return False + + def __getslice__(self, start, end): + return [self._get(member) for member in self.col[start:end]] + + def __setslice__(self, start, end, values): + members = [self._create(v) for v in values] + self.col[start:end] = members + + def __delslice__(self, start, 
end): + del self.col[start:end] + + def __iter__(self): + """Iterate over proxied values. + + For the actual domain objects, iterate over .col instead or + just use the underlying collection directly from its property + on the parent. + """ + + for member in self.col: + yield self._get(member) + return + + def append(self, value): + item = self._create(value) + self.col.append(item) + + def count(self, value): + return sum([1 for _ in + util.itertools_filter(lambda v: v == value, iter(self))]) + + def extend(self, values): + for v in values: + self.append(v) + + def insert(self, index, value): + self.col[index:index] = [self._create(value)] + + def pop(self, index=-1): + return self.getter(self.col.pop(index)) + + def remove(self, value): + for i, val in enumerate(self): + if val == value: + del self.col[i] + return + raise ValueError("value not in list") + + def reverse(self): + """Not supported, use reversed(mylist)""" + + raise NotImplementedError + + def sort(self): + """Not supported, use sorted(mylist)""" + + raise NotImplementedError + + def clear(self): + del self.col[0:len(self.col)] + + def __eq__(self, other): + return list(self) == other + + def __ne__(self, other): + return list(self) != other + + def __lt__(self, other): + return list(self) < other + + def __le__(self, other): + return list(self) <= other + + def __gt__(self, other): + return list(self) > other + + def __ge__(self, other): + return list(self) >= other + + def __cmp__(self, other): + return cmp(list(self), other) + + def __add__(self, iterable): + try: + other = list(iterable) + except TypeError: + return NotImplemented + return list(self) + other + + def __radd__(self, iterable): + try: + other = list(iterable) + except TypeError: + return NotImplemented + return other + list(self) + + def __mul__(self, n): + if not isinstance(n, int): + return NotImplemented + return list(self) * n + __rmul__ = __mul__ + + def __iadd__(self, iterable): + self.extend(iterable) + return self + + def __imul__(self, n): + # unlike a regular list *=, proxied __imul__ will generate unique + # backing objects for each copy. *= on proxied lists is a bit of + # a stretch anyhow, and this interpretation of the __imul__ contract + # is more plausibly useful than copying the backing objects. 
+ if not isinstance(n, int): + return NotImplemented + if n == 0: + self.clear() + elif n > 1: + self.extend(list(self) * (n - 1)) + return self + + def copy(self): + return list(self) + + def __repr__(self): + return repr(list(self)) + + def __hash__(self): + raise TypeError("%s objects are unhashable" % type(self).__name__) + + for func_name, func in list(locals().items()): + if (util.callable(func) and func.__name__ == func_name and + not func.__doc__ and hasattr(list, func_name)): + func.__doc__ = getattr(list, func_name).__doc__ + del func_name, func + + +_NotProvided = util.symbol('_NotProvided') + + +class _AssociationDict(_AssociationCollection): + """Generic, converting, dict-to-dict proxy.""" + + def _create(self, key, value): + return self.creator(key, value) + + def _get(self, object): + return self.getter(object) + + def _set(self, object, key, value): + return self.setter(object, key, value) + + def __getitem__(self, key): + return self._get(self.col[key]) + + def __setitem__(self, key, value): + if key in self.col: + self._set(self.col[key], key, value) + else: + self.col[key] = self._create(key, value) + + def __delitem__(self, key): + del self.col[key] + + def __contains__(self, key): + # testlib.pragma exempt:__hash__ + return key in self.col + + def has_key(self, key): + # testlib.pragma exempt:__hash__ + return key in self.col + + def __iter__(self): + return iter(self.col.keys()) + + def clear(self): + self.col.clear() + + def __eq__(self, other): + return dict(self) == other + + def __ne__(self, other): + return dict(self) != other + + def __lt__(self, other): + return dict(self) < other + + def __le__(self, other): + return dict(self) <= other + + def __gt__(self, other): + return dict(self) > other + + def __ge__(self, other): + return dict(self) >= other + + def __cmp__(self, other): + return cmp(dict(self), other) + + def __repr__(self): + return repr(dict(self.items())) + + def get(self, key, default=None): + try: + return self[key] + except KeyError: + return default + + def setdefault(self, key, default=None): + if key not in self.col: + self.col[key] = self._create(key, default) + return default + else: + return self[key] + + def keys(self): + return self.col.keys() + + if util.py2k: + def iteritems(self): + return ((key, self._get(self.col[key])) for key in self.col) + + def itervalues(self): + return (self._get(self.col[key]) for key in self.col) + + def iterkeys(self): + return self.col.iterkeys() + + def values(self): + return [self._get(member) for member in self.col.values()] + + def items(self): + return [(k, self._get(self.col[k])) for k in self] + else: + def items(self): + return ((key, self._get(self.col[key])) for key in self.col) + + def values(self): + return (self._get(self.col[key]) for key in self.col) + + def pop(self, key, default=_NotProvided): + if default is _NotProvided: + member = self.col.pop(key) + else: + member = self.col.pop(key, default) + return self._get(member) + + def popitem(self): + item = self.col.popitem() + return (item[0], self._get(item[1])) + + def update(self, *a, **kw): + if len(a) > 1: + raise TypeError('update expected at most 1 arguments, got %i' % + len(a)) + elif len(a) == 1: + seq_or_map = a[0] + # discern dict from sequence - took the advice from + # http://www.voidspace.org.uk/python/articles/duck_typing.shtml + # still not perfect :( + if hasattr(seq_or_map, 'keys'): + for item in seq_or_map: + self[item] = seq_or_map[item] + else: + try: + for k, v in seq_or_map: + self[k] = v + except ValueError: + raise 
ValueError(
+                        "dictionary update sequence "
+                        "requires 2-element tuples")
+
+        for key, value in kw.items():
+            self[key] = value
+
+    def copy(self):
+        return dict(self.items())
+
+    def __hash__(self):
+        raise TypeError("%s objects are unhashable" % type(self).__name__)
+
+    for func_name, func in list(locals().items()):
+        if (util.callable(func) and func.__name__ == func_name and
+                not func.__doc__ and hasattr(dict, func_name)):
+            func.__doc__ = getattr(dict, func_name).__doc__
+    del func_name, func
+
+
+class _AssociationSet(_AssociationCollection):
+    """Generic, converting, set-to-set proxy."""
+
+    def _create(self, value):
+        return self.creator(value)
+
+    def _get(self, object):
+        return self.getter(object)
+
+    def _set(self, object, value):
+        return self.setter(object, value)
+
+    def __len__(self):
+        return len(self.col)
+
+    def __bool__(self):
+        if self.col:
+            return True
+        else:
+            return False
+
+    __nonzero__ = __bool__
+
+    def __contains__(self, value):
+        for member in self.col:
+            # testlib.pragma exempt:__eq__
+            if self._get(member) == value:
+                return True
+        return False
+
+    def __iter__(self):
+        """Iterate over proxied values.
+
+        For the actual domain objects, iterate over .col instead or just use
+        the underlying collection directly from its property on the parent.
+
+        """
+        for member in self.col:
+            yield self._get(member)
+        return
+
+    def add(self, value):
+        if value not in self:
+            self.col.add(self._create(value))
+
+    # for discard and remove, choosing a more expensive check strategy rather
+    # than call self.creator()
+    def discard(self, value):
+        for member in self.col:
+            if self._get(member) == value:
+                self.col.discard(member)
+                break
+
+    def remove(self, value):
+        for member in self.col:
+            if self._get(member) == value:
+                self.col.discard(member)
+                return
+        raise KeyError(value)
+
+    def pop(self):
+        if not self.col:
+            raise KeyError('pop from an empty set')
+        member = self.col.pop()
+        return self._get(member)
+
+    def update(self, other):
+        for value in other:
+            self.add(value)
+
+    def __ior__(self, other):
+        if not collections._set_binops_check_strict(self, other):
+            return NotImplemented
+        for value in other:
+            self.add(value)
+        return self
+
+    def _set(self):
+        return set(iter(self))
+
+    def union(self, other):
+        return set(self).union(other)
+
+    __or__ = union
+
+    def difference(self, other):
+        return set(self).difference(other)
+
+    __sub__ = difference
+
+    def difference_update(self, other):
+        for value in other:
+            self.discard(value)
+
+    def __isub__(self, other):
+        if not collections._set_binops_check_strict(self, other):
+            return NotImplemented
+        for value in other:
+            self.discard(value)
+        return self
+
+    def intersection(self, other):
+        return set(self).intersection(other)
+
+    __and__ = intersection
+
+    def intersection_update(self, other):
+        want, have = self.intersection(other), set(self)
+
+        remove, add = have - want, want - have
+
+        for value in remove:
+            self.remove(value)
+        for value in add:
+            self.add(value)
+
+    def __iand__(self, other):
+        if not collections._set_binops_check_strict(self, other):
+            return NotImplemented
+        want, have = self.intersection(other), set(self)
+
+        remove, add = have - want, want - have
+
+        for value in remove:
+            self.remove(value)
+        for value in add:
+            self.add(value)
+        return self
+
+    def symmetric_difference(self, other):
+        return set(self).symmetric_difference(other)
+
+    __xor__ = symmetric_difference
+
+    def symmetric_difference_update(self, other):
+        want, have = self.symmetric_difference(other), set(self)
+
+        remove, add = have -
want, want - have + + for value in remove: + self.remove(value) + for value in add: + self.add(value) + + def __ixor__(self, other): + if not collections._set_binops_check_strict(self, other): + return NotImplemented + want, have = self.symmetric_difference(other), set(self) + + remove, add = have - want, want - have + + for value in remove: + self.remove(value) + for value in add: + self.add(value) + return self + + def issubset(self, other): + return set(self).issubset(other) + + def issuperset(self, other): + return set(self).issuperset(other) + + def clear(self): + self.col.clear() + + def copy(self): + return set(self) + + def __eq__(self, other): + return set(self) == other + + def __ne__(self, other): + return set(self) != other + + def __lt__(self, other): + return set(self) < other + + def __le__(self, other): + return set(self) <= other + + def __gt__(self, other): + return set(self) > other + + def __ge__(self, other): + return set(self) >= other + + def __repr__(self): + return repr(set(self)) + + def __hash__(self): + raise TypeError("%s objects are unhashable" % type(self).__name__) + + for func_name, func in list(locals().items()): + if (util.callable(func) and func.__name__ == func_name and + not func.__doc__ and hasattr(set, func_name)): + func.__doc__ = getattr(set, func_name).__doc__ + del func_name, func diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/automap.py b/lib/python3.4/site-packages/sqlalchemy/ext/automap.py new file mode 100644 index 0000000..023d11c --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/ext/automap.py @@ -0,0 +1,1038 @@ +# ext/automap.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Define an extension to the :mod:`sqlalchemy.ext.declarative` system +which automatically generates mapped classes and relationships from a database +schema, typically though not necessarily one which is reflected. + +.. versionadded:: 0.9.1 Added :mod:`sqlalchemy.ext.automap`. + +It is hoped that the :class:`.AutomapBase` system provides a quick +and modernized solution to the problem that the very famous +`SQLSoup `_ +also tries to solve, that of generating a quick and rudimentary object +model from an existing database on the fly. By addressing the issue strictly +at the mapper configuration level, and integrating fully with existing +Declarative class techniques, :class:`.AutomapBase` seeks to provide +a well-integrated approach to the issue of expediently auto-generating ad-hoc +mappings. + + +Basic Use +========= + +The simplest usage is to reflect an existing database into a new model. +We create a new :class:`.AutomapBase` class in a similar manner as to how +we create a declarative base class, using :func:`.automap_base`. +We then call :meth:`.AutomapBase.prepare` on the resulting base class, +asking it to reflect the schema and produce mappings:: + + from sqlalchemy.ext.automap import automap_base + from sqlalchemy.orm import Session + from sqlalchemy import create_engine + + Base = automap_base() + + # engine, suppose it has two tables 'user' and 'address' set up + engine = create_engine("sqlite:///mydatabase.db") + + # reflect the tables + Base.prepare(engine, reflect=True) + + # mapped classes are now created with names by default + # matching that of the table name. 
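+    # (the "<classname>_collection" naming is customizable; see
+    # "Overriding Naming Schemes" below)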
+    User = Base.classes.user
+    Address = Base.classes.address
+
+    session = Session(engine)
+
+    # rudimentary relationships are produced
+    session.add(Address(email_address="foo@bar.com", user=User(name="foo")))
+    session.commit()
+
+    # collection-based relationships are by default named
+    # "<classname>_collection"
+    u1 = session.query(User).first()
+    print (u1.address_collection)
+
+Above, calling :meth:`.AutomapBase.prepare` while passing along the
+:paramref:`.AutomapBase.prepare.reflect` parameter indicates that the
+:meth:`.MetaData.reflect` method will be called on this declarative base
+class's :class:`.MetaData` collection; then, each **viable**
+:class:`.Table` within the :class:`.MetaData` will get a new mapped class
+generated automatically. The :class:`.ForeignKeyConstraint` objects which
+link the various tables together will be used to produce new, bidirectional
+:func:`.relationship` objects between classes. The classes and relationships
+follow along a default naming scheme that we can customize. At this point,
+our basic mapping consisting of related ``User`` and ``Address`` classes is
+ready to use in the traditional way.
+
+.. note:: By **viable**, we mean that for a table to be mapped, it must
+   specify a primary key. Additionally, if the table is detected as being
+   a pure association table between two other tables, it will not be directly
+   mapped and will instead be configured as a many-to-many table between
+   the mappings for the two referring tables.
+
+Generating Mappings from an Existing MetaData
+=============================================
+
+We can pass a pre-declared :class:`.MetaData` object to :func:`.automap_base`.
+This object can be constructed in any way, including programmatically, from
+a serialized file, or from itself being reflected using
+:meth:`.MetaData.reflect`. Below we illustrate a combination of reflection and
+explicit table declaration::
+
+    from sqlalchemy import create_engine, MetaData, Table, Column, \
+        ForeignKey, Integer
+    engine = create_engine("sqlite:///mydatabase.db")
+
+    # produce our own MetaData object
+    metadata = MetaData()
+
+    # we can reflect it ourselves from a database, using options
+    # such as 'only' to limit what tables we look at...
+    metadata.reflect(engine, only=['user', 'address'])
+
+    # ... or just define our own Table objects with it (or combine both)
+    Table('user_order', metadata,
+          Column('id', Integer, primary_key=True),
+          Column('user_id', ForeignKey('user.id'))
+          )
+
+    # we can then produce a set of mappings from this MetaData.
+    Base = automap_base(metadata=metadata)
+
+    # calling prepare() just sets up mapped classes and relationships.
+    Base.prepare()
+
+    # mapped classes are ready
+    User, Address, Order = Base.classes.user, Base.classes.address,\
+        Base.classes.user_order
+
+Specifying Classes Explicitly
+=============================
+
+The :mod:`.sqlalchemy.ext.automap` extension allows classes to be defined
+explicitly, in a way similar to that of the :class:`.DeferredReflection`
+class. Classes that extend from :class:`.AutomapBase` act like regular
+declarative classes, but are not immediately mapped after their construction,
+and are instead mapped when we call :meth:`.AutomapBase.prepare`. The
+:meth:`.AutomapBase.prepare` method will make use of the classes we've
+established based on the table name we use.
If our schema contains tables
+``user`` and ``address``, we can define one or both of the classes to be
+used::
+
+    from sqlalchemy.ext.automap import automap_base
+    from sqlalchemy import create_engine
+
+    # automap base
+    Base = automap_base()
+
+    # pre-declare User for the 'user' table
+    class User(Base):
+        __tablename__ = 'user'
+
+        # override schema elements like Columns
+        user_name = Column('name', String)
+
+        # override relationships too, if desired.
+        # we must use the same name that automap would use for the
+        # relationship, and also must refer to the class name that automap
+        # will generate for "address"
+        address_collection = relationship("address", collection_class=set)
+
+    # reflect
+    engine = create_engine("sqlite:///mydatabase.db")
+    Base.prepare(engine, reflect=True)
+
+    # we still have Address generated from the tablename "address",
+    # but User is the same as Base.classes.User now
+
+    Address = Base.classes.address
+
+    u1 = session.query(User).first()
+    print (u1.address_collection)
+
+    # the backref is still there:
+    a1 = session.query(Address).first()
+    print (a1.user)
+
+Above, one of the more intricate details is that we illustrated overriding
+one of the :func:`.relationship` objects that automap would have created.
+To do this, we needed to make sure the names match up with what automap
+would normally generate, in that the relationship name would be
+``User.address_collection`` and the name of the class referred to, from
+automap's perspective, is called ``address``, even though we are referring to
+it as ``Address`` within our usage of this class.
+
+Overriding Naming Schemes
+=========================
+
+:mod:`.sqlalchemy.ext.automap` is tasked with producing mapped classes and
+relationship names based on a schema, which means it has decision points in
+how these names are determined. These three decision points are provided
+using functions which can be passed to the :meth:`.AutomapBase.prepare`
+method, and are known as :func:`.classname_for_table`,
+:func:`.name_for_scalar_relationship`,
+and :func:`.name_for_collection_relationship`. Any or all of these
+functions are provided as in the example below, where we use a "camel case"
+scheme for class names and a "pluralizer" for collection names using the
+`Inflect <https://pypi.python.org/pypi/inflect>`_ package::
+
+    import re
+    import inflect
+
+    def camelize_classname(base, tablename, table):
+        "Produce a 'camelized' class name, e.g. "
+        "'words_and_underscores' -> 'WordsAndUnderscores'"
+
+        return str(tablename[0].upper() + \
+            re.sub(r'_([a-z])', lambda m: m.group(1).upper(), tablename[1:]))
+
+    _pluralizer = inflect.engine()
+    def pluralize_collection(base, local_cls, referred_cls, constraint):
+        "Produce an 'uncamelized', 'pluralized' class name, e.g. "
+        "'SomeTerm' -> 'some_terms'"
+
+        referred_name = referred_cls.__name__
+        uncamelized = re.sub(r'[A-Z]',
+                             lambda m: "_%s" % m.group(0).lower(),
+                             referred_name)[1:]
+        pluralized = _pluralizer.plural(uncamelized)
+        return pluralized
+
+    from sqlalchemy.ext.automap import automap_base
+
+    Base = automap_base()
+
+    engine = create_engine("sqlite:///mydatabase.db")
+
+    Base.prepare(engine, reflect=True,
+                 classname_for_table=camelize_classname,
+                 name_for_collection_relationship=pluralize_collection
+                 )
+
+From the above mapping, we would now have classes ``User`` and ``Address``,
+where the collection from ``User`` to ``Address`` is called
+``User.addresses``::
+
+    User, Address = Base.classes.User, Base.classes.Address
+
+    u1 = User(addresses=[Address(email="foo@bar.com")])
+
+Relationship Detection
+======================
+
+The vast majority of what automap accomplishes is the generation of
+:func:`.relationship` structures based on foreign keys. The mechanism
+by which this works for many-to-one and one-to-many relationships is as
+follows:
+
+1. A given :class:`.Table`, known to be mapped to a particular class,
+   is examined for :class:`.ForeignKeyConstraint` objects.
+
+2. From each :class:`.ForeignKeyConstraint`, the remote :class:`.Table`
+   object present is matched up to the class to which it is to be mapped,
+   if any, else it is skipped.
+
+3. As the :class:`.ForeignKeyConstraint` we are examining corresponds to a
+   reference from the immediate mapped class, the relationship will be set up
+   as a many-to-one referring to the referred class; a corresponding
+   one-to-many backref will be created on the referred class referring
+   to this class.
+
+4. If any of the columns that are part of the :class:`.ForeignKeyConstraint`
+   are not nullable (e.g. ``nullable=False``), a
+   :paramref:`~.relationship.cascade` keyword argument
+   of ``all, delete-orphan`` will be added to the keyword arguments to
+   be passed to the relationship or backref. If the
+   :class:`.ForeignKeyConstraint` reports that
+   :paramref:`.ForeignKeyConstraint.ondelete`
+   is set to ``CASCADE`` for a not null or ``SET NULL`` for a nullable
+   set of columns, the option :paramref:`~.relationship.passive_deletes`
+   flag is set to ``True`` in the set of relationship keyword arguments.
+   Note that not all backends support reflection of ON DELETE.
+
+   .. versionadded:: 1.0.0 - automap will detect non-nullable foreign key
+      constraints when producing a one-to-many relationship and establish
+      a default cascade of ``all, delete-orphan`` if so; additionally,
+      if the constraint specifies :paramref:`.ForeignKeyConstraint.ondelete`
+      of ``CASCADE`` for non-nullable or ``SET NULL`` for nullable columns,
+      the ``passive_deletes=True`` option is also added.
+
+5. The names of the relationships are determined using the
+   :paramref:`.AutomapBase.prepare.name_for_scalar_relationship` and
+   :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
+   callable functions. It is important to note that the default relationship
+   naming derives the name from the **actual class name**. If you've
+   given a particular class an explicit name by declaring it, or specified an
+   alternate class naming scheme, that's the name from which the relationship
+   name will be derived.
+
+6. The classes are inspected for an existing mapped property matching these
+   names.
If one is detected on one side,
+   but none on the other side, :class:`.AutomapBase` attempts to create a
+   relationship on the missing side, then uses the
+   :paramref:`.relationship.back_populates` parameter in order to point
+   the new relationship to the other side.
+
+7. In the usual case where no relationship is on either side,
+   :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the
+   "many-to-one" side and matches it to the other using the
+   :paramref:`.relationship.backref` parameter.
+
+8. Production of the :func:`.relationship` and optionally the
+   :func:`.backref` is handed off to the
+   :paramref:`.AutomapBase.prepare.generate_relationship` function, which
+   can be supplied by the end-user in order to augment the arguments passed
+   to :func:`.relationship` or :func:`.backref` or to make use of custom
+   implementations of these functions.
+
+Custom Relationship Arguments
+-----------------------------
+
+The :paramref:`.AutomapBase.prepare.generate_relationship` hook can be used
+to add parameters to relationships. For most cases, we can make use of the
+existing :func:`.automap.generate_relationship` function to return
+the object, after augmenting the given keyword dictionary with our own
+arguments.
+
+Below is an illustration of how to send
+:paramref:`.relationship.cascade` and
+:paramref:`.relationship.passive_deletes`
+options along to all one-to-many relationships::
+
+    from sqlalchemy.ext.automap import generate_relationship
+
+    def _gen_relationship(base, direction, return_fn,
+                          attrname, local_cls, referred_cls, **kw):
+        if direction is interfaces.ONETOMANY:
+            kw['cascade'] = 'all, delete-orphan'
+            kw['passive_deletes'] = True
+        # make use of the built-in function to actually return
+        # the result.
+        return generate_relationship(base, direction, return_fn,
+                                     attrname, local_cls, referred_cls, **kw)
+
+    from sqlalchemy.ext.automap import automap_base
+    from sqlalchemy import create_engine
+
+    # automap base
+    Base = automap_base()
+
+    engine = create_engine("sqlite:///mydatabase.db")
+    Base.prepare(engine, reflect=True,
+                 generate_relationship=_gen_relationship)
+
+Many-to-Many relationships
+--------------------------
+
+:mod:`.sqlalchemy.ext.automap` will generate many-to-many relationships, e.g.
+those which contain a ``secondary`` argument. The process for producing these
+is as follows:
+
+1. A given :class:`.Table` is examined for :class:`.ForeignKeyConstraint`
+   objects, before any mapped class has been assigned to it.
+
+2. If the table contains two and exactly two :class:`.ForeignKeyConstraint`
+   objects, and all columns within this table are members of these two
+   :class:`.ForeignKeyConstraint` objects, the table is assumed to be a
+   "secondary" table, and will **not be mapped directly**.
+
+3. The two (or one, for self-referential) external tables to which the
+   :class:`.Table` refers are matched to the classes to which they will be
+   mapped, if any.
+
+4. If mapped classes for both sides are located, a many-to-many
+   bi-directional :func:`.relationship` / :func:`.backref` pair is created
+   between the two classes.
+
+5. The override logic for many-to-many works the same as that of
+   one-to-many/many-to-one; the :func:`.generate_relationship` function is
+   called upon to generate the structures and existing attributes will be
+   maintained.
+
+Relationships with Inheritance
+------------------------------
+
+:mod:`.sqlalchemy.ext.automap` will not generate any relationships between
+two classes that are in an inheritance relationship.
That is, with two +classes given as follows:: + + class Employee(Base): + __tablename__ = 'employee' + id = Column(Integer, primary_key=True) + type = Column(String(50)) + __mapper_args__ = { + 'polymorphic_identity':'employee', 'polymorphic_on': type + } + + class Engineer(Employee): + __tablename__ = 'engineer' + id = Column(Integer, ForeignKey('employee.id'), primary_key=True) + __mapper_args__ = { + 'polymorphic_identity':'engineer', + } + +The foreign key from ``Engineer`` to ``Employee`` is used not for a +relationship, but to establish joined inheritance between the two classes. + +Note that this means automap will not generate *any* relationships +for foreign keys that link from a subclass to a superclass. If a mapping +has actual relationships from subclass to superclass as well, those +need to be explicit. Below, as we have two separate foreign keys +from ``Engineer`` to ``Employee``, we need to set up both the relationship +we want as well as the ``inherit_condition``, as these are not things +SQLAlchemy can guess:: + + class Employee(Base): + __tablename__ = 'employee' + id = Column(Integer, primary_key=True) + type = Column(String(50)) + + __mapper_args__ = { + 'polymorphic_identity':'employee', 'polymorphic_on':type + } + + class Engineer(Employee): + __tablename__ = 'engineer' + id = Column(Integer, ForeignKey('employee.id'), primary_key=True) + favorite_employee_id = Column(Integer, ForeignKey('employee.id')) + + favorite_employee = relationship(Employee, + foreign_keys=favorite_employee_id) + + __mapper_args__ = { + 'polymorphic_identity':'engineer', + 'inherit_condition': id == Employee.id + } + +Handling Simple Naming Conflicts +-------------------------------- + +In the case of naming conflicts during mapping, override any of +:func:`.classname_for_table`, :func:`.name_for_scalar_relationship`, +and :func:`.name_for_collection_relationship` as needed. For example, if +automap is attempting to name a many-to-one relationship the same as an +existing column, an alternate convention can be conditionally selected. Given +a schema: + +.. sourcecode:: sql + + CREATE TABLE table_a ( + id INTEGER PRIMARY KEY + ); + + CREATE TABLE table_b ( + id INTEGER PRIMARY KEY, + table_a INTEGER, + FOREIGN KEY(table_a) REFERENCES table_a(id) + ); + +The above schema will first automap the ``table_a`` table as a class named +``table_a``; it will then automap a relationship onto the class for ``table_b`` +with the same name as this related class, e.g. ``table_a``. This +relationship name conflicts with the mapping column ``table_b.table_a``, +and will emit an error on mapping. + +We can resolve this conflict by using an underscore as follows:: + + def name_for_scalar_relationship(base, local_cls, referred_cls, constraint): + name = referred_cls.__name__.lower() + local_table = local_cls.__table__ + if name in local_table.columns: + newname = name + "_" + warnings.warn( + "Already detected name %s present. using %s" % + (name, newname)) + return newname + return name + + + Base.prepare(engine, reflect=True, + name_for_scalar_relationship=name_for_scalar_relationship) + +Alternatively, we can change the name on the column side. 
The columns
+that are mapped can be modified using the technique described at
+:ref:`mapper_column_distinct_names`, by assigning the column explicitly
+to a new name::
+
+    Base = automap_base()
+
+    class TableB(Base):
+        __tablename__ = 'table_b'
+        _table_a = Column('table_a', ForeignKey('table_a.id'))
+
+    Base.prepare(engine, reflect=True)
+
+
+Using Automap with Explicit Declarations
+========================================
+
+As noted previously, automap has no dependency on reflection, and can make
+use of any collection of :class:`.Table` objects within a :class:`.MetaData`
+collection. From this, it follows that automap can also be used to
+generate missing relationships given an otherwise complete model that fully
+defines table metadata::
+
+    from sqlalchemy.ext.automap import automap_base
+    from sqlalchemy import Column, Integer, String, ForeignKey
+
+    Base = automap_base()
+
+    class User(Base):
+        __tablename__ = 'user'
+
+        id = Column(Integer, primary_key=True)
+        name = Column(String)
+
+    class Address(Base):
+        __tablename__ = 'address'
+
+        id = Column(Integer, primary_key=True)
+        email = Column(String)
+        user_id = Column(ForeignKey('user.id'))
+
+    # produce relationships
+    Base.prepare()
+
+    # mapping is complete, with "address_collection" and
+    # "user" relationships
+    a1 = Address(email='u1')
+    a2 = Address(email='u2')
+    u1 = User(address_collection=[a1, a2])
+    assert a1.user is u1
+
+Above, given mostly complete ``User`` and ``Address`` mappings, the
+:class:`.ForeignKey` which we defined on ``Address.user_id`` allowed a
+bidirectional relationship pair ``Address.user`` and
+``User.address_collection`` to be generated on the mapped classes.
+
+Note that when subclassing :class:`.AutomapBase`,
+the :meth:`.AutomapBase.prepare` method is required; if not called, the
+classes we've declared are in an un-mapped state.
+
+
+"""
+from .declarative import declarative_base as _declarative_base
+from .declarative.base import _DeferredMapperConfig
+from ..sql import and_
+from ..schema import ForeignKeyConstraint
+from ..orm import relationship, backref, interfaces
+from .. import util
+
+
+def classname_for_table(base, tablename, table):
+    """Return the class name that should be used, given the name
+    of a table.
+
+    The default implementation is::
+
+        return str(tablename)
+
+    Alternate implementations can be specified using the
+    :paramref:`.AutomapBase.prepare.classname_for_table`
+    parameter.
+
+    :param base: the :class:`.AutomapBase` class doing the prepare.
+
+    :param tablename: string name of the :class:`.Table`.
+
+    :param table: the :class:`.Table` object itself.
+
+    :return: a string class name.
+
+    .. note::
+
+        In Python 2, the string used for the class name **must** be a
+        non-Unicode object, e.g. a ``str()`` object. The ``.name`` attribute
+        of :class:`.Table` is typically a Python unicode subclass, so the
+        ``str()`` function should be applied to this name, after accounting
+        for any non-ASCII characters.
+
+    """
+    return str(tablename)
+
+
+def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
+    """Return the attribute name that should be used to refer from one
+    class to another, for a scalar object reference.
+
+    The default implementation is::
+
+        return referred_cls.__name__.lower()
+
+    Alternate implementations can be specified using the
+    :paramref:`.AutomapBase.prepare.name_for_scalar_relationship`
+    parameter.
+
+    :param base: the :class:`.AutomapBase` class doing the prepare.
+
+    :param local_cls: the class to be mapped on the local side.
+
+    :param referred_cls: the class to be mapped on the referring side.
+
+    :param constraint: the :class:`.ForeignKeyConstraint` that is being
+     inspected to produce this relationship.
+
+    """
+    return referred_cls.__name__.lower()
+
+
+def name_for_collection_relationship(
+        base, local_cls, referred_cls, constraint):
+    """Return the attribute name that should be used to refer from one
+    class to another, for a collection reference.
+
+    The default implementation is::
+
+        return referred_cls.__name__.lower() + "_collection"
+
+    Alternate implementations
+    can be specified using the
+    :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
+    parameter.
+
+    :param base: the :class:`.AutomapBase` class doing the prepare.
+
+    :param local_cls: the class to be mapped on the local side.
+
+    :param referred_cls: the class to be mapped on the referring side.
+
+    :param constraint: the :class:`.ForeignKeyConstraint` that is being
+     inspected to produce this relationship.
+
+    """
+    return referred_cls.__name__.lower() + "_collection"
+
+
+def generate_relationship(
+        base, direction, return_fn, attrname, local_cls, referred_cls, **kw):
+    """Generate a :func:`.relationship` or :func:`.backref` on behalf of two
+    mapped classes.
+
+    An alternate implementation of this function can be specified using the
+    :paramref:`.AutomapBase.prepare.generate_relationship` parameter.
+
+    The default implementation of this function is as follows::
+
+        if return_fn is backref:
+            return return_fn(attrname, **kw)
+        elif return_fn is relationship:
+            return return_fn(referred_cls, **kw)
+        else:
+            raise TypeError("Unknown relationship function: %s" % return_fn)
+
+    :param base: the :class:`.AutomapBase` class doing the prepare.
+
+    :param direction: indicate the "direction" of the relationship; this will
+     be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOMANY`.
+
+    :param return_fn: the function that is used by default to create the
+     relationship. This will be either :func:`.relationship` or
+     :func:`.backref`. The :func:`.backref` function's result will be used to
+     produce a new :func:`.relationship` in a second step, so it is critical
+     that user-defined implementations correctly differentiate between the
+     two functions, if a custom relationship function is being used.
+
+    :param attrname: the attribute name to which this relationship is being
+     assigned. If the value of :paramref:`.generate_relationship.return_fn`
+     is the :func:`.backref` function, then this name is the name that is
+     being assigned to the backref.
+
+    :param local_cls: the "local" class to which this relationship or backref
+     will be locally present.
+
+    :param referred_cls: the "referred" class to which the relationship or
+     backref refers.
+
+    :param \**kw: all additional keyword arguments are passed along to the
+     function.
+
+    :return: a :func:`.relationship` or :func:`.backref` construct, as
+     dictated by the :paramref:`.generate_relationship.return_fn` parameter.
+
+    """
+    if return_fn is backref:
+        return return_fn(attrname, **kw)
+    elif return_fn is relationship:
+        return return_fn(referred_cls, **kw)
+    else:
+        raise TypeError("Unknown relationship function: %s" % return_fn)
+
+
+class AutomapBase(object):
+    """Base class for an "automap" schema.
+
+    The :class:`.AutomapBase` class can be compared to the "declarative base"
+    class that is produced by the :func:`.declarative.declarative_base`
+    function. In practice, the :class:`.AutomapBase` class is always used
+    as a mixin along with an actual declarative base.
+
+    A new subclassable :class:`.AutomapBase` is typically instantiated
+    using the :func:`.automap_base` function.
+
+    .. seealso::
+
+        :ref:`automap_toplevel`
+
+    """
+    __abstract__ = True
+
+    classes = None
+    """An instance of :class:`.util.Properties` containing classes.
+
+    This object behaves much like the ``.c`` collection on a table. Classes
+    are present under the name they were given, e.g.::
+
+        Base = automap_base()
+        Base.prepare(engine=some_engine, reflect=True)
+
+        User, Address = Base.classes.User, Base.classes.Address
+
+    """
+
+    @classmethod
+    def prepare(
+            cls,
+            engine=None,
+            reflect=False,
+            classname_for_table=classname_for_table,
+            collection_class=list,
+            name_for_scalar_relationship=name_for_scalar_relationship,
+            name_for_collection_relationship=name_for_collection_relationship,
+            generate_relationship=generate_relationship):
+        """Extract mapped classes and relationships from the
+        :class:`.MetaData` and perform mappings.
+
+        :param engine: an :class:`.Engine` or :class:`.Connection` with which
+         to perform schema reflection, if specified.
+         If the :paramref:`.AutomapBase.prepare.reflect` argument is False,
+         this object is not used.
+
+        :param reflect: if True, the :meth:`.MetaData.reflect` method is
+         called on the :class:`.MetaData` associated with this
+         :class:`.AutomapBase`. The :class:`.Engine` passed via
+         :paramref:`.AutomapBase.prepare.engine` will be used to perform the
+         reflection if present; else, the :class:`.MetaData` should already
+         be bound to some engine else the operation will fail.
+
+        :param classname_for_table: callable function which will be used to
+         produce new class names, given a table name. Defaults to
+         :func:`.classname_for_table`.
+
+        :param name_for_scalar_relationship: callable function which will be
+         used to produce relationship names for scalar relationships.
+         Defaults to :func:`.name_for_scalar_relationship`.
+
+        :param name_for_collection_relationship: callable function which will
+         be used to produce relationship names for collection-oriented
+         relationships. Defaults to
+         :func:`.name_for_collection_relationship`.
+
+        :param generate_relationship: callable function which will be used to
+         actually generate :func:`.relationship` and :func:`.backref`
+         constructs. Defaults to :func:`.generate_relationship`.
+
+        :param collection_class: the Python collection class that will be
+         used when a new :func:`.relationship` object is created that
+         represents a collection. Defaults to ``list``.
+
+        """
+        if reflect:
+            cls.metadata.reflect(
+                engine,
+                extend_existing=True,
+                autoload_replace=False
+            )
+
+        table_to_map_config = dict(
+            (m.local_table, m)
+            for m in _DeferredMapperConfig.
+ classes_for_base(cls, sort=False) + ) + + many_to_many = [] + + for table in cls.metadata.tables.values(): + lcl_m2m, rem_m2m, m2m_const = _is_many_to_many(cls, table) + if lcl_m2m is not None: + many_to_many.append((lcl_m2m, rem_m2m, m2m_const, table)) + elif not table.primary_key: + continue + elif table not in table_to_map_config: + mapped_cls = type( + classname_for_table(cls, table.name, table), + (cls, ), + {"__table__": table} + ) + map_config = _DeferredMapperConfig.config_for_cls(mapped_cls) + cls.classes[map_config.cls.__name__] = mapped_cls + table_to_map_config[table] = map_config + + for map_config in table_to_map_config.values(): + _relationships_for_fks(cls, + map_config, + table_to_map_config, + collection_class, + name_for_scalar_relationship, + name_for_collection_relationship, + generate_relationship) + + for lcl_m2m, rem_m2m, m2m_const, table in many_to_many: + _m2m_relationship(cls, lcl_m2m, rem_m2m, m2m_const, table, + table_to_map_config, + collection_class, + name_for_scalar_relationship, + name_for_collection_relationship, + generate_relationship) + + for map_config in _DeferredMapperConfig.classes_for_base(cls): + map_config.map() + + _sa_decl_prepare = True + """Indicate that the mapping of classes should be deferred. + + The presence of this attribute name indicates to declarative + that the call to mapper() should not occur immediately; instead, + information about the table and attributes to be mapped are gathered + into an internal structure called _DeferredMapperConfig. These + objects can be collected later using classes_for_base(), additional + mapping decisions can be made, and then the map() method will actually + apply the mapping. + + The only real reason this deferral of the whole + thing is needed is to support primary key columns that aren't reflected + yet when the class is declared; everything else can theoretically be + added to the mapper later. However, the _DeferredMapperConfig is a + nice interface in any case which exists at that not usually exposed point + at which declarative has the class and the Table but hasn't called + mapper() yet. + + """ + + +def automap_base(declarative_base=None, **kw): + """Produce a declarative automap base. + + This function produces a new base class that is a product of the + :class:`.AutomapBase` class as well a declarative base produced by + :func:`.declarative.declarative_base`. + + All parameters other than ``declarative_base`` are keyword arguments + that are passed directly to the :func:`.declarative.declarative_base` + function. + + :param declarative_base: an existing class produced by + :func:`.declarative.declarative_base`. When this is passed, the function + no longer invokes :func:`.declarative.declarative_base` itself, and all + other keyword arguments are ignored. + + :param \**kw: keyword arguments are passed along to + :func:`.declarative.declarative_base`. 
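+
+    As an editorial illustration of the two calling styles described above
+    (``SomeExistingBase`` here is a hypothetical, pre-existing declarative
+    base)::
+
+        # produce a brand-new declarative base internally
+        Base = automap_base()
+
+        # or combine AutomapBase with a declarative base that already exists
+        Base = automap_base(declarative_base=SomeExistingBase)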
+ + """ + if declarative_base is None: + Base = _declarative_base(**kw) + else: + Base = declarative_base + + return type( + Base.__name__, + (AutomapBase, Base,), + {"__abstract__": True, "classes": util.Properties({})} + ) + + +def _is_many_to_many(automap_base, table): + fk_constraints = [const for const in table.constraints + if isinstance(const, ForeignKeyConstraint)] + if len(fk_constraints) != 2: + return None, None, None + + cols = sum( + [[fk.parent for fk in fk_constraint.elements] + for fk_constraint in fk_constraints], []) + + if set(cols) != set(table.c): + return None, None, None + + return ( + fk_constraints[0].elements[0].column.table, + fk_constraints[1].elements[0].column.table, + fk_constraints + ) + + +def _relationships_for_fks(automap_base, map_config, table_to_map_config, + collection_class, + name_for_scalar_relationship, + name_for_collection_relationship, + generate_relationship): + local_table = map_config.local_table + local_cls = map_config.cls + + if local_table is None: + return + for constraint in local_table.constraints: + if isinstance(constraint, ForeignKeyConstraint): + fks = constraint.elements + referred_table = fks[0].column.table + referred_cfg = table_to_map_config.get(referred_table, None) + if referred_cfg is None: + continue + referred_cls = referred_cfg.cls + + if local_cls is not referred_cls and issubclass( + local_cls, referred_cls): + continue + + relationship_name = name_for_scalar_relationship( + automap_base, + local_cls, + referred_cls, constraint) + backref_name = name_for_collection_relationship( + automap_base, + referred_cls, + local_cls, + constraint + ) + + o2m_kws = {} + nullable = False not in set([fk.parent.nullable for fk in fks]) + if not nullable: + o2m_kws['cascade'] = "all, delete-orphan" + + if constraint.ondelete and \ + constraint.ondelete.lower() == "cascade": + o2m_kws['passive_deletes'] = True + else: + if constraint.ondelete and \ + constraint.ondelete.lower() == "set null": + o2m_kws['passive_deletes'] = True + + create_backref = backref_name not in referred_cfg.properties + + if relationship_name not in map_config.properties: + if create_backref: + backref_obj = generate_relationship( + automap_base, + interfaces.ONETOMANY, backref, + backref_name, referred_cls, local_cls, + collection_class=collection_class, + **o2m_kws) + else: + backref_obj = None + rel = generate_relationship(automap_base, + interfaces.MANYTOONE, + relationship, + relationship_name, + local_cls, referred_cls, + foreign_keys=[ + fk.parent + for fk in constraint.elements], + backref=backref_obj, + remote_side=[ + fk.column + for fk in constraint.elements] + ) + if rel is not None: + map_config.properties[relationship_name] = rel + if not create_backref: + referred_cfg.properties[ + backref_name].back_populates = relationship_name + elif create_backref: + rel = generate_relationship(automap_base, + interfaces.ONETOMANY, + relationship, + backref_name, + referred_cls, local_cls, + foreign_keys=[ + fk.parent + for fk in constraint.elements], + back_populates=relationship_name, + collection_class=collection_class, + **o2m_kws) + if rel is not None: + referred_cfg.properties[backref_name] = rel + map_config.properties[ + relationship_name].back_populates = backref_name + + +def _m2m_relationship(automap_base, lcl_m2m, rem_m2m, m2m_const, table, + table_to_map_config, + collection_class, + name_for_scalar_relationship, + name_for_collection_relationship, + generate_relationship): + + map_config = table_to_map_config.get(lcl_m2m, None) + referred_cfg = 
table_to_map_config.get(rem_m2m, None) + if map_config is None or referred_cfg is None: + return + + local_cls = map_config.cls + referred_cls = referred_cfg.cls + + relationship_name = name_for_collection_relationship( + automap_base, + local_cls, + referred_cls, m2m_const[0]) + backref_name = name_for_collection_relationship( + automap_base, + referred_cls, + local_cls, + m2m_const[1] + ) + + create_backref = backref_name not in referred_cfg.properties + + if relationship_name not in map_config.properties: + if create_backref: + backref_obj = generate_relationship( + automap_base, + interfaces.MANYTOMANY, + backref, + backref_name, + referred_cls, local_cls, + collection_class=collection_class + ) + else: + backref_obj = None + rel = generate_relationship(automap_base, + interfaces.MANYTOMANY, + relationship, + relationship_name, + local_cls, referred_cls, + secondary=table, + primaryjoin=and_( + fk.column == fk.parent + for fk in m2m_const[0].elements), + secondaryjoin=and_( + fk.column == fk.parent + for fk in m2m_const[1].elements), + backref=backref_obj, + collection_class=collection_class + ) + if rel is not None: + map_config.properties[relationship_name] = rel + + if not create_backref: + referred_cfg.properties[ + backref_name].back_populates = relationship_name + elif create_backref: + rel = generate_relationship(automap_base, + interfaces.MANYTOMANY, + relationship, + backref_name, + referred_cls, local_cls, + secondary=table, + primaryjoin=and_( + fk.column == fk.parent + for fk in m2m_const[1].elements), + secondaryjoin=and_( + fk.column == fk.parent + for fk in m2m_const[0].elements), + back_populates=relationship_name, + collection_class=collection_class) + if rel is not None: + referred_cfg.properties[backref_name] = rel + map_config.properties[ + relationship_name].back_populates = backref_name diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/baked.py b/lib/python3.4/site-packages/sqlalchemy/ext/baked.py new file mode 100644 index 0000000..2504be9 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/ext/baked.py @@ -0,0 +1,523 @@ +# sqlalchemy/ext/baked.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +"""Baked query extension. + +Provides a creational pattern for the :class:`.query.Query` object which +allows the fully constructed object, Core select statement, and string +compiled result to be fully cached. + + +""" + +from ..orm.query import Query +from ..orm import strategies, attributes, properties, \ + strategy_options, util as orm_util, interfaces +from .. import log as sqla_log +from ..sql import util as sql_util +from ..orm import exc as orm_exc +from .. import exc as sa_exc +from .. 
import util + +import copy +import logging + +log = logging.getLogger(__name__) + + +class BakedQuery(object): + """A builder object for :class:`.query.Query` objects.""" + + __slots__ = 'steps', '_bakery', '_cache_key', '_spoiled' + + def __init__(self, bakery, initial_fn, args=()): + self._cache_key = () + self._update_cache_key(initial_fn, args) + self.steps = [initial_fn] + self._spoiled = False + self._bakery = bakery + + @classmethod + def bakery(cls, size=200): + """Construct a new bakery.""" + + _bakery = util.LRUCache(size) + + def call(initial_fn, *args): + return cls(_bakery, initial_fn, args) + + return call + + def _clone(self): + b1 = BakedQuery.__new__(BakedQuery) + b1._cache_key = self._cache_key + b1.steps = list(self.steps) + b1._bakery = self._bakery + b1._spoiled = self._spoiled + return b1 + + def _update_cache_key(self, fn, args=()): + self._cache_key += (fn.__code__,) + args + + def __iadd__(self, other): + if isinstance(other, tuple): + self.add_criteria(*other) + else: + self.add_criteria(other) + return self + + def __add__(self, other): + if isinstance(other, tuple): + return self.with_criteria(*other) + else: + return self.with_criteria(other) + + def add_criteria(self, fn, *args): + """Add a criteria function to this :class:`.BakedQuery`. + + This is equivalent to using the ``+=`` operator to + modify a :class:`.BakedQuery` in-place. + + """ + self._update_cache_key(fn, args) + self.steps.append(fn) + return self + + def with_criteria(self, fn, *args): + """Add a criteria function to a :class:`.BakedQuery` cloned from this one. + + This is equivalent to using the ``+`` operator to + produce a new :class:`.BakedQuery` with modifications. + + """ + return self._clone().add_criteria(fn, *args) + + def for_session(self, session): + """Return a :class:`.Result` object for this :class:`.BakedQuery`. + + This is equivalent to calling the :class:`.BakedQuery` as a + Python callable, e.g. ``result = my_baked_query(session)``. + + """ + return Result(self, session) + + def __call__(self, session): + return self.for_session(session) + + def spoil(self, full=False): + """Cancel any query caching that will occur on this BakedQuery object. + + The BakedQuery can continue to be used normally, however additional + creational functions will not be cached; they will be called + on every invocation. + + This is to support the case where a particular step in constructing + a baked query disqualifies the query from being cacheable, such + as a variant that relies upon some uncacheable value. + + :param full: if False, only functions added to this + :class:`.BakedQuery` object subsequent to the spoil step will be + non-cached; the state of the :class:`.BakedQuery` up until + this point will be pulled from the cache. If True, then the + entire :class:`.Query` object is built from scratch each + time, with all creational functions being called on each + invocation. 
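+
+        A brief sketch of the intent (editorial illustration; ``bakery``,
+        ``User`` and ``uncacheable_value`` are hypothetical names)::
+
+            bq = bakery(lambda session: session.query(User))
+            bq.spoil()
+            # steps added from this point on are invoked on every call,
+            # rather than being cached
+            bq += lambda q: q.filter(User.name == uncacheable_value())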
+ + """ + if not full: + _spoil_point = self._clone() + _spoil_point._cache_key += ('_query_only', ) + self.steps = [_spoil_point._retrieve_baked_query] + self._spoiled = True + return self + + def _retrieve_baked_query(self, session): + query = self._bakery.get(self._cache_key, None) + if query is None: + query = self._as_query(session) + self._bakery[self._cache_key] = query.with_session(None) + return query.with_session(session) + + def _bake(self, session): + query = self._as_query(session) + + context = query._compile_context() + self._bake_subquery_loaders(session, context) + context.session = None + context.query = query = context.query.with_session(None) + query._execution_options = query._execution_options.union( + {"compiled_cache": self._bakery} + ) + # we'll be holding onto the query for some of its state, + # so delete some compilation-use-only attributes that can take up + # space + for attr in ( + '_correlate', '_from_obj', '_mapper_adapter_map', + '_joinpath', '_joinpoint'): + query.__dict__.pop(attr, None) + self._bakery[self._cache_key] = context + return context + + def _as_query(self, session): + query = self.steps[0](session) + + for step in self.steps[1:]: + query = step(query) + return query + + def _bake_subquery_loaders(self, session, context): + """convert subquery eager loaders in the cache into baked queries. + + For subquery eager loading to work, all we need here is that the + Query point to the correct session when it is run. However, since + we are "baking" anyway, we may as well also turn the query into + a "baked" query so that we save on performance too. + + """ + context.attributes['baked_queries'] = baked_queries = [] + for k, v in list(context.attributes.items()): + if isinstance(v, Query): + if 'subquery' in k: + bk = BakedQuery(self._bakery, lambda *args: v) + bk._cache_key = self._cache_key + k + bk._bake(session) + baked_queries.append((k, bk._cache_key, v)) + del context.attributes[k] + + def _unbake_subquery_loaders(self, session, context, params): + """Retrieve subquery eager loaders stored by _bake_subquery_loaders + and turn them back into Result objects that will iterate just + like a Query object. + + """ + for k, cache_key, query in context.attributes["baked_queries"]: + bk = BakedQuery(self._bakery, lambda sess: query.with_session(sess)) + bk._cache_key = cache_key + context.attributes[k] = bk.for_session(session).params(**params) + + +class Result(object): + """Invokes a :class:`.BakedQuery` against a :class:`.Session`. + + The :class:`.Result` object is where the actual :class:`.query.Query` + object gets created, or retrieved from the cache, + against a target :class:`.Session`, and is then invoked for results. 
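+
+    A minimal usage sketch (editorial illustration; ``bakery`` is a bakery
+    produced by :meth:`.BakedQuery.bakery` and ``User`` is a hypothetical
+    mapped class)::
+
+        baked_query = bakery(lambda session: session.query(User))
+        result = baked_query(session)   # a Result object
+        users = result.all()            # compiled SQL is cached and reused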
+ + """ + __slots__ = 'bq', 'session', '_params' + + def __init__(self, bq, session): + self.bq = bq + self.session = session + self._params = {} + + def params(self, *args, **kw): + """Specify parameters to be replaced into the string SQL statement.""" + + if len(args) == 1: + kw.update(args[0]) + elif len(args) > 0: + raise sa_exc.ArgumentError( + "params() takes zero or one positional argument, " + "which is a dictionary.") + self._params.update(kw) + return self + + def _as_query(self): + return self.bq._as_query(self.session).params(self._params) + + def __str__(self): + return str(self._as_query()) + + def __iter__(self): + bq = self.bq + if bq._spoiled: + return iter(self._as_query()) + + baked_context = bq._bakery.get(bq._cache_key, None) + if baked_context is None: + baked_context = bq._bake(self.session) + + context = copy.copy(baked_context) + context.session = self.session + context.attributes = context.attributes.copy() + + bq._unbake_subquery_loaders(self.session, context, self._params) + + context.statement.use_labels = True + if context.autoflush and not context.populate_existing: + self.session._autoflush() + return context.query.params(self._params).\ + with_session(self.session)._execute_and_instances(context) + + def first(self): + """Return the first row. + + Equivalent to :meth:`.Query.first`. + + """ + bq = self.bq.with_criteria(lambda q: q.slice(0, 1)) + ret = list(bq.for_session(self.session).params(self._params)) + if len(ret) > 0: + return ret[0] + else: + return None + + def one(self): + """Return exactly one result or raise an exception. + + Equivalent to :meth:`.Query.one`. + + """ + ret = list(self) + + l = len(ret) + if l == 1: + return ret[0] + elif l == 0: + raise orm_exc.NoResultFound("No row was found for one()") + else: + raise orm_exc.MultipleResultsFound( + "Multiple rows were found for one()") + + def one_or_none(self): + """Return one or zero results, or raise an exception for multiple + rows. + + Equivalent to :meth:`.Query.one_or_none`. + + .. versionadded:: 1.0.9 + + """ + ret = list(self) + + l = len(ret) + if l == 1: + return ret[0] + elif l == 0: + return None + else: + raise orm_exc.MultipleResultsFound( + "Multiple rows were found for one_or_none()") + + def all(self): + """Return all rows. + + Equivalent to :meth:`.Query.all`. + + """ + return list(self) + + def get(self, ident): + """Retrieve an object based on identity. + + Equivalent to :meth:`.Query.get`. 
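+
+        E.g., as an editorial illustration, with ``baked_query`` a
+        :class:`.BakedQuery`::
+
+            user = baked_query(session).get(5)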
+ + """ + + query = self.bq.steps[0](self.session) + return query._get_impl(ident, self._load_on_ident) + + def _load_on_ident(self, query, key): + """Load the given identity key from the database.""" + + ident = key[1] + + mapper = query._mapper_zero() + + _get_clause, _get_params = mapper._get_clause + + def setup(query): + _lcl_get_clause = _get_clause + q = query._clone() + q._get_condition() + q._order_by = None + + # None present in ident - turn those comparisons + # into "IS NULL" + if None in ident: + nones = set([ + _get_params[col].key for col, value in + zip(mapper.primary_key, ident) if value is None + ]) + _lcl_get_clause = sql_util.adapt_criterion_to_null( + _lcl_get_clause, nones) + + _lcl_get_clause = q._adapt_clause(_lcl_get_clause, True, False) + q._criterion = _lcl_get_clause + return q + + # cache the query against a key that includes + # which positions in the primary key are NULL + # (remember, we can map to an OUTER JOIN) + bq = self.bq + + # add the clause we got from mapper._get_clause to the cache + # key so that if a race causes multiple calls to _get_clause, + # we've cached on ours + bq = bq._clone() + bq._cache_key += (_get_clause, ) + + bq = bq.with_criteria(setup, tuple(elem is None for elem in ident)) + + params = dict([ + (_get_params[primary_key].key, id_val) + for id_val, primary_key in zip(ident, mapper.primary_key) + ]) + + result = list(bq.for_session(self.session).params(**params)) + l = len(result) + if l > 1: + raise orm_exc.MultipleResultsFound() + elif l: + return result[0] + else: + return None + + +def bake_lazy_loaders(): + """Enable the use of baked queries for all lazyloaders systemwide. + + This operation should be safe for all lazy loaders, and will reduce + Python overhead for these operations. + + """ + BakedLazyLoader._strategy_keys[:] = [] + + properties.RelationshipProperty.strategy_for( + lazy="select")(BakedLazyLoader) + properties.RelationshipProperty.strategy_for( + lazy=True)(BakedLazyLoader) + properties.RelationshipProperty.strategy_for( + lazy="baked_select")(BakedLazyLoader) + + strategies.LazyLoader._strategy_keys[:] = BakedLazyLoader._strategy_keys[:] + + +def unbake_lazy_loaders(): + """Disable the use of baked queries for all lazyloaders systemwide. + + This operation reverts the changes produced by :func:`.bake_lazy_loaders`. 
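+
+    As an editorial illustration, the two functions are typically paired::
+
+        from sqlalchemy.ext import baked
+
+        baked.bake_lazy_loaders()
+        # ... run application code with baked lazy loading ...
+        baked.unbake_lazy_loaders()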
+ + """ + strategies.LazyLoader._strategy_keys[:] = [] + BakedLazyLoader._strategy_keys[:] = [] + + properties.RelationshipProperty.strategy_for( + lazy="select")(strategies.LazyLoader) + properties.RelationshipProperty.strategy_for( + lazy=True)(strategies.LazyLoader) + properties.RelationshipProperty.strategy_for( + lazy="baked_select")(BakedLazyLoader) + assert strategies.LazyLoader._strategy_keys + + +@sqla_log.class_logger +@properties.RelationshipProperty.strategy_for(lazy="baked_select") +class BakedLazyLoader(strategies.LazyLoader): + + def _emit_lazyload(self, session, state, ident_key, passive): + q = BakedQuery( + self.mapper._compiled_cache, + lambda session: session.query(self.mapper)) + q.add_criteria( + lambda q: q._adapt_all_clauses()._with_invoke_all_eagers(False), + self.parent_property) + + if not self.parent_property.bake_queries: + q.spoil(full=True) + + if self.parent_property.secondary is not None: + q.add_criteria( + lambda q: + q.select_from(self.mapper, self.parent_property.secondary)) + + pending = not state.key + + # don't autoflush on pending + if pending or passive & attributes.NO_AUTOFLUSH: + q.add_criteria(lambda q: q.autoflush(False)) + + if state.load_path: + q.spoil() + q.add_criteria( + lambda q: + q._with_current_path(state.load_path[self.parent_property])) + + if state.load_options: + q.spoil() + q.add_criteria( + lambda q: q._conditional_options(*state.load_options)) + + if self.use_get: + return q(session)._load_on_ident( + session.query(self.mapper), ident_key) + + if self.parent_property.order_by: + q.add_criteria( + lambda q: + q.order_by(*util.to_list(self.parent_property.order_by))) + + for rev in self.parent_property._reverse_property: + # reverse props that are MANYTOONE are loading *this* + # object from get(), so don't need to eager out to those. + if rev.direction is interfaces.MANYTOONE and \ + rev._use_get and \ + not isinstance(rev.strategy, strategies.LazyLoader): + q.add_criteria( + lambda q: + q.options( + strategy_options.Load( + rev.parent).baked_lazyload(rev.key))) + + lazy_clause, params = self._generate_lazy_clause(state, passive) + + if pending: + if orm_util._none_set.intersection(params.values()): + return None + + q.add_criteria(lambda q: q.filter(lazy_clause)) + result = q(session).params(**params).all() + if self.uselist: + return result + else: + l = len(result) + if l: + if l > 1: + util.warn( + "Multiple rows returned with " + "uselist=False for lazily-loaded attribute '%s' " + % self.parent_property) + + return result[0] + else: + return None + + +@strategy_options.loader_option() +def baked_lazyload(loadopt, attr): + """Indicate that the given attribute should be loaded using "lazy" + loading with a "baked" query used in the load. 
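+
+    E.g., as an editorial illustration (``User.addresses`` and ``session``
+    are hypothetical)::
+
+        from sqlalchemy.ext.baked import baked_lazyload
+
+        session.query(User).options(baked_lazyload(User.addresses))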
+
+    """
+    return loadopt.set_relationship_strategy(attr, {"lazy": "baked_select"})
+
+
+@baked_lazyload._add_unbound_fn
+def baked_lazyload(*keys):
+    return strategy_options._UnboundLoad._from_keys(
+        strategy_options._UnboundLoad.baked_lazyload, keys, False, {})
+
+
+@baked_lazyload._add_unbound_all_fn
+def baked_lazyload_all(*keys):
+    return strategy_options._UnboundLoad._from_keys(
+        strategy_options._UnboundLoad.baked_lazyload, keys, True, {})
+
+baked_lazyload = baked_lazyload._unbound_fn
+baked_lazyload_all = baked_lazyload_all._unbound_all_fn
+
+bakery = BakedQuery.bakery
diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/compiler.py b/lib/python3.4/site-packages/sqlalchemy/ext/compiler.py
new file mode 100644
index 0000000..86156be
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/ext/compiler.py
@@ -0,0 +1,461 @@
+# ext/compiler.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+#
+
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Provides an API for creation of custom ClauseElements and compilers.
+
+Synopsis
+========
+
+Usage involves the creation of one or more
+:class:`~sqlalchemy.sql.expression.ClauseElement` subclasses and one or
+more callables defining its compilation::
+
+    from sqlalchemy.ext.compiler import compiles
+    from sqlalchemy.sql.expression import ColumnClause
+
+    class MyColumn(ColumnClause):
+        pass
+
+    @compiles(MyColumn)
+    def compile_mycolumn(element, compiler, **kw):
+        return "[%s]" % element.name
+
+Above, ``MyColumn`` extends :class:`~sqlalchemy.sql.expression.ColumnClause`,
+the base expression element for named column objects. The ``compiles``
+decorator registers itself with the ``MyColumn`` class so that it is invoked
+when the object is compiled to a string::
+
+    from sqlalchemy import select
+
+    s = select([MyColumn('x'), MyColumn('y')])
+    print str(s)
+
+Produces::
+
+    SELECT [x], [y]
+
+Dialect-specific compilation rules
+==================================
+
+Compilers can also be made dialect-specific. The appropriate compiler will be
+invoked for the dialect in use::
+
+    from sqlalchemy.schema import DDLElement
+
+    class AlterColumn(DDLElement):
+
+        def __init__(self, column, cmd):
+            self.column = column
+            self.cmd = cmd
+
+    @compiles(AlterColumn)
+    def visit_alter_column(element, compiler, **kw):
+        return "ALTER COLUMN %s ..." % element.column.name
+
+    @compiles(AlterColumn, 'postgresql')
+    def visit_alter_column(element, compiler, **kw):
+        return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name,
+                                                       element.column.name)
+
+The second ``visit_alter_column`` will be invoked when any ``postgresql``
+dialect is used.
+
+Compiling sub-elements of a custom expression construct
+=======================================================
+
+The ``compiler`` argument is the
+:class:`~sqlalchemy.engine.interfaces.Compiled` object in use. This object
+can be inspected for any information about the in-progress compilation,
+including ``compiler.dialect``, ``compiler.statement`` etc.
The
+:class:`~sqlalchemy.sql.compiler.SQLCompiler` and
+:class:`~sqlalchemy.sql.compiler.DDLCompiler` both include a ``process()``
+method which can be used for compilation of embedded attributes::
+
+    from sqlalchemy.sql.expression import Executable, ClauseElement
+
+    class InsertFromSelect(Executable, ClauseElement):
+        def __init__(self, table, select):
+            self.table = table
+            self.select = select
+
+    @compiles(InsertFromSelect)
+    def visit_insert_from_select(element, compiler, **kw):
+        return "INSERT INTO %s (%s)" % (
+            compiler.process(element.table, asfrom=True),
+            compiler.process(element.select)
+        )
+
+    insert = InsertFromSelect(t1, select([t1]).where(t1.c.x > 5))
+    print(insert)
+
+Produces::
+
+    "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z
+    FROM mytable WHERE mytable.x > :x_1)"
+
+.. note::
+
+    The above ``InsertFromSelect`` construct is only an example; this actual
+    functionality is already available using the
+    :meth:`.Insert.from_select` method.
+
+.. note::
+
+    The above ``InsertFromSelect`` construct probably wants to have
+    "autocommit" enabled. See :ref:`enabling_compiled_autocommit` for
+    this step.
+
+Cross Compiling between SQL and DDL compilers
+---------------------------------------------
+
+SQL and DDL constructs are each compiled using different base compilers -
+``SQLCompiler`` and ``DDLCompiler``. A common need is to access the
+compilation rules of SQL expressions from within a DDL expression. The
+``DDLCompiler`` includes an accessor ``sql_compiler`` for this reason, such as
+below where we generate a CHECK constraint that embeds a SQL expression::
+
+    @compiles(MyConstraint)
+    def compile_my_constraint(constraint, ddlcompiler, **kw):
+        return "CONSTRAINT %s CHECK (%s)" % (
+            constraint.name,
+            ddlcompiler.sql_compiler.process(
+                constraint.expression, literal_binds=True)
+        )
+
+Above, we add an additional flag to the process step as called by
+:meth:`.SQLCompiler.process`, which is the ``literal_binds`` flag. This
+indicates that any SQL expression which refers to a :class:`.BindParameter`
+object or other "literal" object such as those which refer to strings or
+integers should be rendered **in-place**, rather than being referred to as
+a bound parameter; when emitting DDL, bound parameters are typically not
+supported.
+
+
+.. _enabling_compiled_autocommit:
+
+Enabling Autocommit on a Construct
+==================================
+
+Recall from the section :ref:`autocommit` that the :class:`.Engine`, when
+asked to execute a construct in the absence of a user-defined transaction,
+detects if the given construct represents DML or DDL, that is, a data
+modification or data definition statement, which requires (or may require,
+in the case of DDL) that the transaction generated by the DBAPI be committed
+(recall that DBAPI always has a transaction going on regardless of what
+SQLAlchemy does). Checking for this is actually accomplished by checking for
+the "autocommit" execution option on the construct. When building a
+construct like an INSERT derivation, a new DDL type, or perhaps a stored
+procedure that alters data, the "autocommit" option needs to be set in order
+for the statement to function with "connectionless" execution
+(as described in :ref:`dbengine_implicit`).
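+
+As a runtime alternative, the same flag can also be supplied per-execution
+via :meth:`.Executable.execution_options`; a minimal sketch (``stmt`` and
+``conn`` are assumed placeholders for such a construct and a
+:class:`.Connection`)::
+
+    conn.execute(stmt.execution_options(autocommit=True))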
+
+Currently a quick way to do this is to subclass :class:`.Executable`, then
+add the "autocommit" flag to the ``_execution_options`` dictionary (note this
+is a "frozen" dictionary which supplies a generative ``union()`` method)::
+
+    from sqlalchemy.sql.expression import Executable, ClauseElement
+
+    class MyInsertThing(Executable, ClauseElement):
+        _execution_options = \\
+            Executable._execution_options.union({'autocommit': True})
+
+More succinctly, if the construct is truly similar to an INSERT, UPDATE, or
+DELETE, :class:`.UpdateBase` can be used, which already is a subclass
+of :class:`.Executable`, :class:`.ClauseElement` and includes the
+``autocommit`` flag::
+
+    from sqlalchemy.sql.expression import UpdateBase
+
+    class MyInsertThing(UpdateBase):
+        def __init__(self, ...):
+            ...
+
+
+DDL elements that subclass :class:`.DDLElement` already have the
+"autocommit" flag turned on.
+
+
+Changing the default compilation of existing constructs
+=======================================================
+
+The compiler extension applies just as well to the existing constructs. When
+overriding the compilation of a built in SQL construct, the @compiles
+decorator is invoked upon the appropriate class (be sure to use the class,
+i.e. ``Insert`` or ``Select``, instead of the creation function such
+as ``insert()`` or ``select()``).
+
+Within the new compilation function, to get at the "original" compilation
+routine, use the appropriate ``visit_XXX`` method - this is because
+``compiler.process()`` will call upon the overriding routine and cause
+an endless loop. For example, to add a "prefix" to all insert statements::
+
+    from sqlalchemy.sql.expression import Insert
+
+    @compiles(Insert)
+    def prefix_inserts(insert, compiler, **kw):
+        return compiler.visit_insert(insert.prefix_with("some prefix"), **kw)
+
+The above compiler will prefix all INSERT statements with "some prefix" when
+compiled.
+
+.. _type_compilation_extension:
+
+Changing Compilation of Types
+=============================
+
+``compiler`` works for types, too, such as below where we implement the
+MS-SQL specific 'max' keyword for ``String``/``VARCHAR``::
+
+    @compiles(String, 'mssql')
+    @compiles(VARCHAR, 'mssql')
+    def compile_varchar(element, compiler, **kw):
+        if element.length == 'max':
+            return "VARCHAR('max')"
+        else:
+            return compiler.visit_VARCHAR(element, **kw)
+
+    foo = Table('foo', metadata,
+        Column('data', VARCHAR('max'))
+    )
+
+Subclassing Guidelines
+======================
+
+A big part of using the compiler extension is subclassing SQLAlchemy
+expression constructs. To make this easier, the expression and
+schema packages feature a set of "bases" intended for common tasks.
+A synopsis is as follows:
+
+* :class:`~sqlalchemy.sql.expression.ClauseElement` - This is the root
+  expression class. Any SQL expression can be derived from this base, and is
+  probably the best choice for longer constructs such as specialized INSERT
+  statements.
+
+* :class:`~sqlalchemy.sql.expression.ColumnElement` - The root of all
+  "column-like" elements. Anything that you'd place in the "columns" clause of
+  a SELECT statement (as well as order by and group by) can derive from this -
+  the object will automatically have Python "comparison" behavior.
+
+  :class:`~sqlalchemy.sql.expression.ColumnElement` classes want to have a
+  ``type`` member which is the expression's return type.
This can be established
+  at the instance level in the constructor, or at the class level if it's
+  generally constant::
+
+      class timestamp(ColumnElement):
+          type = TIMESTAMP()
+
+* :class:`~sqlalchemy.sql.functions.FunctionElement` - This is a hybrid of a
+  ``ColumnElement`` and a "from clause" like object, and represents a SQL
+  function or stored procedure type of call. Since most databases support
+  statements along the line of "SELECT <some function> FROM <some table>",
+  ``FunctionElement`` adds in the ability to be used in the FROM clause of a
+  ``select()`` construct::
+
+      from sqlalchemy.sql.expression import FunctionElement
+
+      class coalesce(FunctionElement):
+          name = 'coalesce'
+
+      @compiles(coalesce)
+      def compile(element, compiler, **kw):
+          return "coalesce(%s)" % compiler.process(element.clauses)
+
+      @compiles(coalesce, 'oracle')
+      def compile(element, compiler, **kw):
+          if len(element.clauses) > 2:
+              raise TypeError("coalesce only supports two arguments on Oracle")
+          return "nvl(%s)" % compiler.process(element.clauses)
+
+* :class:`~sqlalchemy.schema.DDLElement` - The root of all DDL expressions,
+  like CREATE TABLE, ALTER TABLE, etc. Compilation of ``DDLElement``
+  subclasses is issued by a ``DDLCompiler`` instead of a ``SQLCompiler``.
+  ``DDLElement`` also features ``Table`` and ``MetaData`` event hooks via the
+  ``execute_at()`` method, allowing the construct to be invoked during CREATE
+  TABLE and DROP TABLE sequences.
+
+* :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which
+  should be used with any expression class that represents a "standalone"
+  SQL statement that can be passed directly to an ``execute()`` method. It
+  is already implicit within ``DDLElement`` and ``FunctionElement``.
+
+Further Examples
+================
+
+"UTC timestamp" function
+-------------------------
+
+A function that works like "CURRENT_TIMESTAMP" except that it applies the
+appropriate conversions so that the time is in UTC time. Timestamps are best
+stored in relational databases as UTC, without time zones. UTC so that your
+database doesn't think time has gone backwards in the hour when daylight
+savings ends, without timezones because timezones are like character
+encodings - they're best applied only at the endpoints of an application
+(i.e. convert to UTC upon user input, re-apply desired timezone upon display).
+
+For Postgresql and Microsoft SQL Server::
+
+    from sqlalchemy.sql import expression
+    from sqlalchemy.ext.compiler import compiles
+    from sqlalchemy.types import DateTime
+
+    class utcnow(expression.FunctionElement):
+        type = DateTime()
+
+    @compiles(utcnow, 'postgresql')
+    def pg_utcnow(element, compiler, **kw):
+        return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
+
+    @compiles(utcnow, 'mssql')
+    def ms_utcnow(element, compiler, **kw):
+        return "GETUTCDATE()"
+
+Example usage::
+
+    from sqlalchemy import (
+        Table, Column, Integer, String, DateTime, MetaData
+    )
+    metadata = MetaData()
+    event = Table("event", metadata,
+        Column("id", Integer, primary_key=True),
+        Column("description", String(50), nullable=False),
+        Column("timestamp", DateTime, server_default=utcnow())
+    )
+
+"GREATEST" function
+-------------------
+
+The "GREATEST" function is given any number of arguments and returns the one
+that is of the highest value - it's equivalent to Python's ``max``
+function.
A SQL standard version versus a CASE based version which only +accommodates two arguments:: + + from sqlalchemy.sql import expression + from sqlalchemy.ext.compiler import compiles + from sqlalchemy.types import Numeric + + class greatest(expression.FunctionElement): + type = Numeric() + name = 'greatest' + + @compiles(greatest) + def default_greatest(element, compiler, **kw): + return compiler.visit_function(element) + + @compiles(greatest, 'sqlite') + @compiles(greatest, 'mssql') + @compiles(greatest, 'oracle') + def case_greatest(element, compiler, **kw): + arg1, arg2 = list(element.clauses) + return "CASE WHEN %s > %s THEN %s ELSE %s END" % ( + compiler.process(arg1), + compiler.process(arg2), + compiler.process(arg1), + compiler.process(arg2), + ) + +Example usage:: + + Session.query(Account).\\ + filter( + greatest( + Account.checking_balance, + Account.savings_balance) > 10000 + ) + +"false" expression +------------------ + +Render a "false" constant expression, rendering as "0" on platforms that +don't have a "false" constant:: + + from sqlalchemy.sql import expression + from sqlalchemy.ext.compiler import compiles + + class sql_false(expression.ColumnElement): + pass + + @compiles(sql_false) + def default_false(element, compiler, **kw): + return "false" + + @compiles(sql_false, 'mssql') + @compiles(sql_false, 'mysql') + @compiles(sql_false, 'oracle') + def int_false(element, compiler, **kw): + return "0" + +Example usage:: + + from sqlalchemy import select, union_all + + exp = union_all( + select([users.c.name, sql_false().label("enrolled")]), + select([customers.c.name, customers.c.enrolled]) + ) + +""" +from .. import exc +from ..sql import visitors + + +def compiles(class_, *specs): + """Register a function as a compiler for a + given :class:`.ClauseElement` type.""" + + def decorate(fn): + existing = class_.__dict__.get('_compiler_dispatcher', None) + existing_dispatch = class_.__dict__.get('_compiler_dispatch') + if not existing: + existing = _dispatcher() + + if existing_dispatch: + existing.specs['default'] = existing_dispatch + + # TODO: why is the lambda needed ? + setattr(class_, '_compiler_dispatch', + lambda *arg, **kw: existing(*arg, **kw)) + setattr(class_, '_compiler_dispatcher', existing) + + if specs: + for s in specs: + existing.specs[s] = fn + + else: + existing.specs['default'] = fn + return fn + return decorate + + +def deregister(class_): + """Remove all custom compilers associated with a given + :class:`.ClauseElement` type.""" + + if hasattr(class_, '_compiler_dispatcher'): + # regenerate default _compiler_dispatch + visitors._generate_dispatch(class_) + # remove custom directive + del class_._compiler_dispatcher + + +class _dispatcher(object): + def __init__(self): + self.specs = {} + + def __call__(self, element, compiler, **kw): + # TODO: yes, this could also switch off of DBAPI in use. + fn = self.specs.get(compiler.dialect.name, None) + if not fn: + try: + fn = self.specs['default'] + except KeyError: + raise exc.CompileError( + "%s construct has no default " + "compilation handler." 
% type(element))
+        return fn(element, compiler, **kw)
diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/declarative/__init__.py b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/__init__.py
new file mode 100644
index 0000000..f96a402
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/__init__.py
@@ -0,0 +1,18 @@
+# ext/declarative/__init__.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from .api import declarative_base, synonym_for, comparable_using, \
+    instrument_declarative, ConcreteBase, AbstractConcreteBase, \
+    DeclarativeMeta, DeferredReflection, has_inherited_table,\
+    declared_attr, as_declarative
+
+
+__all__ = ['declarative_base', 'synonym_for', 'has_inherited_table',
+           'comparable_using', 'instrument_declarative', 'declared_attr',
+           'as_declarative',
+           'ConcreteBase', 'AbstractConcreteBase', 'DeclarativeMeta',
+           'DeferredReflection']
diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/declarative/api.py b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/api.py
new file mode 100644
index 0000000..54e78ee
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/api.py
@@ -0,0 +1,687 @@
+# ext/declarative/api.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""Public API functions and helpers for declarative."""
+
+
+from ...schema import Table, MetaData, Column
+from ...orm import synonym as _orm_synonym, \
+    comparable_property,\
+    interfaces, properties, attributes
+from ...orm.util import polymorphic_union
+from ...orm.base import _mapper_or_none
+from ...util import OrderedDict, hybridmethod, hybridproperty
+from ... import util
+from ... import exc
+import weakref
+
+from .base import _as_declarative, \
+    _declarative_constructor,\
+    _DeferredMapperConfig, _add_attribute
+from .clsregistry import _class_resolver
+
+
+def instrument_declarative(cls, registry, metadata):
+    """Given a class, configure the class declaratively,
+    using the given registry, which can be any dictionary, and
+    MetaData object.
+
+    """
+    if '_decl_class_registry' in cls.__dict__:
+        raise exc.InvalidRequestError(
+            "Class %r already has been "
+            "instrumented declaratively" % cls)
+    cls._decl_class_registry = registry
+    cls.metadata = metadata
+    _as_declarative(cls, cls.__name__, cls.__dict__)
+
+
+def has_inherited_table(cls):
+    """Given a class, return True if any of the classes it inherits from has a
+    mapped table, otherwise return False.
+    """
+    for class_ in cls.__mro__[1:]:
+        if getattr(class_, '__table__', None) is not None:
+            return True
+    return False
+
+
+class DeclarativeMeta(type):
+    def __init__(cls, classname, bases, dict_):
+        if '_decl_class_registry' not in cls.__dict__:
+            _as_declarative(cls, classname, cls.__dict__)
+        type.__init__(cls, classname, bases, dict_)
+
+    def __setattr__(cls, key, value):
+        _add_attribute(cls, key, value)
+
+
+def synonym_for(name, map_column=False):
+    """Decorator, make a Python @property a query synonym for a column.
+
+    A decorator version of :func:`~sqlalchemy.orm.synonym`.
The function being + decorated is the 'descriptor', otherwise passes its arguments through to + synonym():: + + @synonym_for('col') + @property + def prop(self): + return 'special sauce' + + The regular ``synonym()`` is also usable directly in a declarative setting + and may be convenient for read/write properties:: + + prop = synonym('col', descriptor=property(_read_prop, _write_prop)) + + """ + def decorate(fn): + return _orm_synonym(name, map_column=map_column, descriptor=fn) + return decorate + + +def comparable_using(comparator_factory): + """Decorator, allow a Python @property to be used in query criteria. + + This is a decorator front end to + :func:`~sqlalchemy.orm.comparable_property` that passes + through the comparator_factory and the function being decorated:: + + @comparable_using(MyComparatorType) + @property + def prop(self): + return 'special sauce' + + The regular ``comparable_property()`` is also usable directly in a + declarative setting and may be convenient for read/write properties:: + + prop = comparable_property(MyComparatorType) + + """ + def decorate(fn): + return comparable_property(comparator_factory, fn) + return decorate + + +class declared_attr(interfaces._MappedAttribute, property): + """Mark a class-level method as representing the definition of + a mapped property or special declarative member name. + + @declared_attr turns the attribute into a scalar-like + property that can be invoked from the uninstantiated class. + Declarative treats attributes specifically marked with + @declared_attr as returning a construct that is specific + to mapping or declarative table configuration. The name + of the attribute is that of what the non-dynamic version + of the attribute would be. + + @declared_attr is more often than not applicable to mixins, + to define relationships that are to be applied to different + implementors of the class:: + + class ProvidesUser(object): + "A mixin that adds a 'user' relationship to classes." + + @declared_attr + def user(self): + return relationship("User") + + It also can be applied to mapped classes, such as to provide + a "polymorphic" scheme for inheritance:: + + class Employee(Base): + id = Column(Integer, primary_key=True) + type = Column(String(50), nullable=False) + + @declared_attr + def __tablename__(cls): + return cls.__name__.lower() + + @declared_attr + def __mapper_args__(cls): + if cls.__name__ == 'Employee': + return { + "polymorphic_on":cls.type, + "polymorphic_identity":"Employee" + } + else: + return {"polymorphic_identity":cls.__name__} + + .. versionchanged:: 0.8 :class:`.declared_attr` can be used with + non-ORM or extension attributes, such as user-defined attributes + or :func:`.association_proxy` objects, which will be assigned + to the class at class construction time. 
+
+
+    """
+
+    def __init__(self, fget, cascading=False):
+        super(declared_attr, self).__init__(fget)
+        self.__doc__ = fget.__doc__
+        self._cascading = cascading
+
+    def __get__(desc, self, cls):
+        reg = cls.__dict__.get('_sa_declared_attr_reg', None)
+        if reg is None:
+            manager = attributes.manager_of_class(cls)
+            if manager is None:
+                util.warn(
+                    "Unmanaged access of declarative attribute %s from "
+                    "non-mapped class %s" %
+                    (desc.fget.__name__, cls.__name__))
+            return desc.fget(cls)
+        elif desc in reg:
+            return reg[desc]
+        else:
+            reg[desc] = obj = desc.fget(cls)
+            return obj
+
+    @hybridmethod
+    def _stateful(cls, **kw):
+        return _stateful_declared_attr(**kw)
+
+    @hybridproperty
+    def cascading(cls):
+        """Mark a :class:`.declared_attr` as cascading.
+
+        This is a special-use modifier which indicates that a column
+        or MapperProperty-based declared attribute should be configured
+        distinctly per mapped subclass, within a mapped-inheritance scenario.
+
+        Below, both MyClass as well as MySubClass will have a distinct
+        ``id`` Column object established::
+
+            class HasSomeAttribute(object):
+                @declared_attr.cascading
+                def some_id(cls):
+                    if has_inherited_table(cls):
+                        return Column(
+                            ForeignKey('myclass.id'), primary_key=True)
+                    else:
+                        return Column(Integer, primary_key=True)
+
+            class MyClass(HasSomeAttribute, Base):
+                ""
+                # ...
+
+            class MySubClass(MyClass):
+                ""
+                # ...
+
+        The behavior of the above configuration is that ``MySubClass``
+        will refer to both its own ``id`` column as well as that of
+        ``MyClass`` underneath the attribute named ``some_id``.
+
+        .. seealso::
+
+            :ref:`declarative_inheritance`
+
+            :ref:`mixin_inheritance_columns`
+
+
+        """
+        return cls._stateful(cascading=True)
+
+
+class _stateful_declared_attr(declared_attr):
+    def __init__(self, **kw):
+        self.kw = kw
+
+    def _stateful(self, **kw):
+        new_kw = self.kw.copy()
+        new_kw.update(kw)
+        return _stateful_declared_attr(**new_kw)
+
+    def __call__(self, fn):
+        return declared_attr(fn, **self.kw)
+
+
+def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
+                     name='Base', constructor=_declarative_constructor,
+                     class_registry=None,
+                     metaclass=DeclarativeMeta):
+    """Construct a base class for declarative class definitions.
+
+    The new base class will be given a metaclass that produces
+    appropriate :class:`~sqlalchemy.schema.Table` objects and makes
+    the appropriate :func:`~sqlalchemy.orm.mapper` calls based on the
+    information provided declaratively in the class and any subclasses
+    of the class.
+
+    :param bind: An optional
+      :class:`~sqlalchemy.engine.Connectable`, will be assigned
+      the ``bind`` attribute on the :class:`~sqlalchemy.schema.MetaData`
+      instance.
+
+    :param metadata:
+      An optional :class:`~sqlalchemy.schema.MetaData` instance. All
+      :class:`~sqlalchemy.schema.Table` objects implicitly declared by
+      subclasses of the base will share this MetaData. A MetaData instance
+      will be created if none is provided. The
+      :class:`~sqlalchemy.schema.MetaData` instance will be available via the
+      `metadata` attribute of the generated declarative base class.
+
+    :param mapper:
+      An optional callable, defaults to :func:`~sqlalchemy.orm.mapper`. Will
+      be used to map subclasses to their Tables.
+
+    :param cls:
+      Defaults to :class:`object`. A type to use as the base for the generated
+      declarative base class. May be a class or tuple of classes.
+
+    :param name:
+      Defaults to ``Base``.
The display name for the generated + class. Customizing this is not required, but can improve clarity in + tracebacks and debugging. + + :param constructor: + Defaults to + :func:`~sqlalchemy.ext.declarative._declarative_constructor`, an + __init__ implementation that assigns \**kwargs for declared + fields and relationships to an instance. If ``None`` is supplied, + no __init__ will be provided and construction will fall back to + cls.__init__ by way of the normal Python semantics. + + :param class_registry: optional dictionary that will serve as the + registry of class names-> mapped classes when string names + are used to identify classes inside of :func:`.relationship` + and others. Allows two or more declarative base classes + to share the same registry of class names for simplified + inter-base relationships. + + :param metaclass: + Defaults to :class:`.DeclarativeMeta`. A metaclass or __metaclass__ + compatible callable to use as the meta type of the generated + declarative base class. + + .. seealso:: + + :func:`.as_declarative` + + """ + lcl_metadata = metadata or MetaData() + if bind: + lcl_metadata.bind = bind + + if class_registry is None: + class_registry = weakref.WeakValueDictionary() + + bases = not isinstance(cls, tuple) and (cls,) or cls + class_dict = dict(_decl_class_registry=class_registry, + metadata=lcl_metadata) + + if constructor: + class_dict['__init__'] = constructor + if mapper: + class_dict['__mapper_cls__'] = mapper + + return metaclass(name, bases, class_dict) + + +def as_declarative(**kw): + """ + Class decorator for :func:`.declarative_base`. + + Provides a syntactical shortcut to the ``cls`` argument + sent to :func:`.declarative_base`, allowing the base class + to be converted in-place to a "declarative" base:: + + from sqlalchemy.ext.declarative import as_declarative + + @as_declarative() + class Base(object): + @declared_attr + def __tablename__(cls): + return cls.__name__.lower() + id = Column(Integer, primary_key=True) + + class MyMappedClass(Base): + # ... + + All keyword arguments passed to :func:`.as_declarative` are passed + along to :func:`.declarative_base`. + + .. versionadded:: 0.8.3 + + .. seealso:: + + :func:`.declarative_base` + + """ + def decorate(cls): + kw['cls'] = cls + kw['name'] = cls.__name__ + return declarative_base(**kw) + + return decorate + + +class ConcreteBase(object): + """A helper class for 'concrete' declarative mappings. + + :class:`.ConcreteBase` will use the :func:`.polymorphic_union` + function automatically, against all tables mapped as a subclass + to this class. The function is called via the + ``__declare_last__()`` function, which is essentially + a hook for the :meth:`.after_configured` event. + + :class:`.ConcreteBase` produces a mapped + table for the class itself. Compare to :class:`.AbstractConcreteBase`, + which does not. + + Example:: + + from sqlalchemy.ext.declarative import ConcreteBase + + class Employee(ConcreteBase, Base): + __tablename__ = 'employee' + employee_id = Column(Integer, primary_key=True) + name = Column(String(50)) + __mapper_args__ = { + 'polymorphic_identity':'employee', + 'concrete':True} + + class Manager(Employee): + __tablename__ = 'manager' + employee_id = Column(Integer, primary_key=True) + name = Column(String(50)) + manager_data = Column(String(40)) + __mapper_args__ = { + 'polymorphic_identity':'manager', + 'concrete':True} + + .. 
seealso::
+
+        :class:`.AbstractConcreteBase`
+
+        :ref:`concrete_inheritance`
+
+        :ref:`inheritance_concrete_helpers`
+
+
+    """
+
+    @classmethod
+    def _create_polymorphic_union(cls, mappers):
+        return polymorphic_union(OrderedDict(
+            (mp.polymorphic_identity, mp.local_table)
+            for mp in mappers
+        ), 'type', 'pjoin')
+
+    @classmethod
+    def __declare_first__(cls):
+        m = cls.__mapper__
+        if m.with_polymorphic:
+            return
+
+        mappers = list(m.self_and_descendants)
+        pjoin = cls._create_polymorphic_union(mappers)
+        m._set_with_polymorphic(("*", pjoin))
+        m._set_polymorphic_on(pjoin.c.type)
+
+
+class AbstractConcreteBase(ConcreteBase):
+    """A helper class for 'concrete' declarative mappings.
+
+    :class:`.AbstractConcreteBase` will use the :func:`.polymorphic_union`
+    function automatically, against all tables mapped as a subclass
+    to this class. The function is called via the
+    ``__declare_last__()`` function, which is essentially
+    a hook for the :meth:`.after_configured` event.
+
+    :class:`.AbstractConcreteBase` does produce a mapped class
+    for the base class, however it is not persisted to any table; it
+    is instead mapped directly to the "polymorphic" selectable
+    and is only used for selecting. Compare to :class:`.ConcreteBase`,
+    which does create a persisted table for the base class.
+
+    Example::
+
+        from sqlalchemy.ext.declarative import AbstractConcreteBase
+
+        class Employee(AbstractConcreteBase, Base):
+            pass
+
+        class Manager(Employee):
+            __tablename__ = 'manager'
+            employee_id = Column(Integer, primary_key=True)
+            name = Column(String(50))
+            manager_data = Column(String(40))
+
+            __mapper_args__ = {
+                'polymorphic_identity':'manager',
+                'concrete':True}
+
+    The abstract base class is handled by declarative in a special way;
+    at class configuration time, it behaves like a declarative mixin
+    or an ``__abstract__`` base class. Once classes are configured
+    and mappings are produced, it then gets mapped itself, but
+    after all of its descendants. This is a unique system of mapping
+    not found in any other SQLAlchemy system.
+
+    Using this approach, we can specify columns and properties
+    that will take place on mapped subclasses, in the way that
+    we normally do as in :ref:`declarative_mixins`::
+
+        class Company(Base):
+            __tablename__ = 'company'
+            id = Column(Integer, primary_key=True)
+
+        class Employee(AbstractConcreteBase, Base):
+            employee_id = Column(Integer, primary_key=True)
+
+            @declared_attr
+            def company_id(cls):
+                return Column(ForeignKey('company.id'))
+
+            @declared_attr
+            def company(cls):
+                return relationship("Company")
+
+        class Manager(Employee):
+            __tablename__ = 'manager'
+
+            name = Column(String(50))
+            manager_data = Column(String(40))
+
+            __mapper_args__ = {
+                'polymorphic_identity':'manager',
+                'concrete':True}
+
+    When we make use of our mappings however, both ``Manager`` and
+    ``Employee`` will have an independently usable ``.company`` attribute::
+
+        session.query(Employee).filter(Employee.company.has(id=5))
+
+    .. versionchanged:: 1.0.0 - The mechanics of :class:`.AbstractConcreteBase`
+       have been reworked to support relationships established directly
+       on the abstract base, without any special configurational steps.
+
+    ..
seealso::
+
+        :class:`.ConcreteBase`
+
+        :ref:`concrete_inheritance`
+
+        :ref:`inheritance_concrete_helpers`
+
+    """
+
+    __no_table__ = True
+
+    @classmethod
+    def __declare_first__(cls):
+        cls._sa_decl_prepare_nocascade()
+
+    @classmethod
+    def _sa_decl_prepare_nocascade(cls):
+        if getattr(cls, '__mapper__', None):
+            return
+
+        to_map = _DeferredMapperConfig.config_for_cls(cls)
+
+        # can't rely on 'self_and_descendants' here
+        # since technically an immediate subclass
+        # might not be mapped, but a subclass
+        # may be.
+        mappers = []
+        stack = list(cls.__subclasses__())
+        while stack:
+            klass = stack.pop()
+            stack.extend(klass.__subclasses__())
+            mn = _mapper_or_none(klass)
+            if mn is not None:
+                mappers.append(mn)
+        pjoin = cls._create_polymorphic_union(mappers)
+
+        # For columns that were declared on the class, these
+        # are normally ignored with the "__no_table__" mapping,
+        # unless they have a different attribute key vs. col name
+        # and are in the properties argument.
+        # In that case, ensure we update the properties entry
+        # to the correct column from the pjoin target table.
+        declared_cols = set(to_map.declared_columns)
+        for k, v in list(to_map.properties.items()):
+            if v in declared_cols:
+                to_map.properties[k] = pjoin.c[v.key]
+
+        to_map.local_table = pjoin
+
+        m_args = to_map.mapper_args_fn or dict
+
+        def mapper_args():
+            args = m_args()
+            args['polymorphic_on'] = pjoin.c.type
+            return args
+        to_map.mapper_args_fn = mapper_args
+
+        m = to_map.map()
+
+        for scls in cls.__subclasses__():
+            sm = _mapper_or_none(scls)
+            if sm and sm.concrete and cls in scls.__bases__:
+                sm._set_concrete_base(m)
+
+
+class DeferredReflection(object):
+    """A helper class for construction of mappings based on
+    a deferred reflection step.
+
+    Normally, declarative can be used with reflection by
+    setting a :class:`.Table` object using autoload=True
+    as the ``__table__`` attribute on a declarative class.
+    The caveat is that the :class:`.Table` must be fully
+    reflected, or at the very least have a primary key column,
+    at the point at which a normal declarative mapping is
+    constructed, meaning the :class:`.Engine` must be available
+    at class declaration time.
+
+    The :class:`.DeferredReflection` mixin moves the construction
+    of mappers to be at a later point, after a specific
+    method is called which first reflects all :class:`.Table`
+    objects created so far. Classes can define it as such::
+
+        from sqlalchemy.ext.declarative import declarative_base
+        from sqlalchemy.ext.declarative import DeferredReflection
+        Base = declarative_base()
+
+        class MyClass(DeferredReflection, Base):
+            __tablename__ = 'mytable'
+
+    Above, ``MyClass`` is not yet mapped. After a series of
+    classes have been defined in the above fashion, all tables
+    can be reflected and mappings created using
+    :meth:`.prepare`::
+
+        engine = create_engine("someengine://...")
+        DeferredReflection.prepare(engine)
+
+    The :class:`.DeferredReflection` mixin can be applied to individual
+    classes, used as the base for the declarative base itself,
+    or used in a custom abstract class. Using an abstract base
+    allows only a subset of classes to be prepared for a
+    particular prepare step, which is necessary for applications
+    that use more than one engine.
For example, if an application
+    has two engines, you might use two bases, and prepare each
+    separately, e.g.::
+
+        class ReflectedOne(DeferredReflection, Base):
+            __abstract__ = True
+
+        class ReflectedTwo(DeferredReflection, Base):
+            __abstract__ = True
+
+        class MyClass(ReflectedOne):
+            __tablename__ = 'mytable'
+
+        class MyOtherClass(ReflectedOne):
+            __tablename__ = 'myothertable'
+
+        class YetAnotherClass(ReflectedTwo):
+            __tablename__ = 'yetanothertable'
+
+        # ... etc.
+
+    Above, the class hierarchies for ``ReflectedOne`` and
+    ``ReflectedTwo`` can be configured separately::
+
+        ReflectedOne.prepare(engine_one)
+        ReflectedTwo.prepare(engine_two)
+
+    .. versionadded:: 0.8
+
+    """
+    @classmethod
+    def prepare(cls, engine):
+        """Reflect all :class:`.Table` objects for all current
+        :class:`.DeferredReflection` subclasses."""
+
+        to_map = _DeferredMapperConfig.classes_for_base(cls)
+        for thingy in to_map:
+            cls._sa_decl_prepare(thingy.local_table, engine)
+            thingy.map()
+            mapper = thingy.cls.__mapper__
+            metadata = mapper.class_.metadata
+            for rel in mapper._props.values():
+                if isinstance(rel, properties.RelationshipProperty) and \
+                        rel.secondary is not None:
+                    if isinstance(rel.secondary, Table):
+                        cls._reflect_table(rel.secondary, engine)
+                    elif isinstance(rel.secondary, _class_resolver):
+                        rel.secondary._resolvers += (
+                            cls._sa_deferred_table_resolver(engine, metadata),
+                        )
+
+    @classmethod
+    def _sa_deferred_table_resolver(cls, engine, metadata):
+        def _resolve(key):
+            t1 = Table(key, metadata)
+            cls._reflect_table(t1, engine)
+            return t1
+        return _resolve
+
+    @classmethod
+    def _sa_decl_prepare(cls, local_table, engine):
+        # autoload Table, which is already
+        # present in the metadata. This
+        # will fill in db-loaded columns
+        # into the existing Table object.
+        if local_table is not None:
+            cls._reflect_table(local_table, engine)
+
+    @classmethod
+    def _reflect_table(cls, table, engine):
+        Table(table.name,
+              table.metadata,
+              extend_existing=True,
+              autoload_replace=False,
+              autoload=True,
+              autoload_with=engine,
+              schema=table.schema)
diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/declarative/base.py b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/base.py
new file mode 100644
index 0000000..59ebe37
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/base.py
@@ -0,0 +1,657 @@
+# ext/declarative/base.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""Internal implementation for declarative."""
+
+from ...schema import Table, Column
+from ...orm import mapper, class_mapper, synonym
+from ...orm.interfaces import MapperProperty
+from ...orm.properties import ColumnProperty, CompositeProperty
+from ...orm.attributes import QueryableAttribute
+from ...orm.base import _is_mapped_class
+from ... import util, exc
+from ...util import topological
+from ...sql import expression
+from ... import event
+from .
import clsregistry +import collections +import weakref +from sqlalchemy.orm import instrumentation + +declared_attr = declarative_props = None + + +def _declared_mapping_info(cls): + # deferred mapping + if _DeferredMapperConfig.has_cls(cls): + return _DeferredMapperConfig.config_for_cls(cls) + # regular mapping + elif _is_mapped_class(cls): + return class_mapper(cls, configure=False) + else: + return None + + +def _resolve_for_abstract(cls): + if cls is object: + return None + + if _get_immediate_cls_attr(cls, '__abstract__', strict=True): + for sup in cls.__bases__: + sup = _resolve_for_abstract(sup) + if sup is not None: + return sup + else: + return None + else: + return cls + + +def _get_immediate_cls_attr(cls, attrname, strict=False): + """return an attribute of the class that is either present directly + on the class, e.g. not on a superclass, or is from a superclass but + this superclass is a mixin, that is, not a descendant of + the declarative base. + + This is used to detect attributes that indicate something about + a mapped class independently from any mapped classes that it may + inherit from. + + """ + if not issubclass(cls, object): + return None + + for base in cls.__mro__: + _is_declarative_inherits = hasattr(base, '_decl_class_registry') + if attrname in base.__dict__ and ( + base is cls or + ((base in cls.__bases__ if strict else True) + and not _is_declarative_inherits) + ): + return getattr(base, attrname) + else: + return None + + +def _as_declarative(cls, classname, dict_): + global declared_attr, declarative_props + if declared_attr is None: + from .api import declared_attr + declarative_props = (declared_attr, util.classproperty) + + if _get_immediate_cls_attr(cls, '__abstract__', strict=True): + return + + _MapperConfig.setup_mapping(cls, classname, dict_) + + +class _MapperConfig(object): + + @classmethod + def setup_mapping(cls, cls_, classname, dict_): + defer_map = _get_immediate_cls_attr( + cls_, '_sa_decl_prepare_nocascade', strict=True) or \ + hasattr(cls_, '_sa_decl_prepare') + + if defer_map: + cfg_cls = _DeferredMapperConfig + else: + cfg_cls = _MapperConfig + cfg_cls(cls_, classname, dict_) + + def __init__(self, cls_, classname, dict_): + + self.cls = cls_ + + # dict_ will be a dictproxy, which we can't write to, and we need to! + self.dict_ = dict(dict_) + self.classname = classname + self.mapped_table = None + self.properties = util.OrderedDict() + self.declared_columns = set() + self.column_copies = {} + self._setup_declared_events() + + # temporary registry. While early 1.0 versions + # set up the ClassManager here, by API contract + # we can't do that until there's a mapper. 
+
+        self.cls._sa_declared_attr_reg = {}
+
+        self._scan_attributes()
+
+        clsregistry.add_class(self.classname, self.cls)
+
+        self._extract_mappable_attributes()
+
+        self._extract_declared_columns()
+
+        self._setup_table()
+
+        self._setup_inheritance()
+
+        self._early_mapping()
+
+    def _early_mapping(self):
+        self.map()
+
+    def _setup_declared_events(self):
+        if _get_immediate_cls_attr(self.cls, '__declare_last__'):
+            @event.listens_for(mapper, "after_configured")
+            def after_configured():
+                self.cls.__declare_last__()
+
+        if _get_immediate_cls_attr(self.cls, '__declare_first__'):
+            @event.listens_for(mapper, "before_configured")
+            def before_configured():
+                self.cls.__declare_first__()
+
+    def _scan_attributes(self):
+        cls = self.cls
+        dict_ = self.dict_
+        column_copies = self.column_copies
+        mapper_args_fn = None
+        table_args = inherited_table_args = None
+        tablename = None
+
+        for base in cls.__mro__:
+            class_mapped = base is not cls and \
+                _declared_mapping_info(base) is not None and \
+                not _get_immediate_cls_attr(
+                    base, '_sa_decl_prepare_nocascade', strict=True)
+
+            if not class_mapped and base is not cls:
+                self._produce_column_copies(base)
+
+            for name, obj in vars(base).items():
+                if name == '__mapper_args__':
+                    if not mapper_args_fn and (
+                        not class_mapped or
+                        isinstance(obj, declarative_props)
+                    ):
+                        # don't even invoke __mapper_args__ until
+                        # after we've determined everything about the
+                        # mapped table.
+                        # make a copy of it so a class-level dictionary
+                        # is not overwritten when we update column-based
+                        # arguments.
+                        mapper_args_fn = lambda: dict(cls.__mapper_args__)
+                elif name == '__tablename__':
+                    if not tablename and (
+                        not class_mapped or
+                        isinstance(obj, declarative_props)
+                    ):
+                        tablename = cls.__tablename__
+                elif name == '__table_args__':
+                    if not table_args and (
+                        not class_mapped or
+                        isinstance(obj, declarative_props)
+                    ):
+                        table_args = cls.__table_args__
+                        if not isinstance(
+                                table_args, (tuple, dict, type(None))):
+                            raise exc.ArgumentError(
+                                "__table_args__ value must be a tuple, "
+                                "dict, or None")
+                        if base is not cls:
+                            inherited_table_args = True
+                elif class_mapped:
+                    if isinstance(obj, declarative_props):
+                        util.warn("Regular (i.e. not __special__) "
+                                  "attribute '%s.%s' uses @declared_attr, "
+                                  "but owning class %s is mapped - "
+                                  "not applying to subclass %s."
+                                  % (base.__name__, name, base, cls))
+
+                    continue
+                elif base is not cls:
+                    # we're a mixin, abstract base, or something that is
+                    # acting like that for now.
+                    if isinstance(obj, Column):
+                        # already copied columns to the mapped class.
+                        continue
+                    elif isinstance(obj, MapperProperty):
+                        raise exc.InvalidRequestError(
+                            "Mapper properties (i.e. deferred, "
+                            "column_property(), relationship(), etc.)
must " + "be declared as @declared_attr callables " + "on declarative mixin classes.") + elif isinstance(obj, declarative_props): + oldclassprop = isinstance(obj, util.classproperty) + if not oldclassprop and obj._cascading: + dict_[name] = column_copies[obj] = \ + ret = obj.__get__(obj, cls) + setattr(cls, name, ret) + else: + if oldclassprop: + util.warn_deprecated( + "Use of sqlalchemy.util.classproperty on " + "declarative classes is deprecated.") + dict_[name] = column_copies[obj] = \ + ret = getattr(cls, name) + if isinstance(ret, (Column, MapperProperty)) and \ + ret.doc is None: + ret.doc = obj.__doc__ + + if inherited_table_args and not tablename: + table_args = None + + self.table_args = table_args + self.tablename = tablename + self.mapper_args_fn = mapper_args_fn + + def _produce_column_copies(self, base): + cls = self.cls + dict_ = self.dict_ + column_copies = self.column_copies + # copy mixin columns to the mapped class + for name, obj in vars(base).items(): + if isinstance(obj, Column): + if getattr(cls, name) is not obj: + # if column has been overridden + # (like by the InstrumentedAttribute of the + # superclass), skip + continue + elif obj.foreign_keys: + raise exc.InvalidRequestError( + "Columns with foreign keys to other columns " + "must be declared as @declared_attr callables " + "on declarative mixin classes. ") + elif name not in dict_ and not ( + '__table__' in dict_ and + (obj.name or name) in dict_['__table__'].c + ): + column_copies[obj] = copy_ = obj.copy() + copy_._creation_order = obj._creation_order + setattr(cls, name, copy_) + dict_[name] = copy_ + + def _extract_mappable_attributes(self): + cls = self.cls + dict_ = self.dict_ + + our_stuff = self.properties + + for k in list(dict_): + + if k in ('__table__', '__tablename__', '__mapper_args__'): + continue + + value = dict_[k] + if isinstance(value, declarative_props): + value = getattr(cls, k) + + elif isinstance(value, QueryableAttribute) and \ + value.class_ is not cls and \ + value.key != k: + # detect a QueryableAttribute that's already mapped being + # assigned elsewhere in userland, turn into a synonym() + value = synonym(value.key) + setattr(cls, k, value) + + if (isinstance(value, tuple) and len(value) == 1 and + isinstance(value[0], (Column, MapperProperty))): + util.warn("Ignoring declarative-like tuple value of attribute " + "%s: possibly a copy-and-paste error with a comma " + "left at the end of the line?" % k) + continue + elif not isinstance(value, (Column, MapperProperty)): + # using @declared_attr for some object that + # isn't Column/MapperProperty; remove from the dict_ + # and place the evaluated value onto the class. + if not k.startswith('__'): + dict_.pop(k) + setattr(cls, k, value) + continue + # we expect to see the name 'metadata' in some valid cases; + # however at this point we see it's assigned to something trying + # to be mapped, so raise for that. + elif k == 'metadata': + raise exc.InvalidRequestError( + "Attribute name 'metadata' is reserved " + "for the MetaData instance when using a " + "declarative base class." 
+ ) + prop = clsregistry._deferred_relationship(cls, value) + our_stuff[k] = prop + + def _extract_declared_columns(self): + our_stuff = self.properties + + # set up attributes in the order they were created + our_stuff.sort(key=lambda key: our_stuff[key]._creation_order) + + # extract columns from the class dict + declared_columns = self.declared_columns + name_to_prop_key = collections.defaultdict(set) + for key, c in list(our_stuff.items()): + if isinstance(c, (ColumnProperty, CompositeProperty)): + for col in c.columns: + if isinstance(col, Column) and \ + col.table is None: + _undefer_column_name(key, col) + if not isinstance(c, CompositeProperty): + name_to_prop_key[col.name].add(key) + declared_columns.add(col) + elif isinstance(c, Column): + _undefer_column_name(key, c) + name_to_prop_key[c.name].add(key) + declared_columns.add(c) + # if the column is the same name as the key, + # remove it from the explicit properties dict. + # the normal rules for assigning column-based properties + # will take over, including precedence of columns + # in multi-column ColumnProperties. + if key == c.key: + del our_stuff[key] + + for name, keys in name_to_prop_key.items(): + if len(keys) > 1: + util.warn( + "On class %r, Column object %r named " + "directly multiple times, " + "only one will be used: %s" % + (self.classname, name, (", ".join(sorted(keys)))) + ) + + def _setup_table(self): + cls = self.cls + tablename = self.tablename + table_args = self.table_args + dict_ = self.dict_ + declared_columns = self.declared_columns + + declared_columns = self.declared_columns = sorted( + declared_columns, key=lambda c: c._creation_order) + table = None + + if hasattr(cls, '__table_cls__'): + table_cls = util.unbound_method_to_callable(cls.__table_cls__) + else: + table_cls = Table + + if '__table__' not in dict_: + if tablename is not None: + + args, table_kw = (), {} + if table_args: + if isinstance(table_args, dict): + table_kw = table_args + elif isinstance(table_args, tuple): + if isinstance(table_args[-1], dict): + args, table_kw = table_args[0:-1], table_args[-1] + else: + args = table_args + + autoload = dict_.get('__autoload__') + if autoload: + table_kw['autoload'] = True + + cls.__table__ = table = table_cls( + tablename, cls.metadata, + *(tuple(declared_columns) + tuple(args)), + **table_kw) + else: + table = cls.__table__ + if declared_columns: + for c in declared_columns: + if not table.c.contains_column(c): + raise exc.ArgumentError( + "Can't add additional column %r when " + "specifying __table__" % c.key + ) + self.local_table = table + + def _setup_inheritance(self): + table = self.local_table + cls = self.cls + table_args = self.table_args + declared_columns = self.declared_columns + for c in cls.__bases__: + c = _resolve_for_abstract(c) + if c is None: + continue + if _declared_mapping_info(c) is not None and \ + not _get_immediate_cls_attr( + c, '_sa_decl_prepare_nocascade', strict=True): + self.inherits = c + break + else: + self.inherits = None + + if table is None and self.inherits is None and \ + not _get_immediate_cls_attr(cls, '__no_table__'): + + raise exc.InvalidRequestError( + "Class %r does not have a __table__ or __tablename__ " + "specified and does not inherit from an existing " + "table-mapped class." % cls + ) + elif self.inherits: + inherited_mapper = _declared_mapping_info(self.inherits) + inherited_table = inherited_mapper.local_table + inherited_mapped_table = inherited_mapper.mapped_table + + if table is None: + # single table inheritance. 
+ # ensure no table args + if table_args: + raise exc.ArgumentError( + "Can't place __table_args__ on an inherited class " + "with no table." + ) + # add any columns declared here to the inherited table. + for c in declared_columns: + if c.primary_key: + raise exc.ArgumentError( + "Can't place primary key columns on an inherited " + "class with no table." + ) + if c.name in inherited_table.c: + if inherited_table.c[c.name] is c: + continue + raise exc.ArgumentError( + "Column '%s' on class %s conflicts with " + "existing column '%s'" % + (c, cls, inherited_table.c[c.name]) + ) + inherited_table.append_column(c) + if inherited_mapped_table is not None and \ + inherited_mapped_table is not inherited_table: + inherited_mapped_table._refresh_for_new_column(c) + + def _prepare_mapper_arguments(self): + properties = self.properties + if self.mapper_args_fn: + mapper_args = self.mapper_args_fn() + else: + mapper_args = {} + + # make sure that column copies are used rather + # than the original columns from any mixins + for k in ('version_id_col', 'polymorphic_on',): + if k in mapper_args: + v = mapper_args[k] + mapper_args[k] = self.column_copies.get(v, v) + + assert 'inherits' not in mapper_args, \ + "Can't specify 'inherits' explicitly with declarative mappings" + + if self.inherits: + mapper_args['inherits'] = self.inherits + + if self.inherits and not mapper_args.get('concrete', False): + # single or joined inheritance + # exclude any cols on the inherited table which are + # not mapped on the parent class, to avoid + # mapping columns specific to sibling/nephew classes + inherited_mapper = _declared_mapping_info(self.inherits) + inherited_table = inherited_mapper.local_table + + if 'exclude_properties' not in mapper_args: + mapper_args['exclude_properties'] = exclude_properties = \ + set([c.key for c in inherited_table.c + if c not in inherited_mapper._columntoproperty]) + exclude_properties.difference_update( + [c.key for c in self.declared_columns]) + + # look through columns in the current mapper that + # are keyed to a propname different than the colname + # (if names were the same, we'd have popped it out above, + # in which case the mapper makes this combination). + # See if the superclass has a similar column property. + # If so, join them together. + for k, col in list(properties.items()): + if not isinstance(col, expression.ColumnElement): + continue + if k in inherited_mapper._props: + p = inherited_mapper._props[k] + if isinstance(p, ColumnProperty): + # note here we place the subclass column + # first. See [ticket:1892] for background. 
+ properties[k] = [col] + p.columns + result_mapper_args = mapper_args.copy() + result_mapper_args['properties'] = properties + self.mapper_args = result_mapper_args + + def map(self): + self._prepare_mapper_arguments() + if hasattr(self.cls, '__mapper_cls__'): + mapper_cls = util.unbound_method_to_callable( + self.cls.__mapper_cls__) + else: + mapper_cls = mapper + + self.cls.__mapper__ = mp_ = mapper_cls( + self.cls, + self.local_table, + **self.mapper_args + ) + del self.cls._sa_declared_attr_reg + return mp_ + + +class _DeferredMapperConfig(_MapperConfig): + _configs = util.OrderedDict() + + def _early_mapping(self): + pass + + @property + def cls(self): + return self._cls() + + @cls.setter + def cls(self, class_): + self._cls = weakref.ref(class_, self._remove_config_cls) + self._configs[self._cls] = self + + @classmethod + def _remove_config_cls(cls, ref): + cls._configs.pop(ref, None) + + @classmethod + def has_cls(cls, class_): + # 2.6 fails on weakref if class_ is an old style class + return isinstance(class_, type) and \ + weakref.ref(class_) in cls._configs + + @classmethod + def config_for_cls(cls, class_): + return cls._configs[weakref.ref(class_)] + + @classmethod + def classes_for_base(cls, base_cls, sort=True): + classes_for_base = [m for m in cls._configs.values() + if issubclass(m.cls, base_cls)] + if not sort: + return classes_for_base + + all_m_by_cls = dict( + (m.cls, m) + for m in classes_for_base + ) + + tuples = [] + for m_cls in all_m_by_cls: + tuples.extend( + (all_m_by_cls[base_cls], all_m_by_cls[m_cls]) + for base_cls in m_cls.__bases__ + if base_cls in all_m_by_cls + ) + return list( + topological.sort( + tuples, + classes_for_base + ) + ) + + def map(self): + self._configs.pop(self._cls, None) + return super(_DeferredMapperConfig, self).map() + + +def _add_attribute(cls, key, value): + """add an attribute to an existing declarative class. + + This runs through the logic to determine MapperProperty, + adds it to the Mapper, adds a column to the mapped Table, etc. + + """ + + if '__mapper__' in cls.__dict__: + if isinstance(value, Column): + _undefer_column_name(key, value) + cls.__table__.append_column(value) + cls.__mapper__.add_property(key, value) + elif isinstance(value, ColumnProperty): + for col in value.columns: + if isinstance(col, Column) and col.table is None: + _undefer_column_name(key, col) + cls.__table__.append_column(col) + cls.__mapper__.add_property(key, value) + elif isinstance(value, MapperProperty): + cls.__mapper__.add_property( + key, + clsregistry._deferred_relationship(cls, value) + ) + elif isinstance(value, QueryableAttribute) and value.key != key: + # detect a QueryableAttribute that's already mapped being + # assigned elsewhere in userland, turn into a synonym() + value = synonym(value.key) + cls.__mapper__.add_property( + key, + clsregistry._deferred_relationship(cls, value) + ) + else: + type.__setattr__(cls, key, value) + else: + type.__setattr__(cls, key, value) + + +def _declarative_constructor(self, **kwargs): + """A simple constructor that allows initialization from kwargs. + + Sets attributes on the constructed instance using the names and + values in ``kwargs``. + + Only keys that are present as + attributes of the instance's class are allowed. These could be, + for example, any mapped columns or relationships. 
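+
+    E.g., a minimal usage sketch (assuming, purely for illustration, a
+    mapped ``User`` class with ``name`` and ``fullname`` columns)::
+
+        u1 = User(name='ed', fullname='Ed Jones')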
+
+    """
+    cls_ = type(self)
+    for k in kwargs:
+        if not hasattr(cls_, k):
+            raise TypeError(
+                "%r is an invalid keyword argument for %s" %
+                (k, cls_.__name__))
+        setattr(self, k, kwargs[k])
+_declarative_constructor.__name__ = '__init__'
+
+
+def _undefer_column_name(key, column):
+    if column.key is None:
+        column.key = key
+    if column.name is None:
+        column.name = key
diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/declarative/clsregistry.py b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/clsregistry.py
new file mode 100644
index 0000000..0d62bd2
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/clsregistry.py
@@ -0,0 +1,328 @@
+# ext/declarative/clsregistry.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""Routines to handle the string class registry used by declarative.
+
+This system allows specification of classes and expressions used in
+:func:`.relationship` using strings.
+
+"""
+from ...orm.properties import ColumnProperty, RelationshipProperty, \
+    SynonymProperty
+from ...schema import _get_table_key
+from ...orm import class_mapper, interfaces
+from ... import util
+from ... import inspection
+from ... import exc
+import weakref
+
+# strong references to registries which we place in
+# the _decl_class_registry, which is usually weak referencing.
+# the internal registries here link to classes with weakrefs and remove
+# themselves when all references to contained classes are removed.
+_registries = set()
+
+
+def add_class(classname, cls):
+    """Add a class to the _decl_class_registry associated with the
+    given declarative class.
+
+    """
+    if classname in cls._decl_class_registry:
+        # class already exists.
+        existing = cls._decl_class_registry[classname]
+        if not isinstance(existing, _MultipleClassMarker):
+            existing = \
+                cls._decl_class_registry[classname] = \
+                _MultipleClassMarker([cls, existing])
+    else:
+        cls._decl_class_registry[classname] = cls
+
+    try:
+        root_module = cls._decl_class_registry['_sa_module_registry']
+    except KeyError:
+        cls._decl_class_registry['_sa_module_registry'] = \
+            root_module = _ModuleMarker('_sa_module_registry', None)
+
+    tokens = cls.__module__.split(".")
+
+    # build up a tree like this:
+    # modulename: myapp.snacks.nuts
+    #
+    # myapp->snacks->nuts->(classes)
+    # snacks->nuts->(classes)
+    # nuts->(classes)
+    #
+    # this allows partial token paths to be used.
+    while tokens:
+        token = tokens.pop(0)
+        module = root_module.get_module(token)
+        for token in tokens:
+            module = module.get_module(token)
+        module.add_class(classname, cls)
+
+
+class _MultipleClassMarker(object):
+    """refers to multiple classes of the same name
+    within _decl_class_registry.
+
+    """
+
+    __slots__ = 'on_remove', 'contents', '__weakref__'
+
+    def __init__(self, classes, on_remove=None):
+        self.on_remove = on_remove
+        self.contents = set([
+            weakref.ref(item, self._remove_item) for item in classes])
+        _registries.add(self)
+
+    def __iter__(self):
+        return (ref() for ref in self.contents)
+
+    def attempt_get(self, path, key):
+        if len(self.contents) > 1:
+            raise exc.InvalidRequestError(
+                "Multiple classes found for path \"%s\" "
+                "in the registry of this declarative "
+                "base. Please use a fully module-qualified path."
% + (".".join(path + [key])) + ) + else: + ref = list(self.contents)[0] + cls = ref() + if cls is None: + raise NameError(key) + return cls + + def _remove_item(self, ref): + self.contents.remove(ref) + if not self.contents: + _registries.discard(self) + if self.on_remove: + self.on_remove() + + def add_item(self, item): + # protect against class registration race condition against + # asynchronous garbage collection calling _remove_item, + # [ticket:3208] + modules = set([ + cls.__module__ for cls in + [ref() for ref in self.contents] if cls is not None]) + if item.__module__ in modules: + util.warn( + "This declarative base already contains a class with the " + "same class name and module name as %s.%s, and will " + "be replaced in the string-lookup table." % ( + item.__module__, + item.__name__ + ) + ) + self.contents.add(weakref.ref(item, self._remove_item)) + + +class _ModuleMarker(object): + """"refers to a module name within + _decl_class_registry. + + """ + + __slots__ = 'parent', 'name', 'contents', 'mod_ns', 'path', '__weakref__' + + def __init__(self, name, parent): + self.parent = parent + self.name = name + self.contents = {} + self.mod_ns = _ModNS(self) + if self.parent: + self.path = self.parent.path + [self.name] + else: + self.path = [] + _registries.add(self) + + def __contains__(self, name): + return name in self.contents + + def __getitem__(self, name): + return self.contents[name] + + def _remove_item(self, name): + self.contents.pop(name, None) + if not self.contents and self.parent is not None: + self.parent._remove_item(self.name) + _registries.discard(self) + + def resolve_attr(self, key): + return getattr(self.mod_ns, key) + + def get_module(self, name): + if name not in self.contents: + marker = _ModuleMarker(name, self) + self.contents[name] = marker + else: + marker = self.contents[name] + return marker + + def add_class(self, name, cls): + if name in self.contents: + existing = self.contents[name] + existing.add_item(cls) + else: + existing = self.contents[name] = \ + _MultipleClassMarker([cls], + on_remove=lambda: self._remove_item(name)) + + +class _ModNS(object): + __slots__ = '__parent', + + def __init__(self, parent): + self.__parent = parent + + def __getattr__(self, key): + try: + value = self.__parent.contents[key] + except KeyError: + pass + else: + if value is not None: + if isinstance(value, _ModuleMarker): + return value.mod_ns + else: + assert isinstance(value, _MultipleClassMarker) + return value.attempt_get(self.__parent.path, key) + raise AttributeError("Module %r has no mapped classes " + "registered under the name %r" % ( + self.__parent.name, key)) + + +class _GetColumns(object): + __slots__ = 'cls', + + def __init__(self, cls): + self.cls = cls + + def __getattr__(self, key): + mp = class_mapper(self.cls, configure=False) + if mp: + if key not in mp.all_orm_descriptors: + raise exc.InvalidRequestError( + "Class %r does not have a mapped column named %r" + % (self.cls, key)) + + desc = mp.all_orm_descriptors[key] + if desc.extension_type is interfaces.NOT_EXTENSION: + prop = desc.property + if isinstance(prop, SynonymProperty): + key = prop.name + elif not isinstance(prop, ColumnProperty): + raise exc.InvalidRequestError( + "Property %r is not an instance of" + " ColumnProperty (i.e. does not correspond" + " directly to a Column)." 
% key) + return getattr(self.cls, key) + +inspection._inspects(_GetColumns)( + lambda target: inspection.inspect(target.cls)) + + +class _GetTable(object): + __slots__ = 'key', 'metadata' + + def __init__(self, key, metadata): + self.key = key + self.metadata = metadata + + def __getattr__(self, key): + return self.metadata.tables[ + _get_table_key(key, self.key) + ] + + +def _determine_container(key, value): + if isinstance(value, _MultipleClassMarker): + value = value.attempt_get([], key) + return _GetColumns(value) + + +class _class_resolver(object): + def __init__(self, cls, prop, fallback, arg): + self.cls = cls + self.prop = prop + self.arg = self._declarative_arg = arg + self.fallback = fallback + self._dict = util.PopulateDict(self._access_cls) + self._resolvers = () + + def _access_cls(self, key): + cls = self.cls + if key in cls._decl_class_registry: + return _determine_container(key, cls._decl_class_registry[key]) + elif key in cls.metadata.tables: + return cls.metadata.tables[key] + elif key in cls.metadata._schemas: + return _GetTable(key, cls.metadata) + elif '_sa_module_registry' in cls._decl_class_registry and \ + key in cls._decl_class_registry['_sa_module_registry']: + registry = cls._decl_class_registry['_sa_module_registry'] + return registry.resolve_attr(key) + elif self._resolvers: + for resolv in self._resolvers: + value = resolv(key) + if value is not None: + return value + + return self.fallback[key] + + def __call__(self): + try: + x = eval(self.arg, globals(), self._dict) + + if isinstance(x, _GetColumns): + return x.cls + else: + return x + except NameError as n: + raise exc.InvalidRequestError( + "When initializing mapper %s, expression %r failed to " + "locate a name (%r). If this is a class name, consider " + "adding this relationship() to the %r class after " + "both dependent classes have been defined." % + (self.prop.parent, self.arg, n.args[0], self.cls) + ) + + +def _resolver(cls, prop): + import sqlalchemy + from sqlalchemy.orm import foreign, remote + + fallback = sqlalchemy.__dict__.copy() + fallback.update({'foreign': foreign, 'remote': remote}) + + def resolve_arg(arg): + return _class_resolver(cls, prop, fallback, arg) + return resolve_arg + + +def _deferred_relationship(cls, prop): + + if isinstance(prop, RelationshipProperty): + resolve_arg = _resolver(cls, prop) + + for attr in ('argument', 'order_by', 'primaryjoin', 'secondaryjoin', + 'secondary', '_user_defined_foreign_keys', 'remote_side'): + v = getattr(prop, attr) + if isinstance(v, util.string_types): + setattr(prop, attr, resolve_arg(v)) + + if prop.backref and isinstance(prop.backref, tuple): + key, kwargs = prop.backref + for attr in ('primaryjoin', 'secondaryjoin', 'secondary', + 'foreign_keys', 'remote_side', 'order_by'): + if attr in kwargs and isinstance(kwargs[attr], + util.string_types): + kwargs[attr] = resolve_arg(kwargs[attr]) + + return prop diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/horizontal_shard.py b/lib/python3.4/site-packages/sqlalchemy/ext/horizontal_shard.py new file mode 100644 index 0000000..996e81f --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/ext/horizontal_shard.py @@ -0,0 +1,131 @@ +# ext/horizontal_shard.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Horizontal sharding support. 
+ +Defines a rudimental 'horizontal sharding' system which allows a Session to +distribute queries and persistence operations across multiple databases. + +For a usage example, see the :ref:`examples_sharding` example included in +the source distribution. + +""" + +from .. import util +from ..orm.session import Session +from ..orm.query import Query + +__all__ = ['ShardedSession', 'ShardedQuery'] + + +class ShardedQuery(Query): + def __init__(self, *args, **kwargs): + super(ShardedQuery, self).__init__(*args, **kwargs) + self.id_chooser = self.session.id_chooser + self.query_chooser = self.session.query_chooser + self._shard_id = None + + def set_shard(self, shard_id): + """return a new query, limited to a single shard ID. + + all subsequent operations with the returned query will + be against the single shard regardless of other state. + """ + + q = self._clone() + q._shard_id = shard_id + return q + + def _execute_and_instances(self, context): + def iter_for_shard(shard_id): + context.attributes['shard_id'] = shard_id + result = self._connection_from_session( + mapper=self._mapper_zero(), + shard_id=shard_id).execute( + context.statement, + self._params) + return self.instances(result, context) + + if self._shard_id is not None: + return iter_for_shard(self._shard_id) + else: + partial = [] + for shard_id in self.query_chooser(self): + partial.extend(iter_for_shard(shard_id)) + + # if some kind of in memory 'sorting' + # were done, this is where it would happen + return iter(partial) + + def get(self, ident, **kwargs): + if self._shard_id is not None: + return super(ShardedQuery, self).get(ident) + else: + ident = util.to_list(ident) + for shard_id in self.id_chooser(self, ident): + o = self.set_shard(shard_id).get(ident, **kwargs) + if o is not None: + return o + else: + return None + + +class ShardedSession(Session): + def __init__(self, shard_chooser, id_chooser, query_chooser, shards=None, + query_cls=ShardedQuery, **kwargs): + """Construct a ShardedSession. + + :param shard_chooser: A callable which, passed a Mapper, a mapped + instance, and possibly a SQL clause, returns a shard ID. This id + may be based off of the attributes present within the object, or on + some round-robin scheme. If the scheme is based on a selection, it + should set whatever state on the instance to mark it in the future as + participating in that shard. + + :param id_chooser: A callable, passed a query and a tuple of identity + values, which should return a list of shard ids where the ID might + reside. The databases will be queried in the order of this listing. + + :param query_chooser: For a given Query, returns the list of shard_ids + where the query should be issued. Results from all shards returned + will be combined together into a single listing. + + :param shards: A dictionary of string shard names + to :class:`~sqlalchemy.engine.Engine` objects. 
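+
+        E.g., a minimal sketch assuming two engines and deliberately
+        trivial chooser callables (the shard names and database URLs
+        here are hypothetical)::
+
+            from sqlalchemy import create_engine
+
+            db1 = create_engine('sqlite:///shard1.db')
+            db2 = create_engine('sqlite:///shard2.db')
+
+            def shard_chooser(mapper, instance, clause=None):
+                # route all new objects to a single shard
+                return 'shard1'
+
+            def id_chooser(query, ident):
+                # a given identity may reside in either shard
+                return ['shard1', 'shard2']
+
+            def query_chooser(query):
+                # issue queries against all shards
+                return ['shard1', 'shard2']
+
+            session = ShardedSession(
+                shard_chooser=shard_chooser,
+                id_chooser=id_chooser,
+                query_chooser=query_chooser,
+                shards={'shard1': db1, 'shard2': db2}
+            )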
+ + """ + super(ShardedSession, self).__init__(query_cls=query_cls, **kwargs) + self.shard_chooser = shard_chooser + self.id_chooser = id_chooser + self.query_chooser = query_chooser + self.__binds = {} + self.connection_callable = self.connection + if shards is not None: + for k in shards: + self.bind_shard(k, shards[k]) + + def connection(self, mapper=None, instance=None, shard_id=None, **kwargs): + if shard_id is None: + shard_id = self.shard_chooser(mapper, instance) + + if self.transaction is not None: + return self.transaction.connection(mapper, shard_id=shard_id) + else: + return self.get_bind( + mapper, + shard_id=shard_id, + instance=instance + ).contextual_connect(**kwargs) + + def get_bind(self, mapper, shard_id=None, + instance=None, clause=None, **kw): + if shard_id is None: + shard_id = self.shard_chooser(mapper, instance, clause=clause) + return self.__binds[shard_id] + + def bind_shard(self, shard_id, bind): + self.__binds[shard_id] = bind diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/hybrid.py b/lib/python3.4/site-packages/sqlalchemy/ext/hybrid.py new file mode 100644 index 0000000..bbf3867 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/ext/hybrid.py @@ -0,0 +1,810 @@ +# ext/hybrid.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Define attributes on ORM-mapped classes that have "hybrid" behavior. + +"hybrid" means the attribute has distinct behaviors defined at the +class level and at the instance level. + +The :mod:`~sqlalchemy.ext.hybrid` extension provides a special form of +method decorator, is around 50 lines of code and has almost no +dependencies on the rest of SQLAlchemy. It can, in theory, work with +any descriptor-based expression system. + +Consider a mapping ``Interval``, representing integer ``start`` and ``end`` +values. We can define higher level functions on mapped classes that produce +SQL expressions at the class level, and Python expression evaluation at the +instance level. Below, each function decorated with :class:`.hybrid_method` or +:class:`.hybrid_property` may receive ``self`` as an instance of the class, or +as the class itself:: + + from sqlalchemy import Column, Integer + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.orm import Session, aliased + from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method + + Base = declarative_base() + + class Interval(Base): + __tablename__ = 'interval' + + id = Column(Integer, primary_key=True) + start = Column(Integer, nullable=False) + end = Column(Integer, nullable=False) + + def __init__(self, start, end): + self.start = start + self.end = end + + @hybrid_property + def length(self): + return self.end - self.start + + @hybrid_method + def contains(self, point): + return (self.start <= point) & (point <= self.end) + + @hybrid_method + def intersects(self, other): + return self.contains(other.start) | self.contains(other.end) + +Above, the ``length`` property returns the difference between the +``end`` and ``start`` attributes. 
With an instance of ``Interval``, +this subtraction occurs in Python, using normal Python descriptor +mechanics:: + + >>> i1 = Interval(5, 10) + >>> i1.length + 5 + +When dealing with the ``Interval`` class itself, the :class:`.hybrid_property` +descriptor evaluates the function body given the ``Interval`` class as +the argument, which when evaluated with SQLAlchemy expression mechanics +returns a new SQL expression:: + + >>> print Interval.length + interval."end" - interval.start + + >>> print Session().query(Interval).filter(Interval.length > 10) + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end + FROM interval + WHERE interval."end" - interval.start > :param_1 + +ORM methods such as :meth:`~.Query.filter_by` generally use ``getattr()`` to +locate attributes, so can also be used with hybrid attributes:: + + >>> print Session().query(Interval).filter_by(length=5) + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end + FROM interval + WHERE interval."end" - interval.start = :param_1 + +The ``Interval`` class example also illustrates two methods, +``contains()`` and ``intersects()``, decorated with +:class:`.hybrid_method`. This decorator applies the same idea to +methods that :class:`.hybrid_property` applies to attributes. The +methods return boolean values, and take advantage of the Python ``|`` +and ``&`` bitwise operators to produce equivalent instance-level and +SQL expression-level boolean behavior:: + + >>> i1.contains(6) + True + >>> i1.contains(15) + False + >>> i1.intersects(Interval(7, 18)) + True + >>> i1.intersects(Interval(25, 29)) + False + + >>> print Session().query(Interval).filter(Interval.contains(15)) + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end + FROM interval + WHERE interval.start <= :start_1 AND interval."end" > :end_1 + + >>> ia = aliased(Interval) + >>> print Session().query(Interval, ia).filter(Interval.intersects(ia)) + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end, interval_1.id AS interval_1_id, + interval_1.start AS interval_1_start, interval_1."end" AS interval_1_end + FROM interval, interval AS interval_1 + WHERE interval.start <= interval_1.start + AND interval."end" > interval_1.start + OR interval.start <= interval_1."end" + AND interval."end" > interval_1."end" + +Defining Expression Behavior Distinct from Attribute Behavior +-------------------------------------------------------------- + +Our usage of the ``&`` and ``|`` bitwise operators above was +fortunate, considering our functions operated on two boolean values to +return a new one. In many cases, the construction of an in-Python +function and a SQLAlchemy SQL expression have enough differences that +two separate Python expressions should be defined. The +:mod:`~sqlalchemy.ext.hybrid` decorators define the +:meth:`.hybrid_property.expression` modifier for this purpose. As an +example we'll define the radius of the interval, which requires the +usage of the absolute value function:: + + from sqlalchemy import func + + class Interval(object): + # ... 
+ + @hybrid_property + def radius(self): + return abs(self.length) / 2 + + @radius.expression + def radius(cls): + return func.abs(cls.length) / 2 + +Above the Python function ``abs()`` is used for instance-level +operations, the SQL function ``ABS()`` is used via the :data:`.func` +object for class-level expressions:: + + >>> i1.radius + 2 + + >>> print Session().query(Interval).filter(Interval.radius > 5) + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end + FROM interval + WHERE abs(interval."end" - interval.start) / :abs_1 > :param_1 + +Defining Setters +---------------- + +Hybrid properties can also define setter methods. If we wanted +``length`` above, when set, to modify the endpoint value:: + + class Interval(object): + # ... + + @hybrid_property + def length(self): + return self.end - self.start + + @length.setter + def length(self, value): + self.end = self.start + value + +The ``length(self, value)`` method is now called upon set:: + + >>> i1 = Interval(5, 10) + >>> i1.length + 5 + >>> i1.length = 12 + >>> i1.end + 17 + +Working with Relationships +-------------------------- + +There's no essential difference when creating hybrids that work with +related objects as opposed to column-based data. The need for distinct +expressions tends to be greater. Two variants of we'll illustrate +are the "join-dependent" hybrid, and the "correlated subquery" hybrid. + +Join-Dependent Relationship Hybrid +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Consider the following declarative +mapping which relates a ``User`` to a ``SavingsAccount``:: + + from sqlalchemy import Column, Integer, ForeignKey, Numeric, String + from sqlalchemy.orm import relationship + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.ext.hybrid import hybrid_property + + Base = declarative_base() + + class SavingsAccount(Base): + __tablename__ = 'account' + id = Column(Integer, primary_key=True) + user_id = Column(Integer, ForeignKey('user.id'), nullable=False) + balance = Column(Numeric(15, 5)) + + class User(Base): + __tablename__ = 'user' + id = Column(Integer, primary_key=True) + name = Column(String(100), nullable=False) + + accounts = relationship("SavingsAccount", backref="owner") + + @hybrid_property + def balance(self): + if self.accounts: + return self.accounts[0].balance + else: + return None + + @balance.setter + def balance(self, value): + if not self.accounts: + account = Account(owner=self) + else: + account = self.accounts[0] + account.balance = value + + @balance.expression + def balance(cls): + return SavingsAccount.balance + +The above hybrid property ``balance`` works with the first +``SavingsAccount`` entry in the list of accounts for this user. The +in-Python getter/setter methods can treat ``accounts`` as a Python +list available on ``self``. + +However, at the expression level, it's expected that the ``User`` class will +be used in an appropriate context such that an appropriate join to +``SavingsAccount`` will be present:: + + >>> print Session().query(User, User.balance).\\ + ... 
join(User.accounts).filter(User.balance > 5000) + SELECT "user".id AS user_id, "user".name AS user_name, + account.balance AS account_balance + FROM "user" JOIN account ON "user".id = account.user_id + WHERE account.balance > :balance_1 + +Note however, that while the instance level accessors need to worry +about whether ``self.accounts`` is even present, this issue expresses +itself differently at the SQL expression level, where we basically +would use an outer join:: + + >>> from sqlalchemy import or_ + >>> print (Session().query(User, User.balance).outerjoin(User.accounts). + ... filter(or_(User.balance < 5000, User.balance == None))) + SELECT "user".id AS user_id, "user".name AS user_name, + account.balance AS account_balance + FROM "user" LEFT OUTER JOIN account ON "user".id = account.user_id + WHERE account.balance < :balance_1 OR account.balance IS NULL + +Correlated Subquery Relationship Hybrid +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +We can, of course, forego being dependent on the enclosing query's usage +of joins in favor of the correlated subquery, which can portably be packed +into a single column expression. A correlated subquery is more portable, but +often performs more poorly at the SQL level. Using the same technique +illustrated at :ref:`mapper_column_property_sql_expressions`, +we can adjust our ``SavingsAccount`` example to aggregate the balances for +*all* accounts, and use a correlated subquery for the column expression:: + + from sqlalchemy import Column, Integer, ForeignKey, Numeric, String + from sqlalchemy.orm import relationship + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.ext.hybrid import hybrid_property + from sqlalchemy import select, func + + Base = declarative_base() + + class SavingsAccount(Base): + __tablename__ = 'account' + id = Column(Integer, primary_key=True) + user_id = Column(Integer, ForeignKey('user.id'), nullable=False) + balance = Column(Numeric(15, 5)) + + class User(Base): + __tablename__ = 'user' + id = Column(Integer, primary_key=True) + name = Column(String(100), nullable=False) + + accounts = relationship("SavingsAccount", backref="owner") + + @hybrid_property + def balance(self): + return sum(acc.balance for acc in self.accounts) + + @balance.expression + def balance(cls): + return select([func.sum(SavingsAccount.balance)]).\\ + where(SavingsAccount.user_id==cls.id).\\ + label('total_balance') + +The above recipe will give us the ``balance`` column which renders +a correlated SELECT:: + + >>> print s.query(User).filter(User.balance > 400) + SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE (SELECT sum(account.balance) AS sum_1 + FROM account + WHERE account.user_id = "user".id) > :param_1 + +.. _hybrid_custom_comparators: + +Building Custom Comparators +--------------------------- + +The hybrid property also includes a helper that allows construction of +custom comparators. A comparator object allows one to customize the +behavior of each SQLAlchemy expression operator individually. They +are useful when creating custom types that have some highly +idiosyncratic behavior on the SQL side. 
+ +The example class below allows case-insensitive comparisons on the attribute +named ``word_insensitive``:: + + from sqlalchemy.ext.hybrid import Comparator, hybrid_property + from sqlalchemy import func, Column, Integer, String + from sqlalchemy.orm import Session + from sqlalchemy.ext.declarative import declarative_base + + Base = declarative_base() + + class CaseInsensitiveComparator(Comparator): + def __eq__(self, other): + return func.lower(self.__clause_element__()) == func.lower(other) + + class SearchWord(Base): + __tablename__ = 'searchword' + id = Column(Integer, primary_key=True) + word = Column(String(255), nullable=False) + + @hybrid_property + def word_insensitive(self): + return self.word.lower() + + @word_insensitive.comparator + def word_insensitive(cls): + return CaseInsensitiveComparator(cls.word) + +Above, SQL expressions against ``word_insensitive`` will apply the ``LOWER()`` +SQL function to both sides:: + + >>> print Session().query(SearchWord).filter_by(word_insensitive="Trucks") + SELECT searchword.id AS searchword_id, searchword.word AS searchword_word + FROM searchword + WHERE lower(searchword.word) = lower(:lower_1) + +The ``CaseInsensitiveComparator`` above implements part of the +:class:`.ColumnOperators` interface. A "coercion" operation like +lowercasing can be applied to all comparison operations (i.e. ``eq``, +``lt``, ``gt``, etc.) using :meth:`.Operators.operate`:: + + class CaseInsensitiveComparator(Comparator): + def operate(self, op, other): + return op(func.lower(self.__clause_element__()), func.lower(other)) + +Hybrid Value Objects +-------------------- + +Note in our previous example, if we were to compare the +``word_insensitive`` attribute of a ``SearchWord`` instance to a plain +Python string, the plain Python string would not be coerced to lower +case - the ``CaseInsensitiveComparator`` we built, being returned by +``@word_insensitive.comparator``, only applies to the SQL side. + +A more comprehensive form of the custom comparator is to construct a +*Hybrid Value Object*. This technique applies the target value or +expression to a value object which is then returned by the accessor in +all cases. The value object allows control of all operations upon +the value as well as how compared values are treated, both on the SQL +expression side as well as the Python value side. Replacing the +previous ``CaseInsensitiveComparator`` class with a new +``CaseInsensitiveWord`` class:: + + class CaseInsensitiveWord(Comparator): + "Hybrid value representing a lower case representation of a word." + + def __init__(self, word): + if isinstance(word, basestring): + self.word = word.lower() + elif isinstance(word, CaseInsensitiveWord): + self.word = word.word + else: + self.word = func.lower(word) + + def operate(self, op, other): + if not isinstance(other, CaseInsensitiveWord): + other = CaseInsensitiveWord(other) + return op(self.word, other.word) + + def __clause_element__(self): + return self.word + + def __str__(self): + return self.word + + key = 'word' + "Label to apply to Query tuple results" + +Above, the ``CaseInsensitiveWord`` object represents ``self.word``, +which may be a SQL function, or may be a Python native. By +overriding ``operate()`` and ``__clause_element__()`` to work in terms +of ``self.word``, all comparison operations will work against the +"converted" form of ``word``, whether it be SQL side or Python side. 
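+
+The value object can also be exercised on its own, independently of
+any hybrid (a brief illustrative aside)::
+
+    >>> CaseInsensitiveWord("Trucks") == CaseInsensitiveWord("trucks")
+    True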
+Our ``SearchWord`` class can now deliver the ``CaseInsensitiveWord`` +object unconditionally from a single hybrid call:: + + class SearchWord(Base): + __tablename__ = 'searchword' + id = Column(Integer, primary_key=True) + word = Column(String(255), nullable=False) + + @hybrid_property + def word_insensitive(self): + return CaseInsensitiveWord(self.word) + +The ``word_insensitive`` attribute now has case-insensitive comparison +behavior universally, including SQL expression vs. Python expression +(note the Python value is converted to lower case on the Python side +here):: + + >>> print Session().query(SearchWord).filter_by(word_insensitive="Trucks") + SELECT searchword.id AS searchword_id, searchword.word AS searchword_word + FROM searchword + WHERE lower(searchword.word) = :lower_1 + +SQL expression versus SQL expression:: + + >>> sw1 = aliased(SearchWord) + >>> sw2 = aliased(SearchWord) + >>> print Session().query( + ... sw1.word_insensitive, + ... sw2.word_insensitive).\\ + ... filter( + ... sw1.word_insensitive > sw2.word_insensitive + ... ) + SELECT lower(searchword_1.word) AS lower_1, + lower(searchword_2.word) AS lower_2 + FROM searchword AS searchword_1, searchword AS searchword_2 + WHERE lower(searchword_1.word) > lower(searchword_2.word) + +Python only expression:: + + >>> ws1 = SearchWord(word="SomeWord") + >>> ws1.word_insensitive == "sOmEwOrD" + True + >>> ws1.word_insensitive == "XOmEwOrX" + False + >>> print ws1.word_insensitive + someword + +The Hybrid Value pattern is very useful for any kind of value that may +have multiple representations, such as timestamps, time deltas, units +of measurement, currencies and encrypted passwords. + +.. seealso:: + + `Hybrids and Value Agnostic Types + `_ + - on the techspot.zzzeek.org blog + + `Value Agnostic Types, Part II + `_ - + on the techspot.zzzeek.org blog + +.. _hybrid_transformers: + +Building Transformers +---------------------- + +A *transformer* is an object which can receive a :class:`.Query` +object and return a new one. The :class:`.Query` object includes a +method :meth:`.with_transformation` that returns a new :class:`.Query` +transformed by the given function. + +We can combine this with the :class:`.Comparator` class to produce one type +of recipe which can both set up the FROM clause of a query as well as assign +filtering criterion. + +Consider a mapped class ``Node``, which assembles using adjacency list +into a hierarchical tree pattern:: + + from sqlalchemy import Column, Integer, ForeignKey + from sqlalchemy.orm import relationship + from sqlalchemy.ext.declarative import declarative_base + Base = declarative_base() + + class Node(Base): + __tablename__ = 'node' + id =Column(Integer, primary_key=True) + parent_id = Column(Integer, ForeignKey('node.id')) + parent = relationship("Node", remote_side=id) + +Suppose we wanted to add an accessor ``grandparent``. This would +return the ``parent`` of ``Node.parent``. When we have an instance of +``Node``, this is simple:: + + from sqlalchemy.ext.hybrid import hybrid_property + + class Node(Base): + # ... + + @hybrid_property + def grandparent(self): + return self.parent.parent + +For the expression, things are not so clear. We'd need to construct +a :class:`.Query` where we :meth:`~.Query.join` twice along +``Node.parent`` to get to the ``grandparent``. 
We can instead return +a transforming callable that we'll combine with the +:class:`.Comparator` class to receive any :class:`.Query` object, and +return a new one that's joined to the ``Node.parent`` attribute and +filtered based on the given criterion:: + + from sqlalchemy.ext.hybrid import Comparator + + class GrandparentTransformer(Comparator): + def operate(self, op, other): + def transform(q): + cls = self.__clause_element__() + parent_alias = aliased(cls) + return q.join(parent_alias, cls.parent).\\ + filter(op(parent_alias.parent, other)) + return transform + + Base = declarative_base() + + class Node(Base): + __tablename__ = 'node' + id =Column(Integer, primary_key=True) + parent_id = Column(Integer, ForeignKey('node.id')) + parent = relationship("Node", remote_side=id) + + @hybrid_property + def grandparent(self): + return self.parent.parent + + @grandparent.comparator + def grandparent(cls): + return GrandparentTransformer(cls) + +The ``GrandparentTransformer`` overrides the core +:meth:`.Operators.operate` method at the base of the +:class:`.Comparator` hierarchy to return a query-transforming +callable, which then runs the given comparison operation in a +particular context. Such as, in the example above, the ``operate`` +method is called, given the :attr:`.Operators.eq` callable as well as +the right side of the comparison ``Node(id=5)``. A function +``transform`` is then returned which will transform a :class:`.Query` +first to join to ``Node.parent``, then to compare ``parent_alias`` +using :attr:`.Operators.eq` against the left and right sides, passing +into :class:`.Query.filter`: + +.. sourcecode:: pycon+sql + + >>> from sqlalchemy.orm import Session + >>> session = Session() + {sql}>>> session.query(Node).\\ + ... with_transformation(Node.grandparent==Node(id=5)).\\ + ... all() + SELECT node.id AS node_id, node.parent_id AS node_parent_id + FROM node JOIN node AS node_1 ON node_1.id = node.parent_id + WHERE :param_1 = node_1.parent_id + {stop} + +We can modify the pattern to be more verbose but flexible by separating +the "join" step from the "filter" step. The tricky part here is ensuring +that successive instances of ``GrandparentTransformer`` use the same +:class:`.AliasedClass` object against ``Node``. Below we use a simple +memoizing approach that associates a ``GrandparentTransformer`` +with each class:: + + class Node(Base): + + # ... + + @grandparent.comparator + def grandparent(cls): + # memoize a GrandparentTransformer + # per class + if '_gp' not in cls.__dict__: + cls._gp = GrandparentTransformer(cls) + return cls._gp + + class GrandparentTransformer(Comparator): + + def __init__(self, cls): + self.parent_alias = aliased(cls) + + @property + def join(self): + def go(q): + return q.join(self.parent_alias, Node.parent) + return go + + def operate(self, op, other): + return op(self.parent_alias.parent, other) + +.. sourcecode:: pycon+sql + + {sql}>>> session.query(Node).\\ + ... with_transformation(Node.grandparent.join).\\ + ... filter(Node.grandparent==Node(id=5)) + SELECT node.id AS node_id, node.parent_id AS node_parent_id + FROM node JOIN node AS node_1 ON node_1.id = node.parent_id + WHERE :param_1 = node_1.parent_id + {stop} + +The "transformer" pattern is an experimental pattern that starts +to make usage of some functional programming paradigms. +While it's only recommended for advanced and/or patient developers, +there's probably a whole lot of amazing things it can be used for. + +""" +from .. 
import util +from ..orm import attributes, interfaces + +HYBRID_METHOD = util.symbol('HYBRID_METHOD') +"""Symbol indicating an :class:`InspectionAttr` that's + of type :class:`.hybrid_method`. + + Is assigned to the :attr:`.InspectionAttr.extension_type` + attibute. + + .. seealso:: + + :attr:`.Mapper.all_orm_attributes` + +""" + +HYBRID_PROPERTY = util.symbol('HYBRID_PROPERTY') +"""Symbol indicating an :class:`InspectionAttr` that's + of type :class:`.hybrid_method`. + + Is assigned to the :attr:`.InspectionAttr.extension_type` + attibute. + + .. seealso:: + + :attr:`.Mapper.all_orm_attributes` + +""" + + +class hybrid_method(interfaces.InspectionAttrInfo): + """A decorator which allows definition of a Python object method with both + instance-level and class-level behavior. + + """ + + is_attribute = True + extension_type = HYBRID_METHOD + + def __init__(self, func, expr=None): + """Create a new :class:`.hybrid_method`. + + Usage is typically via decorator:: + + from sqlalchemy.ext.hybrid import hybrid_method + + class SomeClass(object): + @hybrid_method + def value(self, x, y): + return self._value + x + y + + @value.expression + def value(self, x, y): + return func.some_function(self._value, x, y) + + """ + self.func = func + self.expr = expr or func + + def __get__(self, instance, owner): + if instance is None: + return self.expr.__get__(owner, owner.__class__) + else: + return self.func.__get__(instance, owner) + + def expression(self, expr): + """Provide a modifying decorator that defines a + SQL-expression producing method.""" + + self.expr = expr + return self + + +class hybrid_property(interfaces.InspectionAttrInfo): + """A decorator which allows definition of a Python descriptor with both + instance-level and class-level behavior. + + """ + + is_attribute = True + extension_type = HYBRID_PROPERTY + + def __init__(self, fget, fset=None, fdel=None, expr=None): + """Create a new :class:`.hybrid_property`. + + Usage is typically via decorator:: + + from sqlalchemy.ext.hybrid import hybrid_property + + class SomeClass(object): + @hybrid_property + def value(self): + return self._value + + @value.setter + def value(self, value): + self._value = value + + """ + self.fget = fget + self.fset = fset + self.fdel = fdel + self.expr = expr or fget + util.update_wrapper(self, fget) + + def __get__(self, instance, owner): + if instance is None: + return self.expr(owner) + else: + return self.fget(instance) + + def __set__(self, instance, value): + if self.fset is None: + raise AttributeError("can't set attribute") + self.fset(instance, value) + + def __delete__(self, instance): + if self.fdel is None: + raise AttributeError("can't delete attribute") + self.fdel(instance) + + def setter(self, fset): + """Provide a modifying decorator that defines a value-setter method.""" + + self.fset = fset + return self + + def deleter(self, fdel): + """Provide a modifying decorator that defines a + value-deletion method.""" + + self.fdel = fdel + return self + + def expression(self, expr): + """Provide a modifying decorator that defines a SQL-expression + producing method.""" + + self.expr = expr + return self + + def comparator(self, comparator): + """Provide a modifying decorator that defines a custom + comparator producing method. + + The return value of the decorated method should be an instance of + :class:`~.hybrid.Comparator`. 
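+
+        E.g., condensing the ``CaseInsensitiveComparator`` example
+        from the module documentation above::
+
+            @word_insensitive.comparator
+            def word_insensitive(cls):
+                return CaseInsensitiveComparator(cls.word)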
+ + """ + + proxy_attr = attributes.\ + create_proxied_attribute(self) + + def expr(owner): + return proxy_attr(owner, self.__name__, self, comparator(owner)) + self.expr = expr + return self + + +class Comparator(interfaces.PropComparator): + """A helper class that allows easy construction of custom + :class:`~.orm.interfaces.PropComparator` + classes for usage with hybrids.""" + + property = None + + def __init__(self, expression): + self.expression = expression + + def __clause_element__(self): + expr = self.expression + while hasattr(expr, '__clause_element__'): + expr = expr.__clause_element__() + return expr + + def adapt_to_entity(self, adapt_to_entity): + # interesting.... + return self diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/instrumentation.py b/lib/python3.4/site-packages/sqlalchemy/ext/instrumentation.py new file mode 100644 index 0000000..30a0ab7 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/ext/instrumentation.py @@ -0,0 +1,414 @@ +"""Extensible class instrumentation. + +The :mod:`sqlalchemy.ext.instrumentation` package provides for alternate +systems of class instrumentation within the ORM. Class instrumentation +refers to how the ORM places attributes on the class which maintain +data and track changes to that data, as well as event hooks installed +on the class. + +.. note:: + The extension package is provided for the benefit of integration + with other object management packages, which already perform + their own instrumentation. It is not intended for general use. + +For examples of how the instrumentation extension is used, +see the example :ref:`examples_instrumentation`. + +.. versionchanged:: 0.8 + The :mod:`sqlalchemy.orm.instrumentation` was split out so + that all functionality having to do with non-standard + instrumentation was moved out to :mod:`sqlalchemy.ext.instrumentation`. + When imported, the module installs itself within + :mod:`sqlalchemy.orm.instrumentation` so that it + takes effect, including recognition of + ``__sa_instrumentation_manager__`` on mapped classes, as + well :data:`.instrumentation_finders` + being used to determine class instrumentation resolution. + +""" +from ..orm import instrumentation as orm_instrumentation +from ..orm.instrumentation import ( + ClassManager, InstrumentationFactory, _default_state_getter, + _default_dict_getter, _default_manager_getter +) +from ..orm import attributes, collections, base as orm_base +from .. import util +from ..orm import exc as orm_exc +import weakref + +INSTRUMENTATION_MANAGER = '__sa_instrumentation_manager__' +"""Attribute, elects custom instrumentation when present on a mapped class. + +Allows a class to specify a slightly or wildly different technique for +tracking changes made to mapped attributes and collections. + +Only one instrumentation implementation is allowed in a given object +inheritance hierarchy. + +The value of this attribute must be a callable and will be passed a class +object. The callable must return one of: + + - An instance of an InstrumentationManager or subclass + - An object implementing all or some of InstrumentationManager (TODO) + - A dictionary of callables, implementing all or some of the above (TODO) + - An instance of a ClassManager or subclass + +This attribute is consulted by SQLAlchemy instrumentation +resolution, once the :mod:`sqlalchemy.ext.instrumentation` module +has been imported. If custom finders are installed in the global +instrumentation_finders list, they may or may not choose to honor this +attribute. 
+ +""" + + +def find_native_user_instrumentation_hook(cls): + """Find user-specified instrumentation management for a class.""" + return getattr(cls, INSTRUMENTATION_MANAGER, None) + +instrumentation_finders = [find_native_user_instrumentation_hook] +"""An extensible sequence of callables which return instrumentation +implementations + +When a class is registered, each callable will be passed a class object. +If None is returned, the +next finder in the sequence is consulted. Otherwise the return must be an +instrumentation factory that follows the same guidelines as +sqlalchemy.ext.instrumentation.INSTRUMENTATION_MANAGER. + +By default, the only finder is find_native_user_instrumentation_hook, which +searches for INSTRUMENTATION_MANAGER. If all finders return None, standard +ClassManager instrumentation is used. + +""" + + +class ExtendedInstrumentationRegistry(InstrumentationFactory): + """Extends :class:`.InstrumentationFactory` with additional + bookkeeping, to accommodate multiple types of + class managers. + + """ + _manager_finders = weakref.WeakKeyDictionary() + _state_finders = weakref.WeakKeyDictionary() + _dict_finders = weakref.WeakKeyDictionary() + _extended = False + + def _locate_extended_factory(self, class_): + for finder in instrumentation_finders: + factory = finder(class_) + if factory is not None: + manager = self._extended_class_manager(class_, factory) + return manager, factory + else: + return None, None + + def _check_conflicts(self, class_, factory): + existing_factories = self._collect_management_factories_for(class_).\ + difference([factory]) + if existing_factories: + raise TypeError( + "multiple instrumentation implementations specified " + "in %s inheritance hierarchy: %r" % ( + class_.__name__, list(existing_factories))) + + def _extended_class_manager(self, class_, factory): + manager = factory(class_) + if not isinstance(manager, ClassManager): + manager = _ClassInstrumentationAdapter(class_, manager) + + if factory != ClassManager and not self._extended: + # somebody invoked a custom ClassManager. + # reinstall global "getter" functions with the more + # expensive ones. + self._extended = True + _install_instrumented_lookups() + + self._manager_finders[class_] = manager.manager_getter() + self._state_finders[class_] = manager.state_getter() + self._dict_finders[class_] = manager.dict_getter() + return manager + + def _collect_management_factories_for(self, cls): + """Return a collection of factories in play or specified for a + hierarchy. + + Traverses the entire inheritance graph of a cls and returns a + collection of instrumentation factories for those classes. Factories + are extracted from active ClassManagers, if available, otherwise + instrumentation_finders is consulted. 
+ + """ + hierarchy = util.class_hierarchy(cls) + factories = set() + for member in hierarchy: + manager = self.manager_of_class(member) + if manager is not None: + factories.add(manager.factory) + else: + for finder in instrumentation_finders: + factory = finder(member) + if factory is not None: + break + else: + factory = None + factories.add(factory) + factories.discard(None) + return factories + + def unregister(self, class_): + if class_ in self._manager_finders: + del self._manager_finders[class_] + del self._state_finders[class_] + del self._dict_finders[class_] + super(ExtendedInstrumentationRegistry, self).unregister(class_) + + def manager_of_class(self, cls): + if cls is None: + return None + try: + finder = self._manager_finders.get(cls, _default_manager_getter) + except TypeError: + # due to weakref lookup on invalid object + return None + else: + return finder(cls) + + def state_of(self, instance): + if instance is None: + raise AttributeError("None has no persistent state.") + return self._state_finders.get( + instance.__class__, _default_state_getter)(instance) + + def dict_of(self, instance): + if instance is None: + raise AttributeError("None has no persistent state.") + return self._dict_finders.get( + instance.__class__, _default_dict_getter)(instance) + + +orm_instrumentation._instrumentation_factory = \ + _instrumentation_factory = ExtendedInstrumentationRegistry() +orm_instrumentation.instrumentation_finders = instrumentation_finders + + +class InstrumentationManager(object): + """User-defined class instrumentation extension. + + :class:`.InstrumentationManager` can be subclassed in order + to change + how class instrumentation proceeds. This class exists for + the purposes of integration with other object management + frameworks which would like to entirely modify the + instrumentation methodology of the ORM, and is not intended + for regular usage. For interception of class instrumentation + events, see :class:`.InstrumentationEvents`. + + The API for this class should be considered as semi-stable, + and may change slightly with new releases. + + .. versionchanged:: 0.8 + :class:`.InstrumentationManager` was moved from + :mod:`sqlalchemy.orm.instrumentation` to + :mod:`sqlalchemy.ext.instrumentation`. + + """ + + # r4361 added a mandatory (cls) constructor to this interface. + # given that, perhaps class_ should be dropped from all of these + # signatures. 
+ + def __init__(self, class_): + pass + + def manage(self, class_, manager): + setattr(class_, '_default_class_manager', manager) + + def dispose(self, class_, manager): + delattr(class_, '_default_class_manager') + + def manager_getter(self, class_): + def get(cls): + return cls._default_class_manager + return get + + def instrument_attribute(self, class_, key, inst): + pass + + def post_configure_attribute(self, class_, key, inst): + pass + + def install_descriptor(self, class_, key, inst): + setattr(class_, key, inst) + + def uninstall_descriptor(self, class_, key): + delattr(class_, key) + + def install_member(self, class_, key, implementation): + setattr(class_, key, implementation) + + def uninstall_member(self, class_, key): + delattr(class_, key) + + def instrument_collection_class(self, class_, key, collection_class): + return collections.prepare_instrumentation(collection_class) + + def get_instance_dict(self, class_, instance): + return instance.__dict__ + + def initialize_instance_dict(self, class_, instance): + pass + + def install_state(self, class_, instance, state): + setattr(instance, '_default_state', state) + + def remove_state(self, class_, instance): + delattr(instance, '_default_state') + + def state_getter(self, class_): + return lambda instance: getattr(instance, '_default_state') + + def dict_getter(self, class_): + return lambda inst: self.get_instance_dict(class_, inst) + + +class _ClassInstrumentationAdapter(ClassManager): + """Adapts a user-defined InstrumentationManager to a ClassManager.""" + + def __init__(self, class_, override): + self._adapted = override + self._get_state = self._adapted.state_getter(class_) + self._get_dict = self._adapted.dict_getter(class_) + + ClassManager.__init__(self, class_) + + def manage(self): + self._adapted.manage(self.class_, self) + + def dispose(self): + self._adapted.dispose(self.class_) + + def manager_getter(self): + return self._adapted.manager_getter(self.class_) + + def instrument_attribute(self, key, inst, propagated=False): + ClassManager.instrument_attribute(self, key, inst, propagated) + if not propagated: + self._adapted.instrument_attribute(self.class_, key, inst) + + def post_configure_attribute(self, key): + super(_ClassInstrumentationAdapter, self).post_configure_attribute(key) + self._adapted.post_configure_attribute(self.class_, key, self[key]) + + def install_descriptor(self, key, inst): + self._adapted.install_descriptor(self.class_, key, inst) + + def uninstall_descriptor(self, key): + self._adapted.uninstall_descriptor(self.class_, key) + + def install_member(self, key, implementation): + self._adapted.install_member(self.class_, key, implementation) + + def uninstall_member(self, key): + self._adapted.uninstall_member(self.class_, key) + + def instrument_collection_class(self, key, collection_class): + return self._adapted.instrument_collection_class( + self.class_, key, collection_class) + + def initialize_collection(self, key, state, factory): + delegate = getattr(self._adapted, 'initialize_collection', None) + if delegate: + return delegate(key, state, factory) + else: + return ClassManager.initialize_collection(self, key, + state, factory) + + def new_instance(self, state=None): + instance = self.class_.__new__(self.class_) + self.setup_instance(instance, state) + return instance + + def _new_state_if_none(self, instance): + """Install a default InstanceState if none is present. + + A private convenience method used by the __init__ decorator. 
+ """ + if self.has_state(instance): + return False + else: + return self.setup_instance(instance) + + def setup_instance(self, instance, state=None): + self._adapted.initialize_instance_dict(self.class_, instance) + + if state is None: + state = self._state_constructor(instance, self) + + # the given instance is assumed to have no state + self._adapted.install_state(self.class_, instance, state) + return state + + def teardown_instance(self, instance): + self._adapted.remove_state(self.class_, instance) + + def has_state(self, instance): + try: + self._get_state(instance) + except orm_exc.NO_STATE: + return False + else: + return True + + def state_getter(self): + return self._get_state + + def dict_getter(self): + return self._get_dict + + +def _install_instrumented_lookups(): + """Replace global class/object management functions + with ExtendedInstrumentationRegistry implementations, which + allow multiple types of class managers to be present, + at the cost of performance. + + This function is called only by ExtendedInstrumentationRegistry + and unit tests specific to this behavior. + + The _reinstall_default_lookups() function can be called + after this one to re-establish the default functions. + + """ + _install_lookups( + dict( + instance_state=_instrumentation_factory.state_of, + instance_dict=_instrumentation_factory.dict_of, + manager_of_class=_instrumentation_factory.manager_of_class + ) + ) + + +def _reinstall_default_lookups(): + """Restore simplified lookups.""" + _install_lookups( + dict( + instance_state=_default_state_getter, + instance_dict=_default_dict_getter, + manager_of_class=_default_manager_getter + ) + ) + _instrumentation_factory._extended = False + + +def _install_lookups(lookups): + global instance_state, instance_dict, manager_of_class + instance_state = lookups['instance_state'] + instance_dict = lookups['instance_dict'] + manager_of_class = lookups['manager_of_class'] + orm_base.instance_state = attributes.instance_state = \ + orm_instrumentation.instance_state = instance_state + orm_base.instance_dict = attributes.instance_dict = \ + orm_instrumentation.instance_dict = instance_dict + orm_base.manager_of_class = attributes.manager_of_class = \ + orm_instrumentation.manager_of_class = manager_of_class diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/mutable.py b/lib/python3.4/site-packages/sqlalchemy/ext/mutable.py new file mode 100644 index 0000000..97f720c --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/ext/mutable.py @@ -0,0 +1,701 @@ +# ext/mutable.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Provide support for tracking of in-place changes to scalar values, +which are propagated into ORM change events on owning parent objects. + +.. versionadded:: 0.7 :mod:`sqlalchemy.ext.mutable` replaces SQLAlchemy's + legacy approach to in-place mutations of scalar values; see + :ref:`07_migration_mutation_extension`. + +.. _mutable_scalars: + +Establishing Mutability on Scalar Column Values +=============================================== + +A typical example of a "mutable" structure is a Python dictionary. 
+Following the example introduced in :ref:`types_toplevel`, we +begin with a custom type that marshals Python dictionaries into +JSON strings before being persisted:: + + from sqlalchemy.types import TypeDecorator, VARCHAR + import json + + class JSONEncodedDict(TypeDecorator): + "Represents an immutable structure as a json-encoded string." + + impl = VARCHAR + + def process_bind_param(self, value, dialect): + if value is not None: + value = json.dumps(value) + return value + + def process_result_value(self, value, dialect): + if value is not None: + value = json.loads(value) + return value + +The usage of ``json`` is only for the purposes of example. The +:mod:`sqlalchemy.ext.mutable` extension can be used +with any type whose target Python type may be mutable, including +:class:`.PickleType`, :class:`.postgresql.ARRAY`, etc. + +When using the :mod:`sqlalchemy.ext.mutable` extension, the value itself +tracks all parents which reference it. Below, we illustrate the a simple +version of the :class:`.MutableDict` dictionary object, which applies +the :class:`.Mutable` mixin to a plain Python dictionary:: + + from sqlalchemy.ext.mutable import Mutable + + class MutableDict(Mutable, dict): + @classmethod + def coerce(cls, key, value): + "Convert plain dictionaries to MutableDict." + + if not isinstance(value, MutableDict): + if isinstance(value, dict): + return MutableDict(value) + + # this call will raise ValueError + return Mutable.coerce(key, value) + else: + return value + + def __setitem__(self, key, value): + "Detect dictionary set events and emit change events." + + dict.__setitem__(self, key, value) + self.changed() + + def __delitem__(self, key): + "Detect dictionary del events and emit change events." + + dict.__delitem__(self, key) + self.changed() + +The above dictionary class takes the approach of subclassing the Python +built-in ``dict`` to produce a dict +subclass which routes all mutation events through ``__setitem__``. There are +variants on this approach, such as subclassing ``UserDict.UserDict`` or +``collections.MutableMapping``; the part that's important to this example is +that the :meth:`.Mutable.changed` method is called whenever an in-place +change to the datastructure takes place. + +We also redefine the :meth:`.Mutable.coerce` method which will be used to +convert any values that are not instances of ``MutableDict``, such +as the plain dictionaries returned by the ``json`` module, into the +appropriate type. Defining this method is optional; we could just as well +created our ``JSONEncodedDict`` such that it always returns an instance +of ``MutableDict``, and additionally ensured that all calling code +uses ``MutableDict`` explicitly. When :meth:`.Mutable.coerce` is not +overridden, any values applied to a parent object which are not instances +of the mutable type will raise a ``ValueError``. + +Our new ``MutableDict`` type offers a class method +:meth:`~.Mutable.as_mutable` which we can use within column metadata +to associate with types. This method grabs the given type object or +class and associates a listener that will detect all future mappings +of this type, applying event listening instrumentation to the mapped +attribute. 
Such as, with classical table metadata:: + + from sqlalchemy import Table, Column, Integer + + my_data = Table('my_data', metadata, + Column('id', Integer, primary_key=True), + Column('data', MutableDict.as_mutable(JSONEncodedDict)) + ) + +Above, :meth:`~.Mutable.as_mutable` returns an instance of ``JSONEncodedDict`` +(if the type object was not an instance already), which will intercept any +attributes which are mapped against this type. Below we establish a simple +mapping against the ``my_data`` table:: + + from sqlalchemy import mapper + + class MyDataClass(object): + pass + + # associates mutation listeners with MyDataClass.data + mapper(MyDataClass, my_data) + +The ``MyDataClass.data`` member will now be notified of in place changes +to its value. + +There's no difference in usage when using declarative:: + + from sqlalchemy.ext.declarative import declarative_base + + Base = declarative_base() + + class MyDataClass(Base): + __tablename__ = 'my_data' + id = Column(Integer, primary_key=True) + data = Column(MutableDict.as_mutable(JSONEncodedDict)) + +Any in-place changes to the ``MyDataClass.data`` member +will flag the attribute as "dirty" on the parent object:: + + >>> from sqlalchemy.orm import Session + + >>> sess = Session() + >>> m1 = MyDataClass(data={'value1':'foo'}) + >>> sess.add(m1) + >>> sess.commit() + + >>> m1.data['value1'] = 'bar' + >>> assert m1 in sess.dirty + True + +The ``MutableDict`` can be associated with all future instances +of ``JSONEncodedDict`` in one step, using +:meth:`~.Mutable.associate_with`. This is similar to +:meth:`~.Mutable.as_mutable` except it will intercept all occurrences +of ``MutableDict`` in all mappings unconditionally, without +the need to declare it individually:: + + MutableDict.associate_with(JSONEncodedDict) + + class MyDataClass(Base): + __tablename__ = 'my_data' + id = Column(Integer, primary_key=True) + data = Column(JSONEncodedDict) + + +Supporting Pickling +-------------------- + +The key to the :mod:`sqlalchemy.ext.mutable` extension relies upon the +placement of a ``weakref.WeakKeyDictionary`` upon the value object, which +stores a mapping of parent mapped objects keyed to the attribute name under +which they are associated with this value. ``WeakKeyDictionary`` objects are +not picklable, due to the fact that they contain weakrefs and function +callbacks. In our case, this is a good thing, since if this dictionary were +picklable, it could lead to an excessively large pickle size for our value +objects that are pickled by themselves outside of the context of the parent. +The developer responsibility here is only to provide a ``__getstate__`` method +that excludes the :meth:`~MutableBase._parents` collection from the pickle +stream:: + + class MyMutableType(Mutable): + def __getstate__(self): + d = self.__dict__.copy() + d.pop('_parents', None) + return d + +With our dictionary example, we need to return the contents of the dict itself +(and also restore them on __setstate__):: + + class MutableDict(Mutable, dict): + # .... + + def __getstate__(self): + return dict(self) + + def __setstate__(self, state): + self.update(state) + +In the case that our mutable value object is pickled as it is attached to one +or more parent objects that are also part of the pickle, the :class:`.Mutable` +mixin will re-establish the :attr:`.Mutable._parents` collection on each value +object as the owning parents themselves are unpickled. + +.. 
_mutable_composites: + +Establishing Mutability on Composites +===================================== + +Composites are a special ORM feature which allow a single scalar attribute to +be assigned an object value which represents information "composed" from one +or more columns from the underlying mapped table. The usual example is that of +a geometric "point", and is introduced in :ref:`mapper_composite`. + +.. versionchanged:: 0.7 + The internals of :func:`.orm.composite` have been + greatly simplified and in-place mutation detection is no longer enabled by + default; instead, the user-defined value must detect changes on its own and + propagate them to all owning parents. The :mod:`sqlalchemy.ext.mutable` + extension provides the helper class :class:`.MutableComposite`, which is a + slight variant on the :class:`.Mutable` class. + +As is the case with :class:`.Mutable`, the user-defined composite class +subclasses :class:`.MutableComposite` as a mixin, and detects and delivers +change events to its parents via the :meth:`.MutableComposite.changed` method. +In the case of a composite class, the detection is usually via the usage of +Python descriptors (i.e. ``@property``), or alternatively via the special +Python method ``__setattr__()``. Below we expand upon the ``Point`` class +introduced in :ref:`mapper_composite` to subclass :class:`.MutableComposite` +and to also route attribute set events via ``__setattr__`` to the +:meth:`.MutableComposite.changed` method:: + + from sqlalchemy.ext.mutable import MutableComposite + + class Point(MutableComposite): + def __init__(self, x, y): + self.x = x + self.y = y + + def __setattr__(self, key, value): + "Intercept set events" + + # set the attribute + object.__setattr__(self, key, value) + + # alert all parents to the change + self.changed() + + def __composite_values__(self): + return self.x, self.y + + def __eq__(self, other): + return isinstance(other, Point) and \\ + other.x == self.x and \\ + other.y == self.y + + def __ne__(self, other): + return not self.__eq__(other) + +The :class:`.MutableComposite` class uses a Python metaclass to automatically +establish listeners for any usage of :func:`.orm.composite` that specifies our +``Point`` type. Below, when ``Point`` is mapped to the ``Vertex`` class, +listeners are established which will route change events from ``Point`` +objects to each of the ``Vertex.start`` and ``Vertex.end`` attributes:: + + from sqlalchemy.orm import composite, mapper + from sqlalchemy import Table, Column + + vertices = Table('vertices', metadata, + Column('id', Integer, primary_key=True), + Column('x1', Integer), + Column('y1', Integer), + Column('x2', Integer), + Column('y2', Integer), + ) + + class Vertex(object): + pass + + mapper(Vertex, vertices, properties={ + 'start': composite(Point, vertices.c.x1, vertices.c.y1), + 'end': composite(Point, vertices.c.x2, vertices.c.y2) + }) + +Any in-place changes to the ``Vertex.start`` or ``Vertex.end`` members +will flag the attribute as "dirty" on the parent object:: + + >>> from sqlalchemy.orm import Session + + >>> sess = Session() + >>> v1 = Vertex(start=Point(3, 4), end=Point(12, 15)) + >>> sess.add(v1) + >>> sess.commit() + + >>> v1.end.x = 8 + >>> assert v1 in sess.dirty + True + +Coercing Mutable Composites +--------------------------- + +The :meth:`.MutableBase.coerce` method is also supported on composite types. +In the case of :class:`.MutableComposite`, the :meth:`.MutableBase.coerce` +method is only called for attribute set operations, not load operations. 
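+
+For example (a sketch, assuming the ``Point``/``Vertex`` mapping from the
+previous section together with the ``coerce`` method shown in the next code
+block), an attribute set operation may pass a plain tuple, which ``coerce``
+converts into a ``Point``::
+
+    v1 = Vertex(start=Point(3, 4), end=Point(12, 15))
+    v1.end = (8, 10)         # routed through Point.coerce() -> Point(8, 10)
+    v1.end = Point(8, 10)    # already the correct type; used as-is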
+
+Overriding the :meth:`.MutableBase.coerce` method is essentially equivalent
+to using a :func:`.validates` validation routine for all attributes which
+make use of the custom composite type::
+
+    class Point(MutableComposite):
+        # other Point methods
+        # ...
+
+        @classmethod
+        def coerce(cls, key, value):
+            if isinstance(value, tuple):
+                value = Point(*value)
+            elif not isinstance(value, Point):
+                raise ValueError("tuple or Point expected")
+            return value
+
+.. versionadded:: 0.7.10,0.8.0b2
+    Support for the :meth:`.MutableBase.coerce` method in conjunction with
+    objects of type :class:`.MutableComposite`.
+
+Supporting Pickling
+--------------------
+
+As is the case with :class:`.Mutable`, the :class:`.MutableComposite` helper
+class uses a ``weakref.WeakKeyDictionary`` available via the
+:attr:`MutableBase._parents` attribute which isn't picklable. If we need to
+pickle instances of ``Point`` or its owning class ``Vertex``, we at least need
+to define a ``__getstate__`` that doesn't include the ``_parents`` dictionary.
+Below we define both a ``__getstate__`` and a ``__setstate__`` that package up
+the minimal form of our ``Point`` class::
+
+    class Point(MutableComposite):
+        # ...
+
+        def __getstate__(self):
+            return self.x, self.y
+
+        def __setstate__(self, state):
+            self.x, self.y = state
+
+As with :class:`.Mutable`, the :class:`.MutableComposite` augments the
+pickling process of the parent's object-relational state so that the
+:attr:`MutableBase._parents` collection is restored to all ``Point`` objects.
+
+"""
+from ..orm.attributes import flag_modified
+from .. import event, types
+from ..orm import mapper, object_mapper, Mapper
+from ..util import memoized_property
+import weakref
+
+
+class MutableBase(object):
+    """Common base class to :class:`.Mutable`
+    and :class:`.MutableComposite`.
+
+    """
+
+    @memoized_property
+    def _parents(self):
+        """Dictionary of parent object->attribute name on the parent.
+
+        This attribute is a so-called "memoized" property.  It initializes
+        itself with a new ``weakref.WeakKeyDictionary`` the first time
+        it is accessed, returning the same object upon subsequent access.
+
+        """
+
+        return weakref.WeakKeyDictionary()
+
+    @classmethod
+    def coerce(cls, key, value):
+        """Given a value, coerce it into the target type.
+
+        Can be overridden by custom subclasses to coerce incoming
+        data into a particular type.
+
+        By default, raises ``ValueError``.
+
+        This method is called in different scenarios depending on whether
+        the parent class is of type :class:`.Mutable` or of type
+        :class:`.MutableComposite`.  In the case of the former, it is called
+        for both attribute-set operations as well as during ORM loading
+        operations.  For the latter, it is only called during attribute-set
+        operations; the mechanics of the :func:`.composite` construct
+        handle coercion during load operations.
+
+        :param key: string name of the ORM-mapped attribute being set.
+        :param value: the incoming value.
+        :return: the method should return the coerced value, or raise
+         ``ValueError`` if the coercion cannot be completed.
+
+        """
+        if value is None:
+            return None
+        msg = "Attribute '%s' does not accept objects of type %s"
+        raise ValueError(msg % (key, type(value)))
+
+    @classmethod
+    def _get_listen_keys(cls, attribute):
+        """Given a descriptor attribute, return a ``set()`` of the attribute
+        keys which indicate a change in the state of this attribute.
+
+        This is normally just ``set([attribute.key])``, but can be overridden
+        to provide for additional keys.  E.g.
a :class:`.MutableComposite` + augments this set with the attribute keys associated with the columns + that comprise the composite value. + + This collection is consulted in the case of intercepting the + :meth:`.InstanceEvents.refresh` and + :meth:`.InstanceEvents.refresh_flush` events, which pass along a list + of attribute names that have been refreshed; the list is compared + against this set to determine if action needs to be taken. + + .. versionadded:: 1.0.5 + + """ + return set([attribute.key]) + + @classmethod + def _listen_on_attribute(cls, attribute, coerce, parent_cls): + """Establish this type as a mutation listener for the given + mapped descriptor. + + """ + key = attribute.key + if parent_cls is not attribute.class_: + return + + # rely on "propagate" here + parent_cls = attribute.class_ + + listen_keys = cls._get_listen_keys(attribute) + + def load(state, *args): + """Listen for objects loaded or refreshed. + + Wrap the target data member's value with + ``Mutable``. + + """ + val = state.dict.get(key, None) + if val is not None: + if coerce: + val = cls.coerce(key, val) + state.dict[key] = val + val._parents[state.obj()] = key + + def load_attrs(state, ctx, attrs): + if not attrs or listen_keys.intersection(attrs): + load(state) + + def set(target, value, oldvalue, initiator): + """Listen for set/replace events on the target + data member. + + Establish a weak reference to the parent object + on the incoming value, remove it for the one + outgoing. + + """ + if value is oldvalue: + return value + + if not isinstance(value, cls): + value = cls.coerce(key, value) + if value is not None: + value._parents[target.obj()] = key + if isinstance(oldvalue, cls): + oldvalue._parents.pop(target.obj(), None) + return value + + def pickle(state, state_dict): + val = state.dict.get(key, None) + if val is not None: + if 'ext.mutable.values' not in state_dict: + state_dict['ext.mutable.values'] = [] + state_dict['ext.mutable.values'].append(val) + + def unpickle(state, state_dict): + if 'ext.mutable.values' in state_dict: + for val in state_dict['ext.mutable.values']: + val._parents[state.obj()] = key + + event.listen(parent_cls, 'load', load, + raw=True, propagate=True) + event.listen(parent_cls, 'refresh', load_attrs, + raw=True, propagate=True) + event.listen(parent_cls, 'refresh_flush', load_attrs, + raw=True, propagate=True) + event.listen(attribute, 'set', set, + raw=True, retval=True, propagate=True) + event.listen(parent_cls, 'pickle', pickle, + raw=True, propagate=True) + event.listen(parent_cls, 'unpickle', unpickle, + raw=True, propagate=True) + + +class Mutable(MutableBase): + """Mixin that defines transparent propagation of change + events to a parent object. + + See the example in :ref:`mutable_scalars` for usage information. + + """ + + def changed(self): + """Subclasses should call this method whenever change events occur.""" + + for parent, key in self._parents.items(): + flag_modified(parent, key) + + @classmethod + def associate_with_attribute(cls, attribute): + """Establish this type as a mutation listener for the given + mapped descriptor. + + """ + cls._listen_on_attribute(attribute, True, attribute.class_) + + @classmethod + def associate_with(cls, sqltype): + """Associate this wrapper with all future mapped columns + of the given type. + + This is a convenience method that calls + ``associate_with_attribute`` automatically. + + .. warning:: + + The listeners established by this method are *global* + to all mappers, and are *not* garbage collected. 
Only use + :meth:`.associate_with` for types that are permanent to an + application, not with ad-hoc types else this will cause unbounded + growth in memory usage. + + """ + + def listen_for_type(mapper, class_): + for prop in mapper.column_attrs: + if isinstance(prop.columns[0].type, sqltype): + cls.associate_with_attribute(getattr(class_, prop.key)) + + event.listen(mapper, 'mapper_configured', listen_for_type) + + @classmethod + def as_mutable(cls, sqltype): + """Associate a SQL type with this mutable Python type. + + This establishes listeners that will detect ORM mappings against + the given type, adding mutation event trackers to those mappings. + + The type is returned, unconditionally as an instance, so that + :meth:`.as_mutable` can be used inline:: + + Table('mytable', metadata, + Column('id', Integer, primary_key=True), + Column('data', MyMutableType.as_mutable(PickleType)) + ) + + Note that the returned type is always an instance, even if a class + is given, and that only columns which are declared specifically with + that type instance receive additional instrumentation. + + To associate a particular mutable type with all occurrences of a + particular type, use the :meth:`.Mutable.associate_with` classmethod + of the particular :class:`.Mutable` subclass to establish a global + association. + + .. warning:: + + The listeners established by this method are *global* + to all mappers, and are *not* garbage collected. Only use + :meth:`.as_mutable` for types that are permanent to an application, + not with ad-hoc types else this will cause unbounded growth + in memory usage. + + """ + sqltype = types.to_instance(sqltype) + + def listen_for_type(mapper, class_): + for prop in mapper.column_attrs: + if prop.columns[0].type is sqltype: + cls.associate_with_attribute(getattr(class_, prop.key)) + + event.listen(mapper, 'mapper_configured', listen_for_type) + + return sqltype + + +class MutableComposite(MutableBase): + """Mixin that defines transparent propagation of change + events on a SQLAlchemy "composite" object to its + owning parent or parents. + + See the example in :ref:`mutable_composites` for usage information. + + """ + + @classmethod + def _get_listen_keys(cls, attribute): + return set([attribute.key]).union(attribute.property._attribute_keys) + + def changed(self): + """Subclasses should call this method whenever change events occur.""" + + for parent, key in self._parents.items(): + + prop = object_mapper(parent).get_property(key) + for value, attr_name in zip( + self.__composite_values__(), + prop._attribute_keys): + setattr(parent, attr_name, value) + + +def _setup_composite_listener(): + def _listen_for_type(mapper, class_): + for prop in mapper.iterate_properties: + if (hasattr(prop, 'composite_class') and + isinstance(prop.composite_class, type) and + issubclass(prop.composite_class, MutableComposite)): + prop.composite_class._listen_on_attribute( + getattr(class_, prop.key), False, class_) + if not event.contains(Mapper, "mapper_configured", _listen_for_type): + event.listen(Mapper, 'mapper_configured', _listen_for_type) +_setup_composite_listener() + + +class MutableDict(Mutable, dict): + """A dictionary type that implements :class:`.Mutable`. + + The :class:`.MutableDict` object implements a dictionary that will + emit change events to the underlying mapping when the contents of + the dictionary are altered, including when values are added or removed. 
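+
+    For example, a brief usage sketch (``Base``, ``Column``, ``Integer`` and
+    a ``JSONEncodedDict``-style string type are assumed to be present, as in
+    the module documentation above)::
+
+        class MyDataClass(Base):
+            __tablename__ = 'my_data'
+            id = Column(Integer, primary_key=True)
+            data = Column(MutableDict.as_mutable(JSONEncodedDict))
+
+        m1 = MyDataClass(data={'a': 1})
+        m1.data['b'] = 2    # flags the "data" attribute as changed on m1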
+
+    Note that :class:`.MutableDict` does **not** apply mutable tracking to the
+    *values themselves* inside the dictionary. Therefore it is not a
+    sufficient solution for the use case of tracking deep changes to a
+    *recursive* dictionary structure, such as a JSON structure.  To support
+    this use case, build a subclass of :class:`.MutableDict` that provides
+    appropriate coercion to the values placed in the dictionary so that they
+    too are "mutable", and emit events up to their parent structure.
+
+    .. versionadded:: 0.8
+
+    """
+
+    def __setitem__(self, key, value):
+        """Detect dictionary set events and emit change events."""
+        dict.__setitem__(self, key, value)
+        self.changed()
+
+    def setdefault(self, key, value):
+        result = dict.setdefault(self, key, value)
+        self.changed()
+        return result
+
+    def __delitem__(self, key):
+        """Detect dictionary del events and emit change events."""
+        dict.__delitem__(self, key)
+        self.changed()
+
+    def update(self, *a, **kw):
+        dict.update(self, *a, **kw)
+        self.changed()
+
+    def pop(self, *arg):
+        result = dict.pop(self, *arg)
+        self.changed()
+        return result
+
+    def popitem(self):
+        result = dict.popitem(self)
+        self.changed()
+        return result
+
+    def clear(self):
+        dict.clear(self)
+        self.changed()
+
+    @classmethod
+    def coerce(cls, key, value):
+        """Convert plain dictionary to instance of this class."""
+        if not isinstance(value, cls):
+            if isinstance(value, dict):
+                return cls(value)
+            return Mutable.coerce(key, value)
+        else:
+            return value
+
+    def __getstate__(self):
+        return dict(self)
+
+    def __setstate__(self, state):
+        self.update(state)
diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/orderinglist.py b/lib/python3.4/site-packages/sqlalchemy/ext/orderinglist.py
new file mode 100644
index 0000000..d060a4f
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/ext/orderinglist.py
@@ -0,0 +1,380 @@
+# ext/orderinglist.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""A custom list that manages index/position information for contained
+elements.
+
+:author: Jason Kirtland
+
+``orderinglist`` is a helper for mutable ordered relationships.  It will
+intercept list operations performed on a :func:`.relationship`-managed
+collection and
+automatically synchronize changes in list position onto a target scalar
+attribute.
+
+Example: A ``slide`` table, where each row refers to zero or more entries
+in a related ``bullet`` table.  The bullets within a slide are
+displayed in order based on the value of the ``position`` column in the
+``bullet`` table.  As entries are reordered in memory, the value of the
+``position`` attribute should be updated to reflect the new sort order::
+
+
+    Base = declarative_base()
+
+    class Slide(Base):
+        __tablename__ = 'slide'
+
+        id = Column(Integer, primary_key=True)
+        name = Column(String)
+
+        bullets = relationship("Bullet", order_by="Bullet.position")
+
+    class Bullet(Base):
+        __tablename__ = 'bullet'
+        id = Column(Integer, primary_key=True)
+        slide_id = Column(Integer, ForeignKey('slide.id'))
+        position = Column(Integer)
+        text = Column(String)
+
+The standard relationship mapping will produce a list-like attribute on each
+``Slide`` containing all related ``Bullet`` objects,
+but coping with changes in ordering is not handled automatically.
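+
+A quick sketch of what is *not* automated with only the mapping above
+(using the ``Slide`` and ``Bullet`` classes from the example)::
+
+    s = Slide()
+    s.bullets.append(Bullet())
+    # Bullet.position remains None here; nothing assigns or renumbers it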
+
+When appending a ``Bullet`` into ``Slide.bullets``, the ``Bullet.position``
+attribute will remain unset until manually assigned.  When the ``Bullet``
+is inserted into the middle of the list, the following ``Bullet`` objects
+will also need to be renumbered.
+
+The :class:`.OrderingList` object automates this task, managing the
+``position`` attribute on all ``Bullet`` objects in the collection.  It is
+constructed using the :func:`.ordering_list` factory::
+
+    from sqlalchemy.ext.orderinglist import ordering_list
+
+    Base = declarative_base()
+
+    class Slide(Base):
+        __tablename__ = 'slide'
+
+        id = Column(Integer, primary_key=True)
+        name = Column(String)
+
+        bullets = relationship("Bullet", order_by="Bullet.position",
+                               collection_class=ordering_list('position'))
+
+    class Bullet(Base):
+        __tablename__ = 'bullet'
+        id = Column(Integer, primary_key=True)
+        slide_id = Column(Integer, ForeignKey('slide.id'))
+        position = Column(Integer)
+        text = Column(String)
+
+With the above mapping the ``Bullet.position`` attribute is managed::
+
+    >>> s = Slide()
+    >>> s.bullets.append(Bullet())
+    >>> s.bullets.append(Bullet())
+    >>> s.bullets[1].position
+    1
+    >>> s.bullets.insert(1, Bullet())
+    >>> s.bullets[2].position
+    2
+
+The :class:`.OrderingList` construct only works with **changes** to a
+collection, and not the initial load from the database, and requires that the
+list be sorted when loaded.  Therefore, be sure to specify ``order_by`` on the
+:func:`.relationship` against the target ordering attribute, so that the
+ordering is correct when first loaded.
+
+.. warning::
+
+  :class:`.OrderingList` only provides limited functionality when a primary
+  key column or unique column is the target of the sort.  Operations
+  that are unsupported or are problematic include:
+
+    * two entries must trade values.  This is not supported directly in the
+      case of a primary key or unique constraint because it means at least
+      one row would need to be temporarily removed first, or changed to
+      a third, neutral value while the switch occurs.
+
+    * an entry must be deleted in order to make room for a new entry.
+      SQLAlchemy's unit of work performs all INSERTs before DELETEs within a
+      single flush.  In the case of a primary key, it will trade an
+      INSERT/DELETE of the same primary key for an UPDATE statement in order
+      to lessen the impact of this limitation, however this does not take
+      place for a UNIQUE column.
+      A future feature will allow the "DELETE before INSERT" behavior to be
+      possible, alleviating this limitation, though this feature will require
+      explicit configuration at the mapper level for sets of columns that
+      are to be handled in this way.
+
+:func:`.ordering_list` takes the name of the related object's ordering
+attribute as an argument.  By default, the zero-based integer index of the
+object's position in the :func:`.ordering_list` is synchronized with the
+ordering attribute: index 0 will get position 0, index 1 position 1, etc.  To
+start numbering at 1 or some other integer, provide ``count_from=1``.
+
+
+"""
+from ..orm.collections import collection, collection_adapter
+from .. import util
+
+__all__ = ['ordering_list']
+
+
+def ordering_list(attr, count_from=None, **kw):
+    """Prepares an :class:`OrderingList` factory for use in mapper definitions.
+
+    Returns an object suitable for use as an argument to a Mapper
+    relationship's ``collection_class`` option.
e.g.:: + + from sqlalchemy.ext.orderinglist import ordering_list + + class Slide(Base): + __tablename__ = 'slide' + + id = Column(Integer, primary_key=True) + name = Column(String) + + bullets = relationship("Bullet", order_by="Bullet.position", + collection_class=ordering_list('position')) + + :param attr: + Name of the mapped attribute to use for storage and retrieval of + ordering information + + :param count_from: + Set up an integer-based ordering, starting at ``count_from``. For + example, ``ordering_list('pos', count_from=1)`` would create a 1-based + list in SQL, storing the value in the 'pos' column. Ignored if + ``ordering_func`` is supplied. + + Additional arguments are passed to the :class:`.OrderingList` constructor. + + """ + + kw = _unsugar_count_from(count_from=count_from, **kw) + return lambda: OrderingList(attr, **kw) + + +# Ordering utility functions + + +def count_from_0(index, collection): + """Numbering function: consecutive integers starting at 0.""" + + return index + + +def count_from_1(index, collection): + """Numbering function: consecutive integers starting at 1.""" + + return index + 1 + + +def count_from_n_factory(start): + """Numbering function: consecutive integers starting at arbitrary start.""" + + def f(index, collection): + return index + start + try: + f.__name__ = 'count_from_%i' % start + except TypeError: + pass + return f + + +def _unsugar_count_from(**kw): + """Builds counting functions from keyword arguments. + + Keyword argument filter, prepares a simple ``ordering_func`` from a + ``count_from`` argument, otherwise passes ``ordering_func`` on unchanged. + """ + + count_from = kw.pop('count_from', None) + if kw.get('ordering_func', None) is None and count_from is not None: + if count_from == 0: + kw['ordering_func'] = count_from_0 + elif count_from == 1: + kw['ordering_func'] = count_from_1 + else: + kw['ordering_func'] = count_from_n_factory(count_from) + return kw + + +class OrderingList(list): + """A custom list that manages position information for its children. + + The :class:`.OrderingList` object is normally set up using the + :func:`.ordering_list` factory function, used in conjunction with + the :func:`.relationship` function. + + """ + + def __init__(self, ordering_attr=None, ordering_func=None, + reorder_on_append=False): + """A custom list that manages position information for its children. + + ``OrderingList`` is a ``collection_class`` list implementation that + syncs position in a Python list with a position attribute on the + mapped objects. + + This implementation relies on the list starting in the proper order, + so be **sure** to put an ``order_by`` on your relationship. + + :param ordering_attr: + Name of the attribute that stores the object's order in the + relationship. + + :param ordering_func: Optional. A function that maps the position in + the Python list to a value to store in the + ``ordering_attr``. Values returned are usually (but need not be!) + integers. + + An ``ordering_func`` is called with two positional parameters: the + index of the element in the list, and the list itself. + + If omitted, Python list indexes are used for the attribute values. + Two basic pre-built numbering functions are provided in this module: + ``count_from_0`` and ``count_from_1``. For more exotic examples + like stepped numbering, alphabetical and Fibonacci numbering, see + the unit tests. + + :param reorder_on_append: + Default False. 
When appending an object with an existing (non-None) + ordering value, that value will be left untouched unless + ``reorder_on_append`` is true. This is an optimization to avoid a + variety of dangerous unexpected database writes. + + SQLAlchemy will add instances to the list via append() when your + object loads. If for some reason the result set from the database + skips a step in the ordering (say, row '1' is missing but you get + '2', '3', and '4'), reorder_on_append=True would immediately + renumber the items to '1', '2', '3'. If you have multiple sessions + making changes, any of whom happen to load this collection even in + passing, all of the sessions would try to "clean up" the numbering + in their commits, possibly causing all but one to fail with a + concurrent modification error. + + Recommend leaving this with the default of False, and just call + ``reorder()`` if you're doing ``append()`` operations with + previously ordered instances or when doing some housekeeping after + manual sql operations. + + """ + self.ordering_attr = ordering_attr + if ordering_func is None: + ordering_func = count_from_0 + self.ordering_func = ordering_func + self.reorder_on_append = reorder_on_append + + # More complex serialization schemes (multi column, e.g.) are possible by + # subclassing and reimplementing these two methods. + def _get_order_value(self, entity): + return getattr(entity, self.ordering_attr) + + def _set_order_value(self, entity, value): + setattr(entity, self.ordering_attr, value) + + def reorder(self): + """Synchronize ordering for the entire collection. + + Sweeps through the list and ensures that each object has accurate + ordering information set. + + """ + for index, entity in enumerate(self): + self._order_entity(index, entity, True) + + # As of 0.5, _reorder is no longer semi-private + _reorder = reorder + + def _order_entity(self, index, entity, reorder=True): + have = self._get_order_value(entity) + + # Don't disturb existing ordering if reorder is False + if have is not None and not reorder: + return + + should_be = self.ordering_func(index, self) + if have != should_be: + self._set_order_value(entity, should_be) + + def append(self, entity): + super(OrderingList, self).append(entity) + self._order_entity(len(self) - 1, entity, self.reorder_on_append) + + def _raw_append(self, entity): + """Append without any ordering behavior.""" + + super(OrderingList, self).append(entity) + _raw_append = collection.adds(1)(_raw_append) + + def insert(self, index, entity): + super(OrderingList, self).insert(index, entity) + self._reorder() + + def remove(self, entity): + super(OrderingList, self).remove(entity) + + adapter = collection_adapter(self) + if adapter and adapter._referenced_by_owner: + self._reorder() + + def pop(self, index=-1): + entity = super(OrderingList, self).pop(index) + self._reorder() + return entity + + def __setitem__(self, index, entity): + if isinstance(index, slice): + step = index.step or 1 + start = index.start or 0 + if start < 0: + start += len(self) + stop = index.stop or len(self) + if stop < 0: + stop += len(self) + + for i in range(start, stop, step): + self.__setitem__(i, entity[i]) + else: + self._order_entity(index, entity, True) + super(OrderingList, self).__setitem__(index, entity) + + def __delitem__(self, index): + super(OrderingList, self).__delitem__(index) + self._reorder() + + def __setslice__(self, start, end, values): + super(OrderingList, self).__setslice__(start, end, values) + self._reorder() + + def __delslice__(self, start, end): 
+        super(OrderingList, self).__delslice__(start, end)
+        self._reorder()
+
+    def __reduce__(self):
+        return _reconstitute, (self.__class__, self.__dict__, list(self))
+
+    for func_name, func in list(locals().items()):
+        if (util.callable(func) and func.__name__ == func_name and
+                not func.__doc__ and hasattr(list, func_name)):
+            func.__doc__ = getattr(list, func_name).__doc__
+    del func_name, func
+
+
+def _reconstitute(cls, dict_, items):
+    """ Reconstitute an :class:`.OrderingList`.
+
+    This is the adjoint to :meth:`.OrderingList.__reduce__`.  It is used for
+    unpickling :class:`.OrderingList` objects.
+
+    """
+    obj = cls.__new__(cls)
+    obj.__dict__.update(dict_)
+    list.extend(obj, items)
+    return obj
diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/serializer.py b/lib/python3.4/site-packages/sqlalchemy/ext/serializer.py
new file mode 100644
index 0000000..893f7be
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/ext/serializer.py
@@ -0,0 +1,159 @@
+# ext/serializer.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Serializer/Deserializer objects for usage with SQLAlchemy query structures,
+allowing "contextual" deserialization.
+
+Any SQLAlchemy query structure, either based on sqlalchemy.sql.*
+or sqlalchemy.orm.* can be used.  The mappers, Tables, Columns, Session
+etc. which are referenced by the structure are not persisted in serialized
+form, but are instead re-associated with the query structure
+when it is deserialized.
+
+Usage is nearly the same as that of the standard Python pickle module::
+
+    from sqlalchemy.ext.serializer import loads, dumps
+    metadata = MetaData(bind=some_engine)
+    Session = scoped_session(sessionmaker())
+
+    # ... define mappers
+
+    query = Session.query(MyClass).\
+        filter(MyClass.somedata == 'foo').order_by(MyClass.sortkey)
+
+    # pickle the query
+    serialized = dumps(query)
+
+    # unpickle.  Pass in metadata + scoped_session
+    query2 = loads(serialized, metadata, Session)
+
+    print(query2.all())
+
+Similar restrictions as when using raw pickle apply; mapped classes must
+themselves be pickleable, meaning they are importable from a module-level
+namespace.
+
+The serializer module is only appropriate for query structures.  It is not
+needed for:
+
+* instances of user-defined classes.  These contain no references to engines,
+  sessions or expression constructs in the typical case and can be serialized
+  directly.
+
+* Table metadata that is to be loaded entirely from the serialized structure
+  (i.e. is not already declared in the application).  Regular
+  pickle.loads()/dumps() can be used to fully dump any ``MetaData`` object,
+  typically one which was reflected from an existing database at some previous
+  point in time.  The serializer module is specifically for the opposite case,
+  where the Table metadata is already present in memory.
+
+"""
+
+from ..orm import class_mapper
+from ..orm.session import Session
+from ..orm.mapper import Mapper
+from ..orm.interfaces import MapperProperty
+from ..orm.attributes import QueryableAttribute
+from ..
import Table, Column +from ..engine import Engine +from ..util import pickle, byte_buffer, b64encode, b64decode, text_type +import re + + +__all__ = ['Serializer', 'Deserializer', 'dumps', 'loads'] + + +def Serializer(*args, **kw): + pickler = pickle.Pickler(*args, **kw) + + def persistent_id(obj): + # print "serializing:", repr(obj) + if isinstance(obj, QueryableAttribute): + cls = obj.impl.class_ + key = obj.impl.key + id = "attribute:" + key + ":" + b64encode(pickle.dumps(cls)) + elif isinstance(obj, Mapper) and not obj.non_primary: + id = "mapper:" + b64encode(pickle.dumps(obj.class_)) + elif isinstance(obj, MapperProperty) and not obj.parent.non_primary: + id = "mapperprop:" + b64encode(pickle.dumps(obj.parent.class_)) + \ + ":" + obj.key + elif isinstance(obj, Table): + id = "table:" + text_type(obj.key) + elif isinstance(obj, Column) and isinstance(obj.table, Table): + id = "column:" + \ + text_type(obj.table.key) + ":" + text_type(obj.key) + elif isinstance(obj, Session): + id = "session:" + elif isinstance(obj, Engine): + id = "engine:" + else: + return None + return id + + pickler.persistent_id = persistent_id + return pickler + +our_ids = re.compile( + r'(mapperprop|mapper|table|column|session|attribute|engine):(.*)') + + +def Deserializer(file, metadata=None, scoped_session=None, engine=None): + unpickler = pickle.Unpickler(file) + + def get_engine(): + if engine: + return engine + elif scoped_session and scoped_session().bind: + return scoped_session().bind + elif metadata and metadata.bind: + return metadata.bind + else: + return None + + def persistent_load(id): + m = our_ids.match(text_type(id)) + if not m: + return None + else: + type_, args = m.group(1, 2) + if type_ == 'attribute': + key, clsarg = args.split(":") + cls = pickle.loads(b64decode(clsarg)) + return getattr(cls, key) + elif type_ == "mapper": + cls = pickle.loads(b64decode(args)) + return class_mapper(cls) + elif type_ == "mapperprop": + mapper, keyname = args.split(':') + cls = pickle.loads(b64decode(mapper)) + return class_mapper(cls).attrs[keyname] + elif type_ == "table": + return metadata.tables[args] + elif type_ == "column": + table, colname = args.split(':') + return metadata.tables[table].c[colname] + elif type_ == "session": + return scoped_session() + elif type_ == "engine": + return get_engine() + else: + raise Exception("Unknown token: %s" % type_) + unpickler.persistent_load = persistent_load + return unpickler + + +def dumps(obj, protocol=0): + buf = byte_buffer() + pickler = Serializer(buf, protocol) + pickler.dump(obj) + return buf.getvalue() + + +def loads(data, metadata=None, scoped_session=None, engine=None): + buf = byte_buffer(data) + unpickler = Deserializer(buf, metadata, scoped_session, engine) + return unpickler.load() diff --git a/lib/python3.4/site-packages/sqlalchemy/inspection.py b/lib/python3.4/site-packages/sqlalchemy/inspection.py new file mode 100644 index 0000000..5c16c45 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/inspection.py @@ -0,0 +1,93 @@ +# sqlalchemy/inspect.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""The inspection module provides the :func:`.inspect` function, +which delivers runtime information about a wide variety +of SQLAlchemy objects, both within the Core as well as the +ORM. 
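+
+For example (a brief sketch; ``some_engine`` and a mapped class ``User`` are
+assumed to exist)::
+
+    from sqlalchemy import inspect
+
+    insp = inspect(some_engine)    # an Inspector for schema introspection
+    table_names = insp.get_table_names()
+
+    mapper = inspect(User)         # the Mapper for a mapped class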
+ +The :func:`.inspect` function is the entry point to SQLAlchemy's +public API for viewing the configuration and construction +of in-memory objects. Depending on the type of object +passed to :func:`.inspect`, the return value will either be +a related object which provides a known interface, or in many +cases it will return the object itself. + +The rationale for :func:`.inspect` is twofold. One is that +it replaces the need to be aware of a large variety of "information +getting" functions in SQLAlchemy, such as :meth:`.Inspector.from_engine`, +:func:`.orm.attributes.instance_state`, :func:`.orm.class_mapper`, +and others. The other is that the return value of :func:`.inspect` +is guaranteed to obey a documented API, thus allowing third party +tools which build on top of SQLAlchemy configurations to be constructed +in a forwards-compatible way. + +.. versionadded:: 0.8 The :func:`.inspect` system is introduced + as of version 0.8. + +""" + +from . import util, exc +_registrars = util.defaultdict(list) + + +def inspect(subject, raiseerr=True): + """Produce an inspection object for the given target. + + The returned value in some cases may be the + same object as the one given, such as if a + :class:`.Mapper` object is passed. In other + cases, it will be an instance of the registered + inspection type for the given object, such as + if an :class:`.engine.Engine` is passed, an + :class:`.Inspector` object is returned. + + :param subject: the subject to be inspected. + :param raiseerr: When ``True``, if the given subject + does not + correspond to a known SQLAlchemy inspected type, + :class:`sqlalchemy.exc.NoInspectionAvailable` + is raised. If ``False``, ``None`` is returned. + + """ + type_ = type(subject) + for cls in type_.__mro__: + if cls in _registrars: + reg = _registrars[cls] + if reg is True: + return subject + ret = reg(subject) + if ret is not None: + break + else: + reg = ret = None + + if raiseerr and ( + reg is None or ret is None + ): + raise exc.NoInspectionAvailable( + "No inspection system is " + "available for object of type %s" % + type_) + return ret + + +def _inspects(*types): + def decorate(fn_or_cls): + for type_ in types: + if type_ in _registrars: + raise AssertionError( + "Type %s is already " + "registered" % type_) + _registrars[type_] = fn_or_cls + return fn_or_cls + return decorate + + +def _self_inspects(cls): + _inspects(cls)(True) + return cls diff --git a/lib/python3.4/site-packages/sqlalchemy/interfaces.py b/lib/python3.4/site-packages/sqlalchemy/interfaces.py new file mode 100644 index 0000000..464ad9f --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/interfaces.py @@ -0,0 +1,312 @@ +# sqlalchemy/interfaces.py +# Copyright (C) 2007-2016 the SQLAlchemy authors and contributors +# +# Copyright (C) 2007 Jason Kirtland jek@discorporate.us +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Deprecated core event interfaces. + +This module is **deprecated** and is superseded by the +event system. + +""" + +from . import event, util + + +class PoolListener(object): + """Hooks into the lifecycle of connections in a :class:`.Pool`. + + .. note:: + + :class:`.PoolListener` is deprecated. Please + refer to :class:`.PoolEvents`. + + Usage:: + + class MyListener(PoolListener): + def connect(self, dbapi_con, con_record): + '''perform connect operations''' + # etc. 
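+            # any subset of the PoolListener hooks (connect, first_connect,
+            # checkout, checkin) may be implemented in the same way; the Pool
+            # only subscribes the listener for the methods it actually provides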
+ + # create a new pool with a listener + p = QueuePool(..., listeners=[MyListener()]) + + # add a listener after the fact + p.add_listener(MyListener()) + + # usage with create_engine() + e = create_engine("url://", listeners=[MyListener()]) + + All of the standard connection :class:`~sqlalchemy.pool.Pool` types can + accept event listeners for key connection lifecycle events: + creation, pool check-out and check-in. There are no events fired + when a connection closes. + + For any given DB-API connection, there will be one ``connect`` + event, `n` number of ``checkout`` events, and either `n` or `n - 1` + ``checkin`` events. (If a ``Connection`` is detached from its + pool via the ``detach()`` method, it won't be checked back in.) + + These are low-level events for low-level objects: raw Python + DB-API connections, without the conveniences of the SQLAlchemy + ``Connection`` wrapper, ``Dialect`` services or ``ClauseElement`` + execution. If you execute SQL through the connection, explicitly + closing all cursors and other resources is recommended. + + Events also receive a ``_ConnectionRecord``, a long-lived internal + ``Pool`` object that basically represents a "slot" in the + connection pool. ``_ConnectionRecord`` objects have one public + attribute of note: ``info``, a dictionary whose contents are + scoped to the lifetime of the DB-API connection managed by the + record. You can use this shared storage area however you like. + + There is no need to subclass ``PoolListener`` to handle events. + Any class that implements one or more of these methods can be used + as a pool listener. The ``Pool`` will inspect the methods + provided by a listener object and add the listener to one or more + internal event queues based on its capabilities. In terms of + efficiency and function call overhead, you're much better off only + providing implementations for the hooks you'll be using. + + """ + + @classmethod + def _adapt_listener(cls, self, listener): + """Adapt a :class:`.PoolListener` to individual + :class:`event.Dispatch` events. + + """ + + listener = util.as_interface(listener, + methods=('connect', 'first_connect', + 'checkout', 'checkin')) + if hasattr(listener, 'connect'): + event.listen(self, 'connect', listener.connect) + if hasattr(listener, 'first_connect'): + event.listen(self, 'first_connect', listener.first_connect) + if hasattr(listener, 'checkout'): + event.listen(self, 'checkout', listener.checkout) + if hasattr(listener, 'checkin'): + event.listen(self, 'checkin', listener.checkin) + + def connect(self, dbapi_con, con_record): + """Called once for each new DB-API connection or Pool's ``creator()``. + + dbapi_con + A newly connected raw DB-API connection (not a SQLAlchemy + ``Connection`` wrapper). + + con_record + The ``_ConnectionRecord`` that persistently manages the connection + + """ + + def first_connect(self, dbapi_con, con_record): + """Called exactly once for the first DB-API connection. + + dbapi_con + A newly connected raw DB-API connection (not a SQLAlchemy + ``Connection`` wrapper). + + con_record + The ``_ConnectionRecord`` that persistently manages the connection + + """ + + def checkout(self, dbapi_con, con_record, con_proxy): + """Called when a connection is retrieved from the Pool. + + dbapi_con + A raw DB-API connection + + con_record + The ``_ConnectionRecord`` that persistently manages the connection + + con_proxy + The ``_ConnectionFairy`` which manages the connection for the span of + the current checkout. 
+ + If you raise an ``exc.DisconnectionError``, the current + connection will be disposed and a fresh connection retrieved. + Processing of all checkout listeners will abort and restart + using the new connection. + """ + + def checkin(self, dbapi_con, con_record): + """Called when a connection returns to the pool. + + Note that the connection may be closed, and may be None if the + connection has been invalidated. ``checkin`` will not be called + for detached connections. (They do not return to the pool.) + + dbapi_con + A raw DB-API connection + + con_record + The ``_ConnectionRecord`` that persistently manages the connection + + """ + + +class ConnectionProxy(object): + """Allows interception of statement execution by Connections. + + .. note:: + + :class:`.ConnectionProxy` is deprecated. Please + refer to :class:`.ConnectionEvents`. + + Either or both of the ``execute()`` and ``cursor_execute()`` + may be implemented to intercept compiled statement and + cursor level executions, e.g.:: + + class MyProxy(ConnectionProxy): + def execute(self, conn, execute, clauseelement, + *multiparams, **params): + print "compiled statement:", clauseelement + return execute(clauseelement, *multiparams, **params) + + def cursor_execute(self, execute, cursor, statement, + parameters, context, executemany): + print "raw statement:", statement + return execute(cursor, statement, parameters, context) + + The ``execute`` argument is a function that will fulfill the default + execution behavior for the operation. The signature illustrated + in the example should be used. + + The proxy is installed into an :class:`~sqlalchemy.engine.Engine` via + the ``proxy`` argument:: + + e = create_engine('someurl://', proxy=MyProxy()) + + """ + + @classmethod + def _adapt_listener(cls, self, listener): + + def adapt_execute(conn, clauseelement, multiparams, params): + + def execute_wrapper(clauseelement, *multiparams, **params): + return clauseelement, multiparams, params + + return listener.execute(conn, execute_wrapper, + clauseelement, *multiparams, + **params) + + event.listen(self, 'before_execute', adapt_execute) + + def adapt_cursor_execute(conn, cursor, statement, + parameters, context, executemany): + + def execute_wrapper( + cursor, + statement, + parameters, + context, + ): + return statement, parameters + + return listener.cursor_execute( + execute_wrapper, + cursor, + statement, + parameters, + context, + executemany, + ) + + event.listen(self, 'before_cursor_execute', adapt_cursor_execute) + + def do_nothing_callback(*arg, **kw): + pass + + def adapt_listener(fn): + + def go(conn, *arg, **kw): + fn(conn, do_nothing_callback, *arg, **kw) + + return util.update_wrapper(go, fn) + + event.listen(self, 'begin', adapt_listener(listener.begin)) + event.listen(self, 'rollback', + adapt_listener(listener.rollback)) + event.listen(self, 'commit', adapt_listener(listener.commit)) + event.listen(self, 'savepoint', + adapt_listener(listener.savepoint)) + event.listen(self, 'rollback_savepoint', + adapt_listener(listener.rollback_savepoint)) + event.listen(self, 'release_savepoint', + adapt_listener(listener.release_savepoint)) + event.listen(self, 'begin_twophase', + adapt_listener(listener.begin_twophase)) + event.listen(self, 'prepare_twophase', + adapt_listener(listener.prepare_twophase)) + event.listen(self, 'rollback_twophase', + adapt_listener(listener.rollback_twophase)) + event.listen(self, 'commit_twophase', + adapt_listener(listener.commit_twophase)) + + def execute(self, conn, execute, clauseelement, 
*multiparams, **params): + """Intercept high level execute() events.""" + + return execute(clauseelement, *multiparams, **params) + + def cursor_execute(self, execute, cursor, statement, parameters, + context, executemany): + """Intercept low-level cursor execute() events.""" + + return execute(cursor, statement, parameters, context) + + def begin(self, conn, begin): + """Intercept begin() events.""" + + return begin() + + def rollback(self, conn, rollback): + """Intercept rollback() events.""" + + return rollback() + + def commit(self, conn, commit): + """Intercept commit() events.""" + + return commit() + + def savepoint(self, conn, savepoint, name=None): + """Intercept savepoint() events.""" + + return savepoint(name=name) + + def rollback_savepoint(self, conn, rollback_savepoint, name, context): + """Intercept rollback_savepoint() events.""" + + return rollback_savepoint(name, context) + + def release_savepoint(self, conn, release_savepoint, name, context): + """Intercept release_savepoint() events.""" + + return release_savepoint(name, context) + + def begin_twophase(self, conn, begin_twophase, xid): + """Intercept begin_twophase() events.""" + + return begin_twophase(xid) + + def prepare_twophase(self, conn, prepare_twophase, xid): + """Intercept prepare_twophase() events.""" + + return prepare_twophase(xid) + + def rollback_twophase(self, conn, rollback_twophase, xid, is_prepared): + """Intercept rollback_twophase() events.""" + + return rollback_twophase(xid, is_prepared) + + def commit_twophase(self, conn, commit_twophase, xid, is_prepared): + """Intercept commit_twophase() events.""" + + return commit_twophase(xid, is_prepared) diff --git a/lib/python3.4/site-packages/sqlalchemy/log.py b/lib/python3.4/site-packages/sqlalchemy/log.py new file mode 100644 index 0000000..b23de90 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/log.py @@ -0,0 +1,217 @@ +# sqlalchemy/log.py +# Copyright (C) 2006-2016 the SQLAlchemy authors and contributors +# +# Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Logging control and utilities. + +Control of logging for SA can be performed from the regular python logging +module. The regular dotted module namespace is used, starting at +'sqlalchemy'. For class-level logging, the class name is appended. + +The "echo" keyword parameter, available on SQLA :class:`.Engine` +and :class:`.Pool` objects, corresponds to a logger specific to that +instance only. + +""" + +import logging +import sys + +# set initial level to WARN. This so that +# log statements don't occur in the absence of explicit +# logging being enabled for 'sqlalchemy'. +rootlogger = logging.getLogger('sqlalchemy') +if rootlogger.level == logging.NOTSET: + rootlogger.setLevel(logging.WARN) + + +def _add_default_handler(logger): + handler = logging.StreamHandler(sys.stdout) + handler.setFormatter(logging.Formatter( + '%(asctime)s %(levelname)s %(name)s %(message)s')) + logger.addHandler(handler) + + +_logged_classes = set() + + +def class_logger(cls): + logger = logging.getLogger(cls.__module__ + "." 
+ cls.__name__) + cls._should_log_debug = lambda self: logger.isEnabledFor(logging.DEBUG) + cls._should_log_info = lambda self: logger.isEnabledFor(logging.INFO) + cls.logger = logger + _logged_classes.add(cls) + return cls + + +class Identified(object): + logging_name = None + + def _should_log_debug(self): + return self.logger.isEnabledFor(logging.DEBUG) + + def _should_log_info(self): + return self.logger.isEnabledFor(logging.INFO) + + +class InstanceLogger(object): + """A logger adapter (wrapper) for :class:`.Identified` subclasses. + + This allows multiple instances (e.g. Engine or Pool instances) + to share a logger, but have its verbosity controlled on a + per-instance basis. + + The basic functionality is to return a logging level + which is based on an instance's echo setting. + + Default implementation is: + + 'debug' -> logging.DEBUG + True -> logging.INFO + False -> Effective level of underlying logger + (logging.WARNING by default) + None -> same as False + """ + + # Map echo settings to logger levels + _echo_map = { + None: logging.NOTSET, + False: logging.NOTSET, + True: logging.INFO, + 'debug': logging.DEBUG, + } + + def __init__(self, echo, name): + self.echo = echo + self.logger = logging.getLogger(name) + + # if echo flag is enabled and no handlers, + # add a handler to the list + if self._echo_map[echo] <= logging.INFO \ + and not self.logger.handlers: + _add_default_handler(self.logger) + + # + # Boilerplate convenience methods + # + def debug(self, msg, *args, **kwargs): + """Delegate a debug call to the underlying logger.""" + + self.log(logging.DEBUG, msg, *args, **kwargs) + + def info(self, msg, *args, **kwargs): + """Delegate an info call to the underlying logger.""" + + self.log(logging.INFO, msg, *args, **kwargs) + + def warning(self, msg, *args, **kwargs): + """Delegate a warning call to the underlying logger.""" + + self.log(logging.WARNING, msg, *args, **kwargs) + + warn = warning + + def error(self, msg, *args, **kwargs): + """ + Delegate an error call to the underlying logger. + """ + self.log(logging.ERROR, msg, *args, **kwargs) + + def exception(self, msg, *args, **kwargs): + """Delegate an exception call to the underlying logger.""" + + kwargs["exc_info"] = 1 + self.log(logging.ERROR, msg, *args, **kwargs) + + def critical(self, msg, *args, **kwargs): + """Delegate a critical call to the underlying logger.""" + + self.log(logging.CRITICAL, msg, *args, **kwargs) + + def log(self, level, msg, *args, **kwargs): + """Delegate a log call to the underlying logger. + + The level here is determined by the echo + flag as well as that of the underlying logger, and + logger._log() is called directly. + + """ + + # inline the logic from isEnabledFor(), + # getEffectiveLevel(), to avoid overhead. 
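+        # (the checks below mirror logging.Logger.isEnabledFor(): first the
+        # module-wide logging.disable() cut-off, then the echo-derived level,
+        # falling back to the wrapped logger's effective level when the echo
+        # flag maps to NOTSET)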
+ + if self.logger.manager.disable >= level: + return + + selected_level = self._echo_map[self.echo] + if selected_level == logging.NOTSET: + selected_level = self.logger.getEffectiveLevel() + + if level >= selected_level: + self.logger._log(level, msg, args, **kwargs) + + def isEnabledFor(self, level): + """Is this logger enabled for level 'level'?""" + + if self.logger.manager.disable >= level: + return False + return level >= self.getEffectiveLevel() + + def getEffectiveLevel(self): + """What's the effective level for this logger?""" + + level = self._echo_map[self.echo] + if level == logging.NOTSET: + level = self.logger.getEffectiveLevel() + return level + + +def instance_logger(instance, echoflag=None): + """create a logger for an instance that implements :class:`.Identified`.""" + + if instance.logging_name: + name = "%s.%s.%s" % (instance.__class__.__module__, + instance.__class__.__name__, + instance.logging_name) + else: + name = "%s.%s" % (instance.__class__.__module__, + instance.__class__.__name__) + + instance._echo = echoflag + + if echoflag in (False, None): + # if no echo setting or False, return a Logger directly, + # avoiding overhead of filtering + logger = logging.getLogger(name) + else: + # if a specified echo flag, return an EchoLogger, + # which checks the flag, overrides normal log + # levels by calling logger._log() + logger = InstanceLogger(echoflag, name) + + instance.logger = logger + + +class echo_property(object): + __doc__ = """\ + When ``True``, enable log output for this element. + + This has the effect of setting the Python logging level for the namespace + of this element's class and object reference. A value of boolean ``True`` + indicates that the loglevel ``logging.INFO`` will be set for the logger, + whereas the string value ``debug`` will set the loglevel to + ``logging.DEBUG``. + """ + + def __get__(self, instance, owner): + if instance is None: + return self + else: + return instance._echo + + def __set__(self, instance, value): + instance_logger(instance, echoflag=value) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/__init__.py b/lib/python3.4/site-packages/sqlalchemy/orm/__init__.py new file mode 100644 index 0000000..7425737 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/__init__.py @@ -0,0 +1,275 @@ +# orm/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +Functional constructs for ORM configuration. + +See the SQLAlchemy object relational tutorial and mapper configuration +documentation for an overview of how this module is used. + +""" + +from . 
import exc +from .mapper import ( + Mapper, + _mapper_registry, + class_mapper, + configure_mappers, + reconstructor, + validates +) +from .interfaces import ( + EXT_CONTINUE, + EXT_STOP, + PropComparator, +) +from .deprecated_interfaces import ( + MapperExtension, + SessionExtension, + AttributeExtension, +) +from .util import ( + aliased, + join, + object_mapper, + outerjoin, + polymorphic_union, + was_deleted, + with_parent, + with_polymorphic, +) +from .properties import ColumnProperty +from .relationships import RelationshipProperty +from .descriptor_props import ( + ComparableProperty, + CompositeProperty, + SynonymProperty, +) +from .relationships import ( + foreign, + remote, +) +from .session import ( + Session, + object_session, + sessionmaker, + make_transient, + make_transient_to_detached +) +from .scoping import ( + scoped_session +) +from . import mapper as mapperlib +from .query import AliasOption, Query, Bundle +from ..util.langhelpers import public_factory +from .. import util as _sa_util +from . import strategies as _strategies + + +def create_session(bind=None, **kwargs): + """Create a new :class:`.Session` + with no automation enabled by default. + + This function is used primarily for testing. The usual + route to :class:`.Session` creation is via its constructor + or the :func:`.sessionmaker` function. + + :param bind: optional, a single Connectable to use for all + database access in the created + :class:`~sqlalchemy.orm.session.Session`. + + :param \*\*kwargs: optional, passed through to the + :class:`.Session` constructor. + + :returns: an :class:`~sqlalchemy.orm.session.Session` instance + + The defaults of create_session() are the opposite of that of + :func:`sessionmaker`; ``autoflush`` and ``expire_on_commit`` are + False, ``autocommit`` is True. In this sense the session acts + more like the "classic" SQLAlchemy 0.3 session with these. + + Usage:: + + >>> from sqlalchemy.orm import create_session + >>> session = create_session() + + It is recommended to use :func:`sessionmaker` instead of + create_session(). + + """ + kwargs.setdefault('autoflush', False) + kwargs.setdefault('autocommit', True) + kwargs.setdefault('expire_on_commit', False) + return Session(bind=bind, **kwargs) + +relationship = public_factory(RelationshipProperty, ".orm.relationship") + + +def relation(*arg, **kw): + """A synonym for :func:`relationship`.""" + + return relationship(*arg, **kw) + + +def dynamic_loader(argument, **kw): + """Construct a dynamically-loading mapper property. + + This is essentially the same as + using the ``lazy='dynamic'`` argument with :func:`relationship`:: + + dynamic_loader(SomeClass) + + # is the same as + + relationship(SomeClass, lazy="dynamic") + + See the section :ref:`dynamic_relationship` for more details + on dynamic loading. + + """ + kw['lazy'] = 'dynamic' + return relationship(argument, **kw) + + +column_property = public_factory(ColumnProperty, ".orm.column_property") +composite = public_factory(CompositeProperty, ".orm.composite") + + +def backref(name, **kwargs): + """Create a back reference with explicit keyword arguments, which are the + same arguments one can send to :func:`relationship`. + + Used with the ``backref`` keyword argument to :func:`relationship` in + place of a string argument, e.g.:: + + 'items':relationship( + SomeItem, backref=backref('parent', lazy='subquery')) + + .. 
seealso:: + + :ref:`relationships_backref` + + """ + + return (name, kwargs) + + +def deferred(*columns, **kw): + """Indicate a column-based mapped attribute that by default will + not load unless accessed. + + :param \*columns: columns to be mapped. This is typically a single + :class:`.Column` object, however a collection is supported in order + to support multiple columns mapped under the same attribute. + + :param \**kw: additional keyword arguments passed to + :class:`.ColumnProperty`. + + .. seealso:: + + :ref:`deferred` + + """ + return ColumnProperty(deferred=True, *columns, **kw) + + +mapper = public_factory(Mapper, ".orm.mapper") + +synonym = public_factory(SynonymProperty, ".orm.synonym") + +comparable_property = public_factory(ComparableProperty, + ".orm.comparable_property") + + +@_sa_util.deprecated("0.7", message=":func:`.compile_mappers` " + "is renamed to :func:`.configure_mappers`") +def compile_mappers(): + """Initialize the inter-mapper relationships of all mappers that have + been defined. + + """ + configure_mappers() + + +def clear_mappers(): + """Remove all mappers from all classes. + + This function removes all instrumentation from classes and disposes + of their associated mappers. Once called, the classes are unmapped + and can be later re-mapped with new mappers. + + :func:`.clear_mappers` is *not* for normal use, as there is literally no + valid usage for it outside of very specific testing scenarios. Normally, + mappers are permanent structural components of user-defined classes, and + are never discarded independently of their class. If a mapped class + itself is garbage collected, its mapper is automatically disposed of as + well. As such, :func:`.clear_mappers` is only for usage in test suites + that re-use the same classes with different mappings, which is itself an + extremely rare use case - the only such use case is in fact SQLAlchemy's + own test suite, and possibly the test suites of other ORM extension + libraries which intend to test various combinations of mapper construction + upon a fixed set of classes. + + """ + mapperlib._CONFIGURE_MUTEX.acquire() + try: + while _mapper_registry: + try: + # can't even reliably call list(weakdict) in jython + mapper, b = _mapper_registry.popitem() + mapper.dispose() + except KeyError: + pass + finally: + mapperlib._CONFIGURE_MUTEX.release() + +from . 
import strategy_options + +joinedload = strategy_options.joinedload._unbound_fn +joinedload_all = strategy_options.joinedload._unbound_all_fn +contains_eager = strategy_options.contains_eager._unbound_fn +defer = strategy_options.defer._unbound_fn +undefer = strategy_options.undefer._unbound_fn +undefer_group = strategy_options.undefer_group._unbound_fn +load_only = strategy_options.load_only._unbound_fn +lazyload = strategy_options.lazyload._unbound_fn +lazyload_all = strategy_options.lazyload_all._unbound_all_fn +subqueryload = strategy_options.subqueryload._unbound_fn +subqueryload_all = strategy_options.subqueryload_all._unbound_all_fn +immediateload = strategy_options.immediateload._unbound_fn +noload = strategy_options.noload._unbound_fn +defaultload = strategy_options.defaultload._unbound_fn + +from .strategy_options import Load + + +def eagerload(*args, **kwargs): + """A synonym for :func:`joinedload()`.""" + return joinedload(*args, **kwargs) + + +def eagerload_all(*args, **kwargs): + """A synonym for :func:`joinedload_all()`""" + return joinedload_all(*args, **kwargs) + + +contains_alias = public_factory(AliasOption, ".orm.contains_alias") + + +def __go(lcls): + global __all__ + from .. import util as sa_util + from . import dynamic + from . import events + import inspect as _inspect + + __all__ = sorted(name for name, obj in lcls.items() + if not (name.startswith('_') or _inspect.ismodule(obj))) + + _sa_util.dependencies.resolve_all("sqlalchemy.orm") + +__go(locals()) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/attributes.py b/lib/python3.4/site-packages/sqlalchemy/orm/attributes.py new file mode 100644 index 0000000..16b3264 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/attributes.py @@ -0,0 +1,1598 @@ +# orm/attributes.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Defines instrumentation for class attributes and their interaction +with instances. + +This module is usually not directly visible to user applications, but +defines a large part of the ORM's interactivity. + + +""" + +import operator +from .. import util, event, inspection +from . import interfaces, collections, exc as orm_exc + +from .base import instance_state, instance_dict, manager_of_class + +from .base import PASSIVE_NO_RESULT, ATTR_WAS_SET, ATTR_EMPTY, NO_VALUE,\ + NEVER_SET, NO_CHANGE, CALLABLES_OK, SQL_OK, RELATED_OBJECT_OK,\ + INIT_OK, NON_PERSISTENT_OK, LOAD_AGAINST_COMMITTED, PASSIVE_OFF,\ + PASSIVE_RETURN_NEVER_SET, PASSIVE_NO_INITIALIZE, PASSIVE_NO_FETCH,\ + PASSIVE_NO_FETCH_RELATED, PASSIVE_ONLY_PERSISTENT, NO_AUTOFLUSH +from .base import state_str, instance_str + + +@inspection._self_inspects +class QueryableAttribute(interfaces._MappedAttribute, + interfaces.InspectionAttr, + interfaces.PropComparator): + """Base class for :term:`descriptor` objects that intercept + attribute events on behalf of a :class:`.MapperProperty` + object. The actual :class:`.MapperProperty` is accessible + via the :attr:`.QueryableAttribute.property` + attribute. + + + .. 
seealso:: + + :class:`.InstrumentedAttribute` + + :class:`.MapperProperty` + + :attr:`.Mapper.all_orm_descriptors` + + :attr:`.Mapper.attrs` + """ + + is_attribute = True + + def __init__(self, class_, key, impl=None, + comparator=None, parententity=None, + of_type=None): + self.class_ = class_ + self.key = key + self.impl = impl + self.comparator = comparator + self._parententity = parententity + self._of_type = of_type + + manager = manager_of_class(class_) + # manager is None in the case of AliasedClass + if manager: + # propagate existing event listeners from + # immediate superclass + for base in manager._bases: + if key in base: + self.dispatch._update(base[key].dispatch) + + @util.memoized_property + def _supports_population(self): + return self.impl.supports_population + + def get_history(self, instance, passive=PASSIVE_OFF): + return self.impl.get_history(instance_state(instance), + instance_dict(instance), passive) + + def __selectable__(self): + # TODO: conditionally attach this method based on clause_element ? + return self + + @util.memoized_property + def info(self): + """Return the 'info' dictionary for the underlying SQL element. + + The behavior here is as follows: + + * If the attribute is a column-mapped property, i.e. + :class:`.ColumnProperty`, which is mapped directly + to a schema-level :class:`.Column` object, this attribute + will return the :attr:`.SchemaItem.info` dictionary associated + with the core-level :class:`.Column` object. + + * If the attribute is a :class:`.ColumnProperty` but is mapped to + any other kind of SQL expression other than a :class:`.Column`, + the attribute will refer to the :attr:`.MapperProperty.info` + dictionary associated directly with the :class:`.ColumnProperty`, + assuming the SQL expression itself does not have its own ``.info`` + attribute (which should be the case, unless a user-defined SQL + construct has defined one). + + * If the attribute refers to any other kind of + :class:`.MapperProperty`, including :class:`.RelationshipProperty`, + the attribute will refer to the :attr:`.MapperProperty.info` + dictionary associated with that :class:`.MapperProperty`. + + * To access the :attr:`.MapperProperty.info` dictionary of the + :class:`.MapperProperty` unconditionally, including for a + :class:`.ColumnProperty` that's associated directly with a + :class:`.schema.Column`, the attribute can be referred to using + :attr:`.QueryableAttribute.property` attribute, as + ``MyClass.someattribute.property.info``. + + .. versionadded:: 0.8.0 + + .. seealso:: + + :attr:`.SchemaItem.info` + + :attr:`.MapperProperty.info` + + """ + return self.comparator.info + + @util.memoized_property + def parent(self): + """Return an inspection instance representing the parent. + + This will be either an instance of :class:`.Mapper` + or :class:`.AliasedInsp`, depending upon the nature + of the parent entity which this attribute is associated + with. 
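+
+        For example, assuming a hypothetical mapped class ``User``
+        with a mapped ``name`` column, the parent of the class-bound
+        attribute is the ``User`` mapper::
+
+            from sqlalchemy import inspect
+
+            User.name.parent is inspect(User)   # True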
+ + """ + return inspection.inspect(self._parententity) + + @property + def expression(self): + return self.comparator.__clause_element__() + + def __clause_element__(self): + return self.comparator.__clause_element__() + + def _query_clause_element(self): + """like __clause_element__(), but called specifically + by :class:`.Query` to allow special behavior.""" + + return self.comparator._query_clause_element() + + def adapt_to_entity(self, adapt_to_entity): + assert not self._of_type + return self.__class__(adapt_to_entity.entity, + self.key, impl=self.impl, + comparator=self.comparator.adapt_to_entity( + adapt_to_entity), + parententity=adapt_to_entity) + + def of_type(self, cls): + return QueryableAttribute( + self.class_, + self.key, + self.impl, + self.comparator.of_type(cls), + self._parententity, + of_type=cls) + + def label(self, name): + return self._query_clause_element().label(name) + + def operate(self, op, *other, **kwargs): + return op(self.comparator, *other, **kwargs) + + def reverse_operate(self, op, other, **kwargs): + return op(other, self.comparator, **kwargs) + + def hasparent(self, state, optimistic=False): + return self.impl.hasparent(state, optimistic=optimistic) is not False + + def __getattr__(self, key): + try: + return getattr(self.comparator, key) + except AttributeError: + raise AttributeError( + 'Neither %r object nor %r object associated with %s ' + 'has an attribute %r' % ( + type(self).__name__, + type(self.comparator).__name__, + self, + key) + ) + + def __str__(self): + return "%s.%s" % (self.class_.__name__, self.key) + + @util.memoized_property + def property(self): + """Return the :class:`.MapperProperty` associated with this + :class:`.QueryableAttribute`. + + + Return values here will commonly be instances of + :class:`.ColumnProperty` or :class:`.RelationshipProperty`. + + + """ + return self.comparator.property + + +class InstrumentedAttribute(QueryableAttribute): + """Class bound instrumented attribute which adds basic + :term:`descriptor` methods. + + See :class:`.QueryableAttribute` for a description of most features. + + + """ + + def __set__(self, instance, value): + self.impl.set(instance_state(instance), + instance_dict(instance), value, None) + + def __delete__(self, instance): + self.impl.delete(instance_state(instance), instance_dict(instance)) + + def __get__(self, instance, owner): + if instance is None: + return self + + dict_ = instance_dict(instance) + if self._supports_population and self.key in dict_: + return dict_[self.key] + else: + return self.impl.get(instance_state(instance), dict_) + + +def create_proxied_attribute(descriptor): + """Create an QueryableAttribute / user descriptor hybrid. + + Returns a new QueryableAttribute type that delegates descriptor + behavior and getattr() to the given descriptor. + """ + + # TODO: can move this to descriptor_props if the need for this + # function is removed from ext/hybrid.py + + class Proxy(QueryableAttribute): + """Presents the :class:`.QueryableAttribute` interface as a + proxy on top of a Python descriptor / :class:`.PropComparator` + combination. 
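+
+        ``sqlalchemy.ext.hybrid`` is one consumer of these proxies,
+        using them to give plain Python descriptors class-level
+        comparator behavior.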
+ + """ + + def __init__(self, class_, key, descriptor, + comparator, + adapt_to_entity=None, doc=None, + original_property=None): + self.class_ = class_ + self.key = key + self.descriptor = descriptor + self.original_property = original_property + self._comparator = comparator + self._adapt_to_entity = adapt_to_entity + self.__doc__ = doc + + @property + def property(self): + return self.comparator.property + + @util.memoized_property + def comparator(self): + if util.callable(self._comparator): + self._comparator = self._comparator() + if self._adapt_to_entity: + self._comparator = self._comparator.adapt_to_entity( + self._adapt_to_entity) + return self._comparator + + def adapt_to_entity(self, adapt_to_entity): + return self.__class__(adapt_to_entity.entity, + self.key, + self.descriptor, + self._comparator, + adapt_to_entity) + + def __get__(self, instance, owner): + if instance is None: + return self + else: + return self.descriptor.__get__(instance, owner) + + def __str__(self): + return "%s.%s" % (self.class_.__name__, self.key) + + def __getattr__(self, attribute): + """Delegate __getattr__ to the original descriptor and/or + comparator.""" + + try: + return getattr(descriptor, attribute) + except AttributeError: + try: + return getattr(self.comparator, attribute) + except AttributeError: + raise AttributeError( + 'Neither %r object nor %r object associated with %s ' + 'has an attribute %r' % ( + type(descriptor).__name__, + type(self.comparator).__name__, + self, + attribute) + ) + + Proxy.__name__ = type(descriptor).__name__ + 'Proxy' + + util.monkeypatch_proxied_specials(Proxy, type(descriptor), + name='descriptor', + from_instance=descriptor) + return Proxy + +OP_REMOVE = util.symbol("REMOVE") +OP_APPEND = util.symbol("APPEND") +OP_REPLACE = util.symbol("REPLACE") + + +class Event(object): + """A token propagated throughout the course of a chain of attribute + events. + + Serves as an indicator of the source of the event and also provides + a means of controlling propagation across a chain of attribute + operations. + + The :class:`.Event` object is sent as the ``initiator`` argument + when dealing with the :meth:`.AttributeEvents.append`, + :meth:`.AttributeEvents.set`, + and :meth:`.AttributeEvents.remove` events. + + The :class:`.Event` object is currently interpreted by the backref + event handlers, and is used to control the propagation of operations + across two mutually-dependent attributes. + + .. versionadded:: 0.9.0 + + :var impl: The :class:`.AttributeImpl` which is the current event + initiator. + + :var op: The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE` or + :attr:`.OP_REPLACE`, indicating the source operation. + + """ + + __slots__ = 'impl', 'op', 'parent_token' + + def __init__(self, attribute_impl, op): + self.impl = attribute_impl + self.op = op + self.parent_token = self.impl.parent_token + + def __eq__(self, other): + return isinstance(other, Event) and \ + other.impl is self.impl and \ + other.op == self.op + + @property + def key(self): + return self.impl.key + + def hasparent(self, state): + return self.impl.hasparent(state) + + +class AttributeImpl(object): + """internal implementation for instrumented attributes.""" + + def __init__(self, class_, key, + callable_, dispatch, trackparent=False, extension=None, + compare_function=None, active_history=False, + parent_token=None, expire_missing=True, + send_modified_events=True, + **kwargs): + """Construct an AttributeImpl. 
+ + \class_ + associated class + + key + string name of the attribute + + \callable_ + optional function which generates a callable based on a parent + instance, which produces the "default" values for a scalar or + collection attribute when it's first accessed, if not present + already. + + trackparent + if True, attempt to track if an instance has a parent attached + to it via this attribute. + + extension + a single or list of AttributeExtension object(s) which will + receive set/delete/append/remove/etc. events. Deprecated. + The event package is now used. + + compare_function + a function that compares two values which are normally + assignable to this attribute. + + active_history + indicates that get_history() should always return the "old" value, + even if it means executing a lazy callable upon attribute change. + + parent_token + Usually references the MapperProperty, used as a key for + the hasparent() function to identify an "owning" attribute. + Allows multiple AttributeImpls to all match a single + owner attribute. + + expire_missing + if False, don't add an "expiry" callable to this attribute + during state.expire_attributes(None), if no value is present + for this key. + + send_modified_events + if False, the InstanceState._modified_event method will have no + effect; this means the attribute will never show up as changed in a + history entry. + """ + self.class_ = class_ + self.key = key + self.callable_ = callable_ + self.dispatch = dispatch + self.trackparent = trackparent + self.parent_token = parent_token or self + self.send_modified_events = send_modified_events + if compare_function is None: + self.is_equal = operator.eq + else: + self.is_equal = compare_function + + # TODO: pass in the manager here + # instead of doing a lookup + attr = manager_of_class(class_)[key] + + for ext in util.to_list(extension or []): + ext._adapt_listener(attr, ext) + + if active_history: + self.dispatch._active_history = True + + self.expire_missing = expire_missing + + __slots__ = ( + 'class_', 'key', 'callable_', 'dispatch', 'trackparent', + 'parent_token', 'send_modified_events', 'is_equal', 'expire_missing' + ) + + def __str__(self): + return "%s.%s" % (self.class_.__name__, self.key) + + def _get_active_history(self): + """Backwards compat for impl.active_history""" + + return self.dispatch._active_history + + def _set_active_history(self, value): + self.dispatch._active_history = value + + active_history = property(_get_active_history, _set_active_history) + + def hasparent(self, state, optimistic=False): + """Return the boolean value of a `hasparent` flag attached to + the given state. + + The `optimistic` flag determines what the default return value + should be if no `hasparent` flag can be located. + + As this function is used to determine if an instance is an + *orphan*, instances that were loaded from storage should be + assumed to not be orphans, until a True/False value for this + flag is set. + + An instance attribute that is loaded by a callable function + will also not have a `hasparent` flag. + + """ + msg = "This AttributeImpl is not configured to track parents." + assert self.trackparent, msg + + return state.parents.get(id(self.parent_token), optimistic) \ + is not False + + def sethasparent(self, state, parent_state, value): + """Set a boolean flag on the given item corresponding to + whether or not it is attached to a parent object via the + attribute represented by this ``InstrumentedAttribute``. 
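+
+        A True ``value`` records ``parent_state`` as the parent; a
+        False ``value`` clears the flag, after checking that the
+        previously recorded parent has not gone stale.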
+ + """ + msg = "This AttributeImpl is not configured to track parents." + assert self.trackparent, msg + + id_ = id(self.parent_token) + if value: + state.parents[id_] = parent_state + else: + if id_ in state.parents: + last_parent = state.parents[id_] + + if last_parent is not False and \ + last_parent.key != parent_state.key: + + if last_parent.obj() is None: + raise orm_exc.StaleDataError( + "Removing state %s from parent " + "state %s along attribute '%s', " + "but the parent record " + "has gone stale, can't be sure this " + "is the most recent parent." % + (state_str(state), + state_str(parent_state), + self.key)) + + return + + state.parents[id_] = False + + def get_history(self, state, dict_, passive=PASSIVE_OFF): + raise NotImplementedError() + + def get_all_pending(self, state, dict_, passive=PASSIVE_NO_INITIALIZE): + """Return a list of tuples of (state, obj) + for all objects in this attribute's current state + + history. + + Only applies to object-based attributes. + + This is an inlining of existing functionality + which roughly corresponds to: + + get_state_history( + state, + key, + passive=PASSIVE_NO_INITIALIZE).sum() + + """ + raise NotImplementedError() + + def initialize(self, state, dict_): + """Initialize the given state's attribute with an empty value.""" + + # As of 1.0, we don't actually set a value in + # dict_. This is so that the state of the object does not get + # modified without emitting the appropriate events. + + + return None + + def get(self, state, dict_, passive=PASSIVE_OFF): + """Retrieve a value from the given object. + If a callable is assembled on this object's attribute, and + passive is False, the callable will be executed and the + resulting value will be set as the new value for this attribute. + """ + if self.key in dict_: + return dict_[self.key] + else: + # if history present, don't load + key = self.key + if key not in state.committed_state or \ + state.committed_state[key] is NEVER_SET: + if not passive & CALLABLES_OK: + return PASSIVE_NO_RESULT + + if key in state.expired_attributes: + value = state._load_expired(state, passive) + elif key in state.callables: + callable_ = state.callables[key] + value = callable_(state, passive) + elif self.callable_: + value = self.callable_(state, passive) + else: + value = ATTR_EMPTY + + if value is PASSIVE_NO_RESULT or value is NEVER_SET: + return value + elif value is ATTR_WAS_SET: + try: + return dict_[key] + except KeyError: + # TODO: no test coverage here. 
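+                        # i.e. the loader signalled ATTR_WAS_SET but
+                        # never actually populated dict_[key]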
+ raise KeyError( + "Deferred loader for attribute " + "%r failed to populate " + "correctly" % key) + elif value is not ATTR_EMPTY: + return self.set_committed_value(state, dict_, value) + + if not passive & INIT_OK: + return NEVER_SET + else: + # Return a new, empty value + return self.initialize(state, dict_) + + def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF): + self.set(state, dict_, value, initiator, passive=passive) + + def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF): + self.set(state, dict_, None, initiator, + passive=passive, check_old=value) + + def pop(self, state, dict_, value, initiator, passive=PASSIVE_OFF): + self.set(state, dict_, None, initiator, + passive=passive, check_old=value, pop=True) + + def set(self, state, dict_, value, initiator, + passive=PASSIVE_OFF, check_old=None, pop=False): + raise NotImplementedError() + + def get_committed_value(self, state, dict_, passive=PASSIVE_OFF): + """return the unchanged value of this attribute""" + + if self.key in state.committed_state: + value = state.committed_state[self.key] + if value in (NO_VALUE, NEVER_SET): + return None + else: + return value + else: + return self.get(state, dict_, passive=passive) + + def set_committed_value(self, state, dict_, value): + """set an attribute value on the given instance and 'commit' it.""" + + dict_[self.key] = value + state._commit(dict_, [self.key]) + return value + + +class ScalarAttributeImpl(AttributeImpl): + """represents a scalar value-holding InstrumentedAttribute.""" + + accepts_scalar_loader = True + uses_objects = False + supports_population = True + collection = False + + __slots__ = '_replace_token', '_append_token', '_remove_token' + + def __init__(self, *arg, **kw): + super(ScalarAttributeImpl, self).__init__(*arg, **kw) + self._replace_token = self._append_token = None + self._remove_token = None + + def _init_append_token(self): + self._replace_token = self._append_token = Event(self, OP_REPLACE) + return self._replace_token + + _init_append_or_replace_token = _init_append_token + + def _init_remove_token(self): + self._remove_token = Event(self, OP_REMOVE) + return self._remove_token + + def delete(self, state, dict_): + + # TODO: catch key errors, convert to attributeerror? 
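+        # (as written, a missing key surfaces as a KeyError from the
+        # ``del dict_[self.key]`` below)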
+ if self.dispatch._active_history: + old = self.get(state, dict_, PASSIVE_RETURN_NEVER_SET) + else: + old = dict_.get(self.key, NO_VALUE) + + if self.dispatch.remove: + self.fire_remove_event(state, dict_, old, self._remove_token) + state._modified_event(dict_, self, old) + del dict_[self.key] + + def get_history(self, state, dict_, passive=PASSIVE_OFF): + if self.key in dict_: + return History.from_scalar_attribute(self, state, dict_[self.key]) + else: + if passive & INIT_OK: + passive ^= INIT_OK + current = self.get(state, dict_, passive=passive) + if current is PASSIVE_NO_RESULT: + return HISTORY_BLANK + else: + return History.from_scalar_attribute(self, state, current) + + def set(self, state, dict_, value, initiator, + passive=PASSIVE_OFF, check_old=None, pop=False): + if self.dispatch._active_history: + old = self.get(state, dict_, PASSIVE_RETURN_NEVER_SET) + else: + old = dict_.get(self.key, NO_VALUE) + + if self.dispatch.set: + value = self.fire_replace_event(state, dict_, + value, old, initiator) + state._modified_event(dict_, self, old) + dict_[self.key] = value + + def fire_replace_event(self, state, dict_, value, previous, initiator): + for fn in self.dispatch.set: + value = fn( + state, value, previous, + initiator or self._replace_token or + self._init_append_or_replace_token()) + return value + + def fire_remove_event(self, state, dict_, value, initiator): + for fn in self.dispatch.remove: + fn(state, value, + initiator or self._remove_token or self._init_remove_token()) + + @property + def type(self): + self.property.columns[0].type + + +class ScalarObjectAttributeImpl(ScalarAttributeImpl): + """represents a scalar-holding InstrumentedAttribute, + where the target object is also instrumented. + + Adds events to delete/set operations. + + """ + + accepts_scalar_loader = False + uses_objects = True + supports_population = True + collection = False + + __slots__ = () + + def delete(self, state, dict_): + old = self.get(state, dict_) + self.fire_remove_event( + state, dict_, old, + self._remove_token or self._init_remove_token()) + del dict_[self.key] + + def get_history(self, state, dict_, passive=PASSIVE_OFF): + if self.key in dict_: + return History.from_object_attribute(self, state, dict_[self.key]) + else: + if passive & INIT_OK: + passive ^= INIT_OK + current = self.get(state, dict_, passive=passive) + if current is PASSIVE_NO_RESULT: + return HISTORY_BLANK + else: + return History.from_object_attribute(self, state, current) + + def get_all_pending(self, state, dict_, passive=PASSIVE_NO_INITIALIZE): + if self.key in dict_: + current = dict_[self.key] + elif passive & CALLABLES_OK: + current = self.get(state, dict_, passive=passive) + else: + return [] + + # can't use __hash__(), can't use __eq__() here + if current is not None and \ + current is not PASSIVE_NO_RESULT and \ + current is not NEVER_SET: + ret = [(instance_state(current), current)] + else: + ret = [(None, None)] + + if self.key in state.committed_state: + original = state.committed_state[self.key] + if original is not None and \ + original is not PASSIVE_NO_RESULT and \ + original is not NEVER_SET and \ + original is not current: + + ret.append((instance_state(original), original)) + return ret + + def set(self, state, dict_, value, initiator, + passive=PASSIVE_OFF, check_old=None, pop=False): + """Set a value on the given InstanceState. 
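+
+        If ``check_old`` is supplied, it must be the current value or
+        a ``ValueError`` is raised; with ``pop=True`` a mismatch is
+        silently ignored instead.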
+ + """ + if self.dispatch._active_history: + old = self.get( + state, dict_, passive=PASSIVE_ONLY_PERSISTENT | NO_AUTOFLUSH) + else: + old = self.get(state, dict_, passive=PASSIVE_NO_FETCH ^ INIT_OK) + + if check_old is not None and \ + old is not PASSIVE_NO_RESULT and \ + check_old is not old: + if pop: + return + else: + raise ValueError( + "Object %s not associated with %s on attribute '%s'" % ( + instance_str(check_old), + state_str(state), + self.key + )) + + value = self.fire_replace_event(state, dict_, value, old, initiator) + dict_[self.key] = value + + def fire_remove_event(self, state, dict_, value, initiator): + if self.trackparent and value is not None: + self.sethasparent(instance_state(value), state, False) + + for fn in self.dispatch.remove: + fn(state, value, initiator or + self._remove_token or self._init_remove_token()) + + state._modified_event(dict_, self, value) + + def fire_replace_event(self, state, dict_, value, previous, initiator): + if self.trackparent: + if (previous is not value and + previous not in (None, PASSIVE_NO_RESULT, NEVER_SET)): + self.sethasparent(instance_state(previous), state, False) + + for fn in self.dispatch.set: + value = fn( + state, value, previous, initiator or + self._replace_token or self._init_append_or_replace_token()) + + state._modified_event(dict_, self, previous) + + if self.trackparent: + if value is not None: + self.sethasparent(instance_state(value), state, True) + + return value + + +class CollectionAttributeImpl(AttributeImpl): + """A collection-holding attribute that instruments changes in membership. + + Only handles collections of instrumented objects. + + InstrumentedCollectionAttribute holds an arbitrary, user-specified + container object (defaulting to a list) and brokers access to the + CollectionAdapter, a "view" onto that object that presents consistent bag + semantics to the orm layer independent of the user data implementation. 
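+
+    The adapter for a loaded collection is reached internally through
+    the ``_sa_adapter`` attribute of the user-visible collection
+    object.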
+ + """ + accepts_scalar_loader = False + uses_objects = True + supports_population = True + collection = True + + __slots__ = 'copy', 'collection_factory', '_append_token', '_remove_token' + + def __init__(self, class_, key, callable_, dispatch, + typecallable=None, trackparent=False, extension=None, + copy_function=None, compare_function=None, **kwargs): + super(CollectionAttributeImpl, self).__init__( + class_, + key, + callable_, dispatch, + trackparent=trackparent, + extension=extension, + compare_function=compare_function, + **kwargs) + + if copy_function is None: + copy_function = self.__copy + self.copy = copy_function + self.collection_factory = typecallable + self._append_token = None + self._remove_token = None + + if getattr(self.collection_factory, "_sa_linker", None): + + @event.listens_for(self, "init_collection") + def link(target, collection, collection_adapter): + collection._sa_linker(collection_adapter) + + @event.listens_for(self, "dispose_collection") + def unlink(target, collection, collection_adapter): + collection._sa_linker(None) + + def _init_append_token(self): + self._append_token = Event(self, OP_APPEND) + return self._append_token + + def _init_remove_token(self): + self._remove_token = Event(self, OP_REMOVE) + return self._remove_token + + def __copy(self, item): + return [y for y in collections.collection_adapter(item)] + + def get_history(self, state, dict_, passive=PASSIVE_OFF): + current = self.get(state, dict_, passive=passive) + if current is PASSIVE_NO_RESULT: + return HISTORY_BLANK + else: + return History.from_collection(self, state, current) + + def get_all_pending(self, state, dict_, passive=PASSIVE_NO_INITIALIZE): + # NOTE: passive is ignored here at the moment + + if self.key not in dict_: + return [] + + current = dict_[self.key] + current = getattr(current, '_sa_adapter') + + if self.key in state.committed_state: + original = state.committed_state[self.key] + if original not in (NO_VALUE, NEVER_SET): + current_states = [((c is not None) and + instance_state(c) or None, c) + for c in current] + original_states = [((c is not None) and + instance_state(c) or None, c) + for c in original] + + current_set = dict(current_states) + original_set = dict(original_states) + + return \ + [(s, o) for s, o in current_states + if s not in original_set] + \ + [(s, o) for s, o in current_states + if s in original_set] + \ + [(s, o) for s, o in original_states + if s not in current_set] + + return [(instance_state(o), o) for o in current] + + def fire_append_event(self, state, dict_, value, initiator): + for fn in self.dispatch.append: + value = fn( + state, value, + initiator or self._append_token or self._init_append_token()) + + state._modified_event(dict_, self, NEVER_SET, True) + + if self.trackparent and value is not None: + self.sethasparent(instance_state(value), state, True) + + return value + + def fire_pre_remove_event(self, state, dict_, initiator): + state._modified_event(dict_, self, NEVER_SET, True) + + def fire_remove_event(self, state, dict_, value, initiator): + if self.trackparent and value is not None: + self.sethasparent(instance_state(value), state, False) + + for fn in self.dispatch.remove: + fn(state, value, + initiator or self._remove_token or self._init_remove_token()) + + state._modified_event(dict_, self, NEVER_SET, True) + + def delete(self, state, dict_): + if self.key not in dict_: + return + + state._modified_event(dict_, self, NEVER_SET, True) + + collection = self.get_collection(state, state.dict) + 
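+        # emit a remove event for each member before the collection
+        # itself is discarded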
collection.clear_with_event() + # TODO: catch key errors, convert to attributeerror? + del dict_[self.key] + + def initialize(self, state, dict_): + """Initialize this attribute with an empty collection.""" + + _, user_data = self._initialize_collection(state) + dict_[self.key] = user_data + return user_data + + def _initialize_collection(self, state): + + adapter, collection = state.manager.initialize_collection( + self.key, state, self.collection_factory) + + self.dispatch.init_collection(state, collection, adapter) + + return adapter, collection + + def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF): + collection = self.get_collection(state, dict_, passive=passive) + if collection is PASSIVE_NO_RESULT: + value = self.fire_append_event(state, dict_, value, initiator) + assert self.key not in dict_, \ + "Collection was loaded during event handling." + state._get_pending_mutation(self.key).append(value) + else: + collection.append_with_event(value, initiator) + + def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF): + collection = self.get_collection(state, state.dict, passive=passive) + if collection is PASSIVE_NO_RESULT: + self.fire_remove_event(state, dict_, value, initiator) + assert self.key not in dict_, \ + "Collection was loaded during event handling." + state._get_pending_mutation(self.key).remove(value) + else: + collection.remove_with_event(value, initiator) + + def pop(self, state, dict_, value, initiator, passive=PASSIVE_OFF): + try: + # TODO: better solution here would be to add + # a "popper" role to collections.py to complement + # "remover". + self.remove(state, dict_, value, initiator, passive=passive) + except (ValueError, KeyError, IndexError): + pass + + def set(self, state, dict_, value, initiator, + passive=PASSIVE_OFF, pop=False): + """Set a value on the given object. + + """ + + self._set_iterable( + state, dict_, value, + lambda adapter, i: adapter.adapt_like_to_iterable(i)) + + def _set_iterable(self, state, dict_, iterable, adapter=None): + """Set a collection value from an iterable of state-bearers. + + ``adapter`` is an optional callable invoked with a CollectionAdapter + and the iterable. Should return an iterable of state-bearing + instances suitable for appending via a CollectionAdapter. Can be used + for, e.g., adapting an incoming dictionary into an iterator of values + rather than keys. + + """ + # pulling a new collection first so that an adaptation exception does + # not trigger a lazy load of the old collection. 
+ new_collection, user_data = self._initialize_collection(state) + if adapter: + new_values = list(adapter(new_collection, iterable)) + else: + new_values = list(iterable) + + old = self.get(state, dict_, passive=PASSIVE_ONLY_PERSISTENT) + if old is PASSIVE_NO_RESULT: + old = self.initialize(state, dict_) + elif old is iterable: + # ignore re-assignment of the current collection, as happens + # implicitly with in-place operators (foo.collection |= other) + return + + # place a copy of "old" in state.committed_state + state._modified_event(dict_, self, old, True) + + old_collection = old._sa_adapter + + dict_[self.key] = user_data + + collections.bulk_replace(new_values, old_collection, new_collection) + + del old._sa_adapter + self.dispatch.dispose_collection(state, old, old_collection) + + def _invalidate_collection(self, collection): + adapter = getattr(collection, '_sa_adapter') + adapter.invalidated = True + + def set_committed_value(self, state, dict_, value): + """Set an attribute value on the given instance and 'commit' it.""" + + collection, user_data = self._initialize_collection(state) + + if value: + collection.append_multiple_without_event(value) + + state.dict[self.key] = user_data + + state._commit(dict_, [self.key]) + + if self.key in state._pending_mutations: + # pending items exist. issue a modified event, + # add/remove new items. + state._modified_event(dict_, self, user_data, True) + + pending = state._pending_mutations.pop(self.key) + added = pending.added_items + removed = pending.deleted_items + for item in added: + collection.append_without_event(item) + for item in removed: + collection.remove_without_event(item) + + return user_data + + def get_collection(self, state, dict_, + user_data=None, passive=PASSIVE_OFF): + """Retrieve the CollectionAdapter associated with the given state. + + Creates a new CollectionAdapter if one does not exist. + + """ + if user_data is None: + user_data = self.get(state, dict_, passive=passive) + if user_data is PASSIVE_NO_RESULT: + return user_data + + return getattr(user_data, '_sa_adapter') + + +def backref_listeners(attribute, key, uselist): + """Apply listeners to synchronize a two-way relationship.""" + + # use easily recognizable names for stack traces + + parent_token = attribute.impl.parent_token + parent_impl = attribute.impl + + def _acceptable_key_err(child_state, initiator, child_impl): + raise ValueError( + "Bidirectional attribute conflict detected: " + 'Passing object %s to attribute "%s" ' + 'triggers a modify event on attribute "%s" ' + 'via the backref "%s".' % ( + state_str(child_state), + initiator.parent_token, + child_impl.parent_token, + attribute.impl.parent_token + ) + ) + + def emit_backref_from_scalar_set_event(state, child, oldchild, initiator): + if oldchild is child: + return child + if oldchild is not None and \ + oldchild is not PASSIVE_NO_RESULT and \ + oldchild is not NEVER_SET: + # With lazy=None, there's no guarantee that the full collection is + # present when updating via a backref. 
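+            # hence the pop below passes PASSIVE_NO_FETCH so that no
+            # SQL is emitted against the old child's attribute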
+ old_state, old_dict = instance_state(oldchild),\ + instance_dict(oldchild) + impl = old_state.manager[key].impl + + if initiator.impl is not impl or \ + initiator.op not in (OP_REPLACE, OP_REMOVE): + impl.pop(old_state, + old_dict, + state.obj(), + parent_impl._append_token or + parent_impl._init_append_token(), + passive=PASSIVE_NO_FETCH) + + if child is not None: + child_state, child_dict = instance_state(child),\ + instance_dict(child) + child_impl = child_state.manager[key].impl + if initiator.parent_token is not parent_token and \ + initiator.parent_token is not child_impl.parent_token: + _acceptable_key_err(state, initiator, child_impl) + elif initiator.impl is not child_impl or \ + initiator.op not in (OP_APPEND, OP_REPLACE): + child_impl.append( + child_state, + child_dict, + state.obj(), + initiator, + passive=PASSIVE_NO_FETCH) + return child + + def emit_backref_from_collection_append_event(state, child, initiator): + if child is None: + return + + child_state, child_dict = instance_state(child), \ + instance_dict(child) + child_impl = child_state.manager[key].impl + + if initiator.parent_token is not parent_token and \ + initiator.parent_token is not child_impl.parent_token: + _acceptable_key_err(state, initiator, child_impl) + elif initiator.impl is not child_impl or \ + initiator.op not in (OP_APPEND, OP_REPLACE): + child_impl.append( + child_state, + child_dict, + state.obj(), + initiator, + passive=PASSIVE_NO_FETCH) + return child + + def emit_backref_from_collection_remove_event(state, child, initiator): + if child is not None: + child_state, child_dict = instance_state(child),\ + instance_dict(child) + child_impl = child_state.manager[key].impl + if initiator.impl is not child_impl or \ + initiator.op not in (OP_REMOVE, OP_REPLACE): + child_impl.pop( + child_state, + child_dict, + state.obj(), + initiator, + passive=PASSIVE_NO_FETCH) + + if uselist: + event.listen(attribute, "append", + emit_backref_from_collection_append_event, + retval=True, raw=True) + else: + event.listen(attribute, "set", + emit_backref_from_scalar_set_event, + retval=True, raw=True) + # TODO: need coverage in test/orm/ of remove event + event.listen(attribute, "remove", + emit_backref_from_collection_remove_event, + retval=True, raw=True) + +_NO_HISTORY = util.symbol('NO_HISTORY') +_NO_STATE_SYMBOLS = frozenset([ + id(PASSIVE_NO_RESULT), + id(NO_VALUE), + id(NEVER_SET)]) + +History = util.namedtuple("History", [ + "added", "unchanged", "deleted" +]) + + +class History(History): + """A 3-tuple of added, unchanged and deleted values, + representing the changes which have occurred on an instrumented + attribute. + + The easiest way to get a :class:`.History` object for a particular + attribute on an object is to use the :func:`.inspect` function:: + + from sqlalchemy import inspect + + hist = inspect(myobject).attrs.myattribute.history + + Each tuple member is an iterable sequence: + + * ``added`` - the collection of items added to the attribute (the first + tuple element). + + * ``unchanged`` - the collection of items that have not changed on the + attribute (the second tuple element). + + * ``deleted`` - the collection of items that have been removed from the + attribute (the third tuple element). + + """ + + def __bool__(self): + return self != HISTORY_BLANK + __nonzero__ = __bool__ + + def empty(self): + """Return True if this :class:`.History` has no changes + and no existing, unchanged state. 
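+
+        For example, constructing :class:`.History` tuples directly
+        (normally only the attribute system does this)::
+
+            History(added=(), unchanged=(), deleted=()).empty()     # True
+            History(added=(), unchanged=['x'], deleted=()).empty()  # False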
+ + """ + + return not bool( + (self.added or self.deleted) + or self.unchanged + ) + + def sum(self): + """Return a collection of added + unchanged + deleted.""" + + return (self.added or []) +\ + (self.unchanged or []) +\ + (self.deleted or []) + + def non_deleted(self): + """Return a collection of added + unchanged.""" + + return (self.added or []) +\ + (self.unchanged or []) + + def non_added(self): + """Return a collection of unchanged + deleted.""" + + return (self.unchanged or []) +\ + (self.deleted or []) + + def has_changes(self): + """Return True if this :class:`.History` has changes.""" + + return bool(self.added or self.deleted) + + def as_state(self): + return History( + [(c is not None) + and instance_state(c) or None + for c in self.added], + [(c is not None) + and instance_state(c) or None + for c in self.unchanged], + [(c is not None) + and instance_state(c) or None + for c in self.deleted], + ) + + @classmethod + def from_scalar_attribute(cls, attribute, state, current): + original = state.committed_state.get(attribute.key, _NO_HISTORY) + + if original is _NO_HISTORY: + if current is NEVER_SET: + return cls((), (), ()) + else: + return cls((), [current], ()) + # don't let ClauseElement expressions here trip things up + elif attribute.is_equal(current, original) is True: + return cls((), [current], ()) + else: + # current convention on native scalars is to not + # include information + # about missing previous value in "deleted", but + # we do include None, which helps in some primary + # key situations + if id(original) in _NO_STATE_SYMBOLS: + deleted = () + else: + deleted = [original] + if current is NEVER_SET: + return cls((), (), deleted) + else: + return cls([current], (), deleted) + + @classmethod + def from_object_attribute(cls, attribute, state, current): + original = state.committed_state.get(attribute.key, _NO_HISTORY) + + if original is _NO_HISTORY: + if current is NO_VALUE or current is NEVER_SET: + return cls((), (), ()) + else: + return cls((), [current], ()) + elif current is original: + return cls((), [current], ()) + else: + # current convention on related objects is to not + # include information + # about missing previous value in "deleted", and + # to also not include None - the dependency.py rules + # ignore the None in any case. 
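+            # (None is therefore filtered out below, along with the
+            # NO_VALUE / NEVER_SET / PASSIVE_NO_RESULT sentinels)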
+ if id(original) in _NO_STATE_SYMBOLS or original is None: + deleted = () + else: + deleted = [original] + if current is NO_VALUE or current is NEVER_SET: + return cls((), (), deleted) + else: + return cls([current], (), deleted) + + @classmethod + def from_collection(cls, attribute, state, current): + original = state.committed_state.get(attribute.key, _NO_HISTORY) + + if current is NO_VALUE or current is NEVER_SET: + return cls((), (), ()) + + current = getattr(current, '_sa_adapter') + if original in (NO_VALUE, NEVER_SET): + return cls(list(current), (), ()) + elif original is _NO_HISTORY: + return cls((), list(current), ()) + else: + + current_states = [((c is not None) and instance_state(c) + or None, c) + for c in current + ] + original_states = [((c is not None) and instance_state(c) + or None, c) + for c in original + ] + + current_set = dict(current_states) + original_set = dict(original_states) + + return cls( + [o for s, o in current_states if s not in original_set], + [o for s, o in current_states if s in original_set], + [o for s, o in original_states if s not in current_set] + ) + +HISTORY_BLANK = History(None, None, None) + + +def get_history(obj, key, passive=PASSIVE_OFF): + """Return a :class:`.History` record for the given object + and attribute key. + + :param obj: an object whose class is instrumented by the + attributes package. + + :param key: string attribute name. + + :param passive: indicates loading behavior for the attribute + if the value is not already present. This is a + bitflag attribute, which defaults to the symbol + :attr:`.PASSIVE_OFF` indicating all necessary SQL + should be emitted. + + """ + if passive is True: + util.warn_deprecated("Passing True for 'passive' is deprecated. " + "Use attributes.PASSIVE_NO_INITIALIZE") + passive = PASSIVE_NO_INITIALIZE + elif passive is False: + util.warn_deprecated("Passing False for 'passive' is " + "deprecated. 
Use attributes.PASSIVE_OFF") + passive = PASSIVE_OFF + + return get_state_history(instance_state(obj), key, passive) + + +def get_state_history(state, key, passive=PASSIVE_OFF): + return state.get_history(key, passive) + + +def has_parent(cls, obj, key, optimistic=False): + """TODO""" + manager = manager_of_class(cls) + state = instance_state(obj) + return manager.has_parent(state, key, optimistic) + + +def register_attribute(class_, key, **kw): + comparator = kw.pop('comparator', None) + parententity = kw.pop('parententity', None) + doc = kw.pop('doc', None) + desc = register_descriptor(class_, key, + comparator, parententity, doc=doc) + register_attribute_impl(class_, key, **kw) + return desc + + +def register_attribute_impl(class_, key, + uselist=False, callable_=None, + useobject=False, + impl_class=None, backref=None, **kw): + + manager = manager_of_class(class_) + if uselist: + factory = kw.pop('typecallable', None) + typecallable = manager.instrument_collection_class( + key, factory or list) + else: + typecallable = kw.pop('typecallable', None) + + dispatch = manager[key].dispatch + + if impl_class: + impl = impl_class(class_, key, typecallable, dispatch, **kw) + elif uselist: + impl = CollectionAttributeImpl(class_, key, callable_, dispatch, + typecallable=typecallable, **kw) + elif useobject: + impl = ScalarObjectAttributeImpl(class_, key, callable_, + dispatch, **kw) + else: + impl = ScalarAttributeImpl(class_, key, callable_, dispatch, **kw) + + manager[key].impl = impl + + if backref: + backref_listeners(manager[key], backref, uselist) + + manager.post_configure_attribute(key) + return manager[key] + + +def register_descriptor(class_, key, comparator=None, + parententity=None, doc=None): + manager = manager_of_class(class_) + + descriptor = InstrumentedAttribute(class_, key, comparator=comparator, + parententity=parententity) + + descriptor.__doc__ = doc + + manager.instrument_attribute(key, descriptor) + return descriptor + + +def unregister_attribute(class_, key): + manager_of_class(class_).uninstrument_attribute(key) + + +def init_collection(obj, key): + """Initialize a collection attribute and return the collection adapter. + + This function is used to provide direct access to collection internals + for a previously unloaded attribute. e.g.:: + + collection_adapter = init_collection(someobject, 'elements') + for elem in values: + collection_adapter.append_without_event(elem) + + For an easier way to do the above, see + :func:`~sqlalchemy.orm.attributes.set_committed_value`. + + obj is an instrumented object instance. An InstanceState + is accepted directly for backwards compatibility but + this usage is deprecated. + + """ + state = instance_state(obj) + dict_ = state.dict + return init_state_collection(state, dict_, key) + + +def init_state_collection(state, dict_, key): + """Initialize a collection attribute and return the collection adapter.""" + + attr = state.manager[key].impl + user_data = attr.initialize(state, dict_) + return attr.get_collection(state, dict_, user_data) + + +def set_committed_value(instance, key, value): + """Set the value of an attribute with no history events. + + Cancels any previous history present. The value should be + a scalar value for scalar-holding attributes, or + an iterable for any collection-holding attribute. + + This is the same underlying method used when a lazy loader + fires off and loads additional data from the database. 
+ In particular, this method can be used by application code + which has loaded additional attributes or collections through + separate queries, which can then be attached to an instance + as though it were part of its original loaded state. + + """ + state, dict_ = instance_state(instance), instance_dict(instance) + state.manager[key].impl.set_committed_value(state, dict_, value) + + +def set_attribute(instance, key, value): + """Set the value of an attribute, firing history events. + + This function may be used regardless of instrumentation + applied directly to the class, i.e. no descriptors are required. + Custom attribute management schemes will need to make usage + of this method to establish attribute state as understood + by SQLAlchemy. + + """ + state, dict_ = instance_state(instance), instance_dict(instance) + state.manager[key].impl.set(state, dict_, value, None) + + +def get_attribute(instance, key): + """Get the value of an attribute, firing any callables required. + + This function may be used regardless of instrumentation + applied directly to the class, i.e. no descriptors are required. + Custom attribute management schemes will need to make usage + of this method to make usage of attribute state as understood + by SQLAlchemy. + + """ + state, dict_ = instance_state(instance), instance_dict(instance) + return state.manager[key].impl.get(state, dict_) + + +def del_attribute(instance, key): + """Delete the value of an attribute, firing history events. + + This function may be used regardless of instrumentation + applied directly to the class, i.e. no descriptors are required. + Custom attribute management schemes will need to make usage + of this method to establish attribute state as understood + by SQLAlchemy. + + """ + state, dict_ = instance_state(instance), instance_dict(instance) + state.manager[key].impl.delete(state, dict_) + + +def flag_modified(instance, key): + """Mark an attribute on an instance as 'modified'. + + This sets the 'modified' flag on the instance and + establishes an unconditional change event for the given attribute. + + """ + state, dict_ = instance_state(instance), instance_dict(instance) + impl = state.manager[key].impl + state._modified_event(dict_, impl, NO_VALUE, force=True) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/base.py b/lib/python3.4/site-packages/sqlalchemy/orm/base.py new file mode 100644 index 0000000..7947cd7 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/base.py @@ -0,0 +1,540 @@ +# orm/base.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Constants and rudimental functions used throughout the ORM. + +""" + +from .. import util, inspection, exc as sa_exc +from ..sql import expression +from . import exc +import operator + +PASSIVE_NO_RESULT = util.symbol( + 'PASSIVE_NO_RESULT', + """Symbol returned by a loader callable or other attribute/history + retrieval operation when a value could not be determined, based + on loader callable flags. + """ +) + +ATTR_WAS_SET = util.symbol( + 'ATTR_WAS_SET', + """Symbol returned by a loader callable to indicate the + retrieved value, or values, were assigned to their attributes + on the target object. 
+ """ +) + +ATTR_EMPTY = util.symbol( + 'ATTR_EMPTY', + """Symbol used internally to indicate an attribute had no callable.""" +) + +NO_VALUE = util.symbol( + 'NO_VALUE', + """Symbol which may be placed as the 'previous' value of an attribute, + indicating no value was loaded for an attribute when it was modified, + and flags indicated we were not to load it. + """ +) + +NEVER_SET = util.symbol( + 'NEVER_SET', + """Symbol which may be placed as the 'previous' value of an attribute + indicating that the attribute had not been assigned to previously. + """ +) + +NO_CHANGE = util.symbol( + "NO_CHANGE", + """No callables or SQL should be emitted on attribute access + and no state should change + """, canonical=0 +) + +CALLABLES_OK = util.symbol( + "CALLABLES_OK", + """Loader callables can be fired off if a value + is not present. + """, canonical=1 +) + +SQL_OK = util.symbol( + "SQL_OK", + """Loader callables can emit SQL at least on scalar value attributes.""", + canonical=2 +) + +RELATED_OBJECT_OK = util.symbol( + "RELATED_OBJECT_OK", + """Callables can use SQL to load related objects as well + as scalar value attributes. + """, canonical=4 +) + +INIT_OK = util.symbol( + "INIT_OK", + """Attributes should be initialized with a blank + value (None or an empty collection) upon get, if no other + value can be obtained. + """, canonical=8 +) + +NON_PERSISTENT_OK = util.symbol( + "NON_PERSISTENT_OK", + """Callables can be emitted if the parent is not persistent.""", + canonical=16 +) + +LOAD_AGAINST_COMMITTED = util.symbol( + "LOAD_AGAINST_COMMITTED", + """Callables should use committed values as primary/foreign keys during a + load. + """, canonical=32 +) + +NO_AUTOFLUSH = util.symbol( + "NO_AUTOFLUSH", + """Loader callables should disable autoflush.""", + canonical=64 +) + +# pre-packaged sets of flags used as inputs +PASSIVE_OFF = util.symbol( + "PASSIVE_OFF", + "Callables can be emitted in all cases.", + canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK | + INIT_OK | CALLABLES_OK | SQL_OK) +) +PASSIVE_RETURN_NEVER_SET = util.symbol( + "PASSIVE_RETURN_NEVER_SET", + """PASSIVE_OFF ^ INIT_OK""", + canonical=PASSIVE_OFF ^ INIT_OK +) +PASSIVE_NO_INITIALIZE = util.symbol( + "PASSIVE_NO_INITIALIZE", + "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK", + canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK +) +PASSIVE_NO_FETCH = util.symbol( + "PASSIVE_NO_FETCH", + "PASSIVE_OFF ^ SQL_OK", + canonical=PASSIVE_OFF ^ SQL_OK +) +PASSIVE_NO_FETCH_RELATED = util.symbol( + "PASSIVE_NO_FETCH_RELATED", + "PASSIVE_OFF ^ RELATED_OBJECT_OK", + canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK +) +PASSIVE_ONLY_PERSISTENT = util.symbol( + "PASSIVE_ONLY_PERSISTENT", + "PASSIVE_OFF ^ NON_PERSISTENT_OK", + canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK +) + +DEFAULT_MANAGER_ATTR = '_sa_class_manager' +DEFAULT_STATE_ATTR = '_sa_instance_state' +_INSTRUMENTOR = ('mapper', 'instrumentor') + +EXT_CONTINUE = util.symbol('EXT_CONTINUE') +EXT_STOP = util.symbol('EXT_STOP') + +ONETOMANY = util.symbol( + 'ONETOMANY', + """Indicates the one-to-many direction for a :func:`.relationship`. + + This symbol is typically used by the internals but may be exposed within + certain API features. + + """) + +MANYTOONE = util.symbol( + 'MANYTOONE', + """Indicates the many-to-one direction for a :func:`.relationship`. + + This symbol is typically used by the internals but may be exposed within + certain API features. + + """) + +MANYTOMANY = util.symbol( + 'MANYTOMANY', + """Indicates the many-to-many direction for a :func:`.relationship`. 
+ + This symbol is typically used by the internals but may be exposed within + certain API features. + + """) + +NOT_EXTENSION = util.symbol( + 'NOT_EXTENSION', + """Symbol indicating an :class:`InspectionAttr` that's + not part of sqlalchemy.ext. + + Is assigned to the :attr:`.InspectionAttr.extension_type` + attibute. + + """) + +_never_set = frozenset([NEVER_SET]) + +_none_set = frozenset([None, NEVER_SET, PASSIVE_NO_RESULT]) + +_SET_DEFERRED_EXPIRED = util.symbol("SET_DEFERRED_EXPIRED") + +_DEFER_FOR_STATE = util.symbol("DEFER_FOR_STATE") + + +def _generative(*assertions): + """Mark a method as generative, e.g. method-chained.""" + + @util.decorator + def generate(fn, *args, **kw): + self = args[0]._clone() + for assertion in assertions: + assertion(self, fn.__name__) + fn(self, *args[1:], **kw) + return self + return generate + + +# these can be replaced by sqlalchemy.ext.instrumentation +# if augmented class instrumentation is enabled. +def manager_of_class(cls): + return cls.__dict__.get(DEFAULT_MANAGER_ATTR, None) + +instance_state = operator.attrgetter(DEFAULT_STATE_ATTR) + +instance_dict = operator.attrgetter('__dict__') + + +def instance_str(instance): + """Return a string describing an instance.""" + + return state_str(instance_state(instance)) + + +def state_str(state): + """Return a string describing an instance via its InstanceState.""" + + if state is None: + return "None" + else: + return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj())) + + +def state_class_str(state): + """Return a string describing an instance's class via its + InstanceState. + """ + + if state is None: + return "None" + else: + return '<%s>' % (state.class_.__name__, ) + + +def attribute_str(instance, attribute): + return instance_str(instance) + "." + attribute + + +def state_attribute_str(state, attribute): + return state_str(state) + "." + attribute + + +def object_mapper(instance): + """Given an object, return the primary Mapper associated with the object + instance. + + Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError` + if no mapping is configured. + + This function is available via the inspection system as:: + + inspect(instance).mapper + + Using the inspection system will raise + :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is + not part of a mapping. + + """ + return object_state(instance).mapper + + +def object_state(instance): + """Given an object, return the :class:`.InstanceState` + associated with the object. + + Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError` + if no mapping is configured. + + Equivalent functionality is available via the :func:`.inspect` + function as:: + + inspect(instance) + + Using the inspection system will raise + :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is + not part of a mapping. + + """ + state = _inspect_mapped_object(instance) + if state is None: + raise exc.UnmappedInstanceError(instance) + else: + return state + + +@inspection._inspects(object) +def _inspect_mapped_object(instance): + try: + return instance_state(instance) + # TODO: whats the py-2/3 syntax to catch two + # different kinds of exceptions at once ? 
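+        # (one portable option: ``except (exc.UnmappedClassError,) +
+        # exc.NO_STATE:`` works on both, since ``exc.NO_STATE`` is
+        # itself a tuple of exception types)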
+ except exc.UnmappedClassError: + return None + except exc.NO_STATE: + return None + + +def _class_to_mapper(class_or_mapper): + insp = inspection.inspect(class_or_mapper, False) + if insp is not None: + return insp.mapper + else: + raise exc.UnmappedClassError(class_or_mapper) + + +def _mapper_or_none(entity): + """Return the :class:`.Mapper` for the given class or None if the + class is not mapped. + """ + + insp = inspection.inspect(entity, False) + if insp is not None: + return insp.mapper + else: + return None + + +def _is_mapped_class(entity): + """Return True if the given object is a mapped class, + :class:`.Mapper`, or :class:`.AliasedClass`. + """ + + insp = inspection.inspect(entity, False) + return insp is not None and \ + not insp.is_clause_element and \ + ( + insp.is_mapper or insp.is_aliased_class + ) + + +def _attr_as_key(attr): + if hasattr(attr, 'key'): + return attr.key + else: + return expression._column_as_key(attr) + + +def _orm_columns(entity): + insp = inspection.inspect(entity, False) + if hasattr(insp, 'selectable'): + return [c for c in insp.selectable.c] + else: + return [entity] + + +def _is_aliased_class(entity): + insp = inspection.inspect(entity, False) + return insp is not None and \ + getattr(insp, "is_aliased_class", False) + + +def _entity_descriptor(entity, key): + """Return a class attribute given an entity and string name. + + May return :class:`.InstrumentedAttribute` or user-defined + attribute. + + """ + insp = inspection.inspect(entity) + if insp.is_selectable: + description = entity + entity = insp.c + elif insp.is_aliased_class: + entity = insp.entity + description = entity + elif hasattr(insp, "mapper"): + description = entity = insp.mapper.class_ + else: + description = entity + + try: + return getattr(entity, key) + except AttributeError: + raise sa_exc.InvalidRequestError( + "Entity '%s' has no property '%s'" % + (description, key) + ) + +_state_mapper = util.dottedgetter('manager.mapper') + + +@inspection._inspects(type) +def _inspect_mapped_class(class_, configure=False): + try: + class_manager = manager_of_class(class_) + if not class_manager.is_mapped: + return None + mapper = class_manager.mapper + except exc.NO_STATE: + return None + else: + if configure and mapper._new_mappers: + mapper._configure_all() + return mapper + + +def class_mapper(class_, configure=True): + """Given a class, return the primary :class:`.Mapper` associated + with the class. + + Raises :exc:`.UnmappedClassError` if no mapping is configured + on the given class, or :exc:`.ArgumentError` if a non-class + object is passed. + + Equivalent functionality is available via the :func:`.inspect` + function as:: + + inspect(some_mapped_class) + + Using the inspection system will raise + :class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped. + + """ + mapper = _inspect_mapped_class(class_, configure=configure) + if mapper is None: + if not isinstance(class_, type): + raise sa_exc.ArgumentError( + "Class object expected, got '%r'." % (class_, )) + raise exc.UnmappedClassError(class_) + else: + return mapper + + +class InspectionAttr(object): + """A base class applied to all ORM objects that can be returned + by the :func:`.inspect` function. + + The attributes defined here allow the usage of simple boolean + checks to test basic facts about the object returned.
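+ + For example, a rough sketch (``User`` standing in for any hypothetical + mapped class):: + + from sqlalchemy import inspect + + insp = inspect(User) # a Mapper + insp.is_mapper # True + + insp = inspect(User()) # an InstanceState + insp.is_instance # True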
+ + While the boolean checks here are basically the same as using + the Python isinstance() function, the flags here can be used without + the need to import all of these classes, and also such that + the SQLAlchemy class system can change while leaving the flags + here intact for forwards-compatibility. + + """ + __slots__ = () + + is_selectable = False + """Return True if this object is an instance of :class:`.Selectable`.""" + + is_aliased_class = False + """True if this object is an instance of :class:`.AliasedClass`.""" + + is_instance = False + """True if this object is an instance of :class:`.InstanceState`.""" + + is_mapper = False + """True if this object is an instance of :class:`.Mapper`.""" + + is_property = False + """True if this object is an instance of :class:`.MapperProperty`.""" + + is_attribute = False + """True if this object is a Python :term:`descriptor`. + + This can refer to one of many types. Usually a + :class:`.QueryableAttribute` which handles attributes events on behalf + of a :class:`.MapperProperty`. But can also be an extension type + such as :class:`.AssociationProxy` or :class:`.hybrid_property`. + The :attr:`.InspectionAttr.extension_type` will refer to a constant + identifying the specific subtype. + + .. seealso:: + + :attr:`.Mapper.all_orm_descriptors` + + """ + + is_clause_element = False + """True if this object is an instance of :class:`.ClauseElement`.""" + + extension_type = NOT_EXTENSION + """The extension type, if any. + Defaults to :data:`.interfaces.NOT_EXTENSION` + + .. versionadded:: 0.8.0 + + .. seealso:: + + :data:`.HYBRID_METHOD` + + :data:`.HYBRID_PROPERTY` + + :data:`.ASSOCIATION_PROXY` + + """ + + +class InspectionAttrInfo(InspectionAttr): + """Adds the ``.info`` attribute to :class:`.InspectionAttr`. + + The rationale for :class:`.InspectionAttr` vs. :class:`.InspectionAttrInfo` + is that the former is compatible as a mixin for classes that specify + ``__slots__``; this is essentially an implementation artifact. + + """ + + @util.memoized_property + def info(self): + """Info dictionary associated with the object, allowing user-defined + data to be associated with this :class:`.InspectionAttr`. + + The dictionary is generated when first accessed. Alternatively, + it can be specified as a constructor argument to the + :func:`.column_property`, :func:`.relationship`, or :func:`.composite` + functions. + + .. versionadded:: 0.8 Added support for .info to all + :class:`.MapperProperty` subclasses. + + .. versionchanged:: 1.0.0 :attr:`.MapperProperty.info` is also + available on extension types via the + :attr:`.InspectionAttrInfo.info` attribute, so that it can apply + to a wider variety of ORM and extension constructs. + + .. seealso:: + + :attr:`.QueryableAttribute.info` + + :attr:`.SchemaItem.info` + + """ + return {} + + +class _MappedAttribute(object): + """Mixin for attributes which should be replaced by mapper-assigned + attributes. + + """ + __slots__ = () diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/collections.py b/lib/python3.4/site-packages/sqlalchemy/orm/collections.py new file mode 100644 index 0000000..f3c609f --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/collections.py @@ -0,0 +1,1580 @@ +# orm/collections.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Support for collections of mapped entities. 
+ +The collections package supplies the machinery used to inform the ORM of +collection membership changes. An instrumentation via decoration approach is +used, allowing arbitrary types (including built-ins) to be used as entity +collections without requiring inheritance from a base class. + +Instrumentation decoration relays membership change events to the +:class:`.CollectionAttributeImpl` that is currently managing the collection. +The decorators observe function call arguments and return values, tracking +entities entering or leaving the collection. Two decorator approaches are +provided. One is a bundle of generic decorators that map function arguments +and return values to events:: + + from sqlalchemy.orm.collections import collection + class MyClass(object): + # ... + + @collection.adds(1) + def store(self, item): + self.data.append(item) + + @collection.removes_return() + def pop(self): + return self.data.pop() + + +The second approach is a bundle of targeted decorators that wrap appropriate +append and remove notifiers around the mutation methods present in the +standard Python ``list``, ``set`` and ``dict`` interfaces. These could be +specified in terms of generic decorator recipes, but are instead hand-tooled +for increased efficiency. The targeted decorators occasionally implement +adapter-like behavior, such as mapping bulk-set methods (``extend``, +``update``, ``__setslice__``, etc.) into the series of atomic mutation events +that the ORM requires. + +The targeted decorators are used internally for automatic instrumentation of +entity collection classes. Every collection class goes through a +transformation process roughly like so: + +1. If the class is a built-in, substitute a trivial sub-class +2. Is this class already instrumented? +3. Add in generic decorators +4. Sniff out the collection interface through duck-typing +5. Add targeted decoration to any undecorated interface method + +This process modifies the class at runtime, decorating methods and adding some +bookkeeping properties. This isn't possible (or desirable) for built-in +classes like ``list``, so trivial sub-classes are substituted to hold +decoration:: + + class InstrumentedList(list): + pass + +Collection classes can be specified in ``relationship(collection_class=)`` as +types or a function that returns an instance. Collection classes are +inspected and instrumented during the mapper compilation phase. The +collection_class callable will be executed once to produce a specimen +instance, and the type of that specimen will be instrumented. Functions that +return built-in types like ``list`` will be adapted to produce instrumented +instances. + +When extending a known type like ``list``, additional decorations are +generally not needed. Odds are, the extension method will delegate to a +method that's already instrumented. For example:: + + class QueueIsh(list): + def push(self, item): + self.append(item) + def shift(self): + return self.pop(0) + +There's no need to decorate these methods. ``append`` and ``pop`` are already +instrumented as part of the ``list`` interface. Decorating them would fire +duplicate events, which should be avoided. + +The targeted decoration tries not to rely on other methods in the underlying +collection class, but some are unavoidable. Many depend on 'read' methods +being present to properly instrument a 'write', for example, ``__setitem__`` +needs ``__getitem__``.
"Bulk" methods like ``update`` and ``extend`` may also +reimplemented in terms of atomic appends and removes, so the ``extend`` +decoration will actually perform many ``append`` operations and not call the +underlying method at all. + +Tight control over bulk operation and the firing of events is also possible by +implementing the instrumentation internally in your methods. The basic +instrumentation package works under the general assumption that collection +mutation will not raise unusual exceptions. If you want to closely +orchestrate append and remove events with exception management, internal +instrumentation may be the answer. Within your method, +``collection_adapter(self)`` will retrieve an object that you can use for +explicit control over triggering append and remove events. + +The owning object and :class:`.CollectionAttributeImpl` are also reachable +through the adapter, allowing for some very sophisticated behavior. + +""" + +import inspect +import operator +import weakref + +from ..sql import expression +from .. import util, exc as sa_exc +from . import base + +from sqlalchemy.util.compat import inspect_getargspec + +__all__ = ['collection', 'collection_adapter', + 'mapped_collection', 'column_mapped_collection', + 'attribute_mapped_collection'] + +__instrumentation_mutex = util.threading.Lock() + + +class _PlainColumnGetter(object): + """Plain column getter, stores collection of Column objects + directly. + + Serializes to a :class:`._SerializableColumnGetterV2` + which has more expensive __call__() performance + and some rare caveats. + + """ + + def __init__(self, cols): + self.cols = cols + self.composite = len(cols) > 1 + + def __reduce__(self): + return _SerializableColumnGetterV2._reduce_from_cols(self.cols) + + def _cols(self, mapper): + return self.cols + + def __call__(self, value): + state = base.instance_state(value) + m = base._state_mapper(state) + + key = [ + m._get_state_attr_by_column(state, state.dict, col) + for col in self._cols(m) + ] + + if self.composite: + return tuple(key) + else: + return key[0] + + +class _SerializableColumnGetter(object): + """Column-based getter used in version 0.7.6 only. + + Remains here for pickle compatibility with 0.7.6. + + """ + + def __init__(self, colkeys): + self.colkeys = colkeys + self.composite = len(colkeys) > 1 + + def __reduce__(self): + return _SerializableColumnGetter, (self.colkeys,) + + def __call__(self, value): + state = base.instance_state(value) + m = base._state_mapper(state) + key = [m._get_state_attr_by_column( + state, state.dict, + m.mapped_table.columns[k]) + for k in self.colkeys] + if self.composite: + return tuple(key) + else: + return key[0] + + +class _SerializableColumnGetterV2(_PlainColumnGetter): + """Updated serializable getter which deals with + multi-table mapped classes. + + Two extremely unusual cases are not supported. + Mappings which have tables across multiple metadata + objects, or which are mapped to non-Table selectables + linked across inheriting mappers may fail to function + here. 
+ + """ + + def __init__(self, colkeys): + self.colkeys = colkeys + self.composite = len(colkeys) > 1 + + def __reduce__(self): + return self.__class__, (self.colkeys,) + + @classmethod + def _reduce_from_cols(cls, cols): + def _table_key(c): + if not isinstance(c.table, expression.TableClause): + return None + else: + return c.table.key + colkeys = [(c.key, _table_key(c)) for c in cols] + return _SerializableColumnGetterV2, (colkeys,) + + def _cols(self, mapper): + cols = [] + metadata = getattr(mapper.local_table, 'metadata', None) + for (ckey, tkey) in self.colkeys: + if tkey is None or \ + metadata is None or \ + tkey not in metadata: + cols.append(mapper.local_table.c[ckey]) + else: + cols.append(metadata.tables[tkey].c[ckey]) + return cols + + +def column_mapped_collection(mapping_spec): + """A dictionary-based collection type with column-based keying. + + Returns a :class:`.MappedCollection` factory with a keying function + generated from mapping_spec, which may be a Column or a sequence + of Columns. + + The key value must be immutable for the lifetime of the object. You + can not, for example, map on foreign key values if those key values will + change during the session, i.e. from None to a database-assigned integer + after a session flush. + + """ + cols = [expression._only_column_elements(q, "mapping_spec") + for q in util.to_list(mapping_spec) + ] + keyfunc = _PlainColumnGetter(cols) + return lambda: MappedCollection(keyfunc) + + +class _SerializableAttrGetter(object): + def __init__(self, name): + self.name = name + self.getter = operator.attrgetter(name) + + def __call__(self, target): + return self.getter(target) + + def __reduce__(self): + return _SerializableAttrGetter, (self.name, ) + + +def attribute_mapped_collection(attr_name): + """A dictionary-based collection type with attribute-based keying. + + Returns a :class:`.MappedCollection` factory with a keying based on the + 'attr_name' attribute of entities in the collection, where ``attr_name`` + is the string name of the attribute. + + The key value must be immutable for the lifetime of the object. You + can not, for example, map on foreign key values if those key values will + change during the session, i.e. from None to a database-assigned integer + after a session flush. + + """ + getter = _SerializableAttrGetter(attr_name) + return lambda: MappedCollection(getter) + + +def mapped_collection(keyfunc): + """A dictionary-based collection type with arbitrary keying. + + Returns a :class:`.MappedCollection` factory with a keying function + generated from keyfunc, a callable that takes an entity and returns a + key value. + + The key value must be immutable for the lifetime of the object. You + can not, for example, map on foreign key values if those key values will + change during the session, i.e. from None to a database-assigned integer + after a session flush. + + """ + return lambda: MappedCollection(keyfunc) + + +class collection(object): + """Decorators for entity collection classes. + + The decorators fall into two groups: annotations and interception recipes. + + The annotating decorators (appender, remover, iterator, linker, converter, + internally_instrumented) indicate the method's purpose and take no + arguments. They are not written with parens:: + + @collection.appender + def append(self, append): ... + + The recipe decorators all require parens, even those that take no + arguments:: + + @collection.adds('entity') + def insert(self, position, entity): ... 
+ + @collection.removes_return() + def popitem(self): ... + + """ + # Bundled as a class solely for ease of use: packaging, doc strings, + # importability. + + @staticmethod + def appender(fn): + """Tag the method as the collection appender. + + The appender method is called with one positional argument: the value + to append. The method will be automatically decorated with 'adds(1)' + if not already decorated:: + + @collection.appender + def add(self, append): ... + + # or, equivalently + @collection.appender + @collection.adds(1) + def add(self, append): ... + + # for a mapping type, an 'append' may kick out a previous value + # that occupies that slot. consider d['a'] = 'foo'; any previous + # value in d['a'] is discarded. + @collection.appender + @collection.replaces(1) + def add(self, entity): + key = some_key_func(entity) + previous = None + if key in self: + previous = self[key] + self[key] = entity + return previous + + If the value to append is not allowed in the collection, you may + raise an exception. Something to remember is that the appender + will be called for each object mapped by a database query. If the + database contains rows that violate your collection semantics, you + will need to get creative to fix the problem, as access via the + collection will not work. + + If the appender method is internally instrumented, you must also + receive the keyword argument '_sa_initiator' and ensure its + promulgation to collection events. + + """ + fn._sa_instrument_role = 'appender' + return fn + + @staticmethod + def remover(fn): + """Tag the method as the collection remover. + + The remover method is called with one positional argument: the value + to remove. The method will be automatically decorated with + :meth:`removes_return` if not already decorated:: + + @collection.remover + def zap(self, entity): ... + + # or, equivalently + @collection.remover + @collection.removes_return() + def zap(self): ... + + If the value to remove is not present in the collection, you may + raise an exception or return None to ignore the error. + + If the remove method is internally instrumented, you must also + receive the keyword argument '_sa_initiator' and ensure its + promulgation to collection events. + + """ + fn._sa_instrument_role = 'remover' + return fn + + @staticmethod + def iterator(fn): + """Tag the method as the collection iterator. + + The iterator method is called with no arguments. It is expected to + return an iterator over all collection members:: + + @collection.iterator + def __iter__(self): ... + + """ + fn._sa_instrument_role = 'iterator' + return fn + + @staticmethod + def internally_instrumented(fn): + """Tag the method as instrumented. + + This tag will prevent any decoration from being applied to the + method. Use this if you are orchestrating your own calls to + :func:`.collection_adapter` in one of the basic SQLAlchemy + interface methods, or to prevent an automatic ABC method + decoration from wrapping your implementation:: + + # normally an 'extend' method on a list-like class would be + # automatically intercepted and re-implemented in terms of + # SQLAlchemy events and append(). your implementation will + # never be called, unless: + @collection.internally_instrumented + def extend(self, items): ... + + """ + fn._sa_instrumented = True + return fn + + @staticmethod + def linker(fn): + """Tag the method as a "linked to attribute" event handler.
+ + This optional event handler will be called when the collection class + is linked to or unlinked from the InstrumentedAttribute. It is + invoked immediately after the '_sa_adapter' property is set on + the instance. A single argument is passed: the collection adapter + that has been linked, or None if unlinking. + + .. deprecated:: 1.0.0 - the :meth:`.collection.linker` handler + is superseded by the :meth:`.AttributeEvents.init_collection` + and :meth:`.AttributeEvents.dispose_collection` handlers. + + """ + fn._sa_instrument_role = 'linker' + return fn + + link = linker + """deprecated; synonym for :meth:`.collection.linker`.""" + + @staticmethod + def converter(fn): + """Tag the method as the collection converter. + + This optional method will be called when a collection is being + replaced entirely, as in:: + + myobj.acollection = [newvalue1, newvalue2] + + The converter method will receive the object being assigned and should + return an iterable of values suitable for use by the ``appender`` + method. A converter must not assign values or mutate the collection; + its sole job is to adapt the value the user provides into an iterable + of values for the ORM's use. + + The default converter implementation will use duck-typing to do the + conversion. A dict-like collection will be converted into an iterable + of dictionary values, and other types will simply be iterated:: + + @collection.converter + def convert(self, other): ... + + If the duck-typing of the object does not match the type of this + collection, a TypeError is raised. + + Supply an implementation of this method if you want to expand the + range of possible types that can be assigned in bulk or perform + validation on the values about to be assigned. + + """ + fn._sa_instrument_role = 'converter' + return fn + + @staticmethod + def adds(arg): + """Mark the method as adding an entity to the collection. + + Adds "add to collection" handling to the method. The decorator + argument indicates which method argument holds the SQLAlchemy-relevant + value. Arguments can be specified positionally (i.e. integer) or by + name:: + + @collection.adds(1) + def push(self, item): ... + + @collection.adds('entity') + def do_stuff(self, thing, entity=None): ... + + """ + def decorator(fn): + fn._sa_instrument_before = ('fire_append_event', arg) + return fn + return decorator + + @staticmethod + def replaces(arg): + """Mark the method as replacing an entity in the collection. + + Adds "add to collection" and "remove from collection" handling to + the method. The decorator argument indicates which method argument + holds the SQLAlchemy-relevant value to be added, and the return value, + if any, will be considered the value to remove. + + Arguments can be specified positionally (i.e. integer) or by name:: + + @collection.replaces(2) + def __setitem__(self, index, item): ... + + """ + def decorator(fn): + fn._sa_instrument_before = ('fire_append_event', arg) + fn._sa_instrument_after = 'fire_remove_event' + return fn + return decorator + + @staticmethod + def removes(arg): + """Mark the method as removing an entity from the collection. + + Adds "remove from collection" handling to the method. The decorator + argument indicates which method argument holds the SQLAlchemy-relevant + value to be removed. Arguments can be specified positionally (i.e. + integer) or by name:: + + @collection.removes(1) + def zap(self, item): ... + + For methods where the value to remove is not known at call-time, use + collection.removes_return.
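+ + As a rough end-to-end sketch, a minimal custom collection electing + ``yank`` as its remover (``MyBag`` is purely illustrative):: + + class MyBag(object): + def __init__(self): + self.data = [] + + @collection.appender + def put(self, item): + self.data.append(item) + + @collection.remover + @collection.removes(1) + def yank(self, item): + self.data.remove(item) + + @collection.iterator + def __iter__(self): + return iter(self.data)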
+ + """ + def decorator(fn): + fn._sa_instrument_before = ('fire_remove_event', arg) + return fn + return decorator + + @staticmethod + def removes_return(): + """Mark the method as removing an entity in the collection. + + Adds "remove from collection" handling to the method. The return + value of the method, if any, is considered the value to remove. The + method arguments are not inspected:: + + @collection.removes_return() + def pop(self): ... + + For methods where the value to remove is known at call-time, use + collection.remove. + + """ + def decorator(fn): + fn._sa_instrument_after = 'fire_remove_event' + return fn + return decorator + + +collection_adapter = operator.attrgetter('_sa_adapter') +"""Fetch the :class:`.CollectionAdapter` for a collection.""" + + +class CollectionAdapter(object): + """Bridges between the ORM and arbitrary Python collections. + + Proxies base-level collection operations (append, remove, iterate) + to the underlying Python collection, and emits add/remove events for + entities entering or leaving the collection. + + The ORM uses :class:`.CollectionAdapter` exclusively for interaction with + entity collections. + + + """ + invalidated = False + + def __init__(self, attr, owner_state, data): + self._key = attr.key + self._data = weakref.ref(data) + self.owner_state = owner_state + data._sa_adapter = self + + def _warn_invalidated(self): + util.warn("This collection has been invalidated.") + + @property + def data(self): + "The entity collection being adapted." + return self._data() + + @property + def _referenced_by_owner(self): + """return True if the owner state still refers to this collection. + + This will return False within a bulk replace operation, + where this collection is the one being replaced. + + """ + return self.owner_state.dict[self._key] is self._data() + + @util.memoized_property + def attr(self): + return self.owner_state.manager[self._key].impl + + def adapt_like_to_iterable(self, obj): + """Converts collection-compatible objects to an iterable of values. + + Can be passed any type of object, and if the underlying collection + determines that it can be adapted into a stream of values it can + use, returns an iterable of values suitable for append()ing. + + This method may raise TypeError or any other suitable exception + if adaptation fails. + + If a converter implementation is not supplied on the collection, + a default duck-typing-based implementation is used. + + """ + converter = self._data()._sa_converter + if converter is not None: + return converter(obj) + + setting_type = util.duck_type_collection(obj) + receiving_type = util.duck_type_collection(self._data()) + + if obj is None or setting_type != receiving_type: + given = obj is None and 'None' or obj.__class__.__name__ + if receiving_type is None: + wanted = self._data().__class__.__name__ + else: + wanted = receiving_type.__name__ + + raise TypeError( + "Incompatible collection type: %s is not %s-like" % ( + given, wanted)) + + # If the object is an adapted collection, return the (iterable) + # adapter. 
+ if getattr(obj, '_sa_adapter', None) is not None: + return obj._sa_adapter + elif setting_type == dict: + if util.py3k: + return obj.values() + else: + return getattr(obj, 'itervalues', obj.values)() + else: + return iter(obj) + + def append_with_event(self, item, initiator=None): + """Add an entity to the collection, firing mutation events.""" + + self._data()._sa_appender(item, _sa_initiator=initiator) + + def append_without_event(self, item): + """Add or restore an entity to the collection, firing no events.""" + self._data()._sa_appender(item, _sa_initiator=False) + + def append_multiple_without_event(self, items): + """Add or restore multiple entities to the collection, firing no + events.""" + appender = self._data()._sa_appender + for item in items: + appender(item, _sa_initiator=False) + + def remove_with_event(self, item, initiator=None): + """Remove an entity from the collection, firing mutation events.""" + self._data()._sa_remover(item, _sa_initiator=initiator) + + def remove_without_event(self, item): + """Remove an entity from the collection, firing no events.""" + self._data()._sa_remover(item, _sa_initiator=False) + + def clear_with_event(self, initiator=None): + """Empty the collection, firing a mutation event for each entity.""" + + remover = self._data()._sa_remover + for item in list(self): + remover(item, _sa_initiator=initiator) + + def clear_without_event(self): + """Empty the collection, firing no events.""" + + remover = self._data()._sa_remover + for item in list(self): + remover(item, _sa_initiator=False) + + def __iter__(self): + """Iterate over entities in the collection.""" + + return iter(self._data()._sa_iterator()) + + def __len__(self): + """Count entities in the collection.""" + return len(list(self._data()._sa_iterator())) + + def __bool__(self): + return True + + __nonzero__ = __bool__ + + def fire_append_event(self, item, initiator=None): + """Notify that an entity has entered the collection. + + Initiator is a token owned by the InstrumentedAttribute that + initiated the membership mutation, and should be left as None + unless you are passing along an initiator value from a chained + operation. + + """ + if initiator is not False: + if self.invalidated: + self._warn_invalidated() + return self.attr.fire_append_event( + self.owner_state, + self.owner_state.dict, + item, initiator) + else: + return item + + def fire_remove_event(self, item, initiator=None): + """Notify that an entity has been removed from the collection. + + Initiator is the InstrumentedAttribute that initiated the membership + mutation, and should be left as None unless you are passing along + an initiator value from a chained operation. + + """ + if initiator is not False: + if self.invalidated: + self._warn_invalidated() + self.attr.fire_remove_event( + self.owner_state, + self.owner_state.dict, + item, initiator) + + def fire_pre_remove_event(self, initiator=None): + """Notify that an entity is about to be removed from the collection. + + Only called if the entity cannot be removed after calling + fire_remove_event().
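+ + The instrumented ``dict.popitem()`` is the typical case: which value + will be removed is not known until the underlying method has actually + run, so this hook fires first to allow the existing value to be + committed.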
+ + """ + if self.invalidated: + self._warn_invalidated() + self.attr.fire_pre_remove_event( + self.owner_state, + self.owner_state.dict, + initiator=initiator) + + def __getstate__(self): + return {'key': self._key, + 'owner_state': self.owner_state, + 'data': self.data} + + def __setstate__(self, d): + self._key = d['key'] + self.owner_state = d['owner_state'] + self._data = weakref.ref(d['data']) + + +def bulk_replace(values, existing_adapter, new_adapter): + """Load a new collection, firing events based on prior like membership. + + Appends instances in ``values`` onto the ``new_adapter``. Events will be + fired for any instance not present in the ``existing_adapter``. Any + instances in ``existing_adapter`` not present in ``values`` will have + remove events fired upon them. + + :param values: An iterable of collection member instances + + :param existing_adapter: A :class:`.CollectionAdapter` of + instances to be replaced + + :param new_adapter: An empty :class:`.CollectionAdapter` + to load with ``values`` + + + """ + if not isinstance(values, list): + values = list(values) + + idset = util.IdentitySet + existing_idset = idset(existing_adapter or ()) + constants = existing_idset.intersection(values or ()) + additions = idset(values or ()).difference(constants) + removals = existing_idset.difference(constants) + + for member in values or (): + if member in additions: + new_adapter.append_with_event(member) + elif member in constants: + new_adapter.append_without_event(member) + + if existing_adapter: + for member in removals: + existing_adapter.remove_with_event(member) + + +def prepare_instrumentation(factory): + """Prepare a callable for future use as a collection class factory. + + Given a collection class factory (either a type or no-arg callable), + return another factory that will produce compatible instances when + called. + + This function is responsible for converting collection_class=list + into the run-time behavior of collection_class=InstrumentedList. + + """ + # Convert a builtin to 'Instrumented*' + if factory in __canned_instrumentation: + factory = __canned_instrumentation[factory] + + # Create a specimen + cls = type(factory()) + + # Did factory callable return a builtin? + if cls in __canned_instrumentation: + # Wrap it so that it returns our 'Instrumented*' + factory = __converting_factory(cls, factory) + cls = factory() + + # Instrument the class if needed. + if __instrumentation_mutex.acquire(): + try: + if getattr(cls, '_sa_instrumented', None) != id(cls): + _instrument_class(cls) + finally: + __instrumentation_mutex.release() + + return factory + + +def __converting_factory(specimen_cls, original_factory): + """Return a wrapper that converts a "canned" collection like + set, dict, list into the Instrumented* version. + + """ + + instrumented_cls = __canned_instrumentation[specimen_cls] + + def wrapper(): + collection = original_factory() + return instrumented_cls(collection) + + # often flawed but better than nothing + wrapper.__name__ = "%sWrapper" % original_factory.__name__ + wrapper.__doc__ = original_factory.__doc__ + + return wrapper + + +def _instrument_class(cls): + """Modify methods in a class and install instrumentation.""" + + # In the normal call flow, a request for any of the 3 basic collection + # types is transformed into one of our trivial subclasses + # (e.g. InstrumentedList). Catch anything else that sneaks in here... + if cls.__module__ == '__builtin__': + raise sa_exc.ArgumentError( + "Can not instrument a built-in type. 
Use a " + "subclass, even a trivial one.") + + roles, methods = _locate_roles_and_methods(cls) + + _setup_canned_roles(cls, roles, methods) + + _assert_required_roles(cls, roles, methods) + + _set_collection_attributes(cls, roles, methods) + + +def _locate_roles_and_methods(cls): + """search for _sa_instrument_role-decorated methods in + method resolution order, assign to roles. + + """ + + roles = {} + methods = {} + + for supercls in cls.__mro__: + for name, method in vars(supercls).items(): + if not util.callable(method): + continue + + # note role declarations + if hasattr(method, '_sa_instrument_role'): + role = method._sa_instrument_role + assert role in ('appender', 'remover', 'iterator', + 'linker', 'converter') + roles.setdefault(role, name) + + # transfer instrumentation requests from decorated function + # to the combined queue + before, after = None, None + if hasattr(method, '_sa_instrument_before'): + op, argument = method._sa_instrument_before + assert op in ('fire_append_event', 'fire_remove_event') + before = op, argument + if hasattr(method, '_sa_instrument_after'): + op = method._sa_instrument_after + assert op in ('fire_append_event', 'fire_remove_event') + after = op + if before: + methods[name] = before + (after, ) + elif after: + methods[name] = None, None, after + return roles, methods + + +def _setup_canned_roles(cls, roles, methods): + """see if this class has "canned" roles based on a known + collection type (dict, set, list). Apply those roles + as needed to the "roles" dictionary, and also + prepare "decorator" methods + + """ + collection_type = util.duck_type_collection(cls) + if collection_type in __interfaces: + canned_roles, decorators = __interfaces[collection_type] + for role, name in canned_roles.items(): + roles.setdefault(role, name) + + # apply ABC auto-decoration to methods that need it + for method, decorator in decorators.items(): + fn = getattr(cls, method, None) + if (fn and method not in methods and + not hasattr(fn, '_sa_instrumented')): + setattr(cls, method, decorator(fn)) + + +def _assert_required_roles(cls, roles, methods): + """ensure all roles are present, and apply implicit instrumentation if + needed + + """ + if 'appender' not in roles or not hasattr(cls, roles['appender']): + raise sa_exc.ArgumentError( + "Type %s must elect an appender method to be " + "a collection class" % cls.__name__) + elif (roles['appender'] not in methods and + not hasattr(getattr(cls, roles['appender']), '_sa_instrumented')): + methods[roles['appender']] = ('fire_append_event', 1, None) + + if 'remover' not in roles or not hasattr(cls, roles['remover']): + raise sa_exc.ArgumentError( + "Type %s must elect a remover method to be " + "a collection class" % cls.__name__) + elif (roles['remover'] not in methods and + not hasattr(getattr(cls, roles['remover']), '_sa_instrumented')): + methods[roles['remover']] = ('fire_remove_event', 1, None) + + if 'iterator' not in roles or not hasattr(cls, roles['iterator']): + raise sa_exc.ArgumentError( + "Type %s must elect an iterator method to be " + "a collection class" % cls.__name__) + + +def _set_collection_attributes(cls, roles, methods): + """apply ad-hoc instrumentation from decorators, class-level defaults + and implicit role declarations + + """ + for method_name, (before, argument, after) in methods.items(): + setattr(cls, method_name, + _instrument_membership_mutator(getattr(cls, method_name), + before, argument, after)) + # intern the role map + for role, method_name in roles.items(): + setattr(cls, '_sa_%s' % 
role, getattr(cls, method_name)) + + cls._sa_adapter = None + + if not hasattr(cls, '_sa_converter'): + cls._sa_converter = None + cls._sa_instrumented = id(cls) + + +def _instrument_membership_mutator(method, before, argument, after): + """Route method args and/or return value through the collection + adapter.""" + # This isn't smart enough to handle @adds(1) for 'def fn(self, (a, b))' + if before: + fn_args = list(util.flatten_iterator(inspect_getargspec(method)[0])) + if isinstance(argument, int): + pos_arg = argument + named_arg = len(fn_args) > argument and fn_args[argument] or None + else: + if argument in fn_args: + pos_arg = fn_args.index(argument) + else: + pos_arg = None + named_arg = argument + del fn_args + + def wrapper(*args, **kw): + if before: + if pos_arg is None: + if named_arg not in kw: + raise sa_exc.ArgumentError( + "Missing argument %s" % argument) + value = kw[named_arg] + else: + if len(args) > pos_arg: + value = args[pos_arg] + elif named_arg in kw: + value = kw[named_arg] + else: + raise sa_exc.ArgumentError( + "Missing argument %s" % argument) + + initiator = kw.pop('_sa_initiator', None) + if initiator is False: + executor = None + else: + executor = args[0]._sa_adapter + + if before and executor: + getattr(executor, before)(value, initiator) + + if not after or not executor: + return method(*args, **kw) + else: + res = method(*args, **kw) + if res is not None: + getattr(executor, after)(res, initiator) + return res + + wrapper._sa_instrumented = True + if hasattr(method, "_sa_instrument_role"): + wrapper._sa_instrument_role = method._sa_instrument_role + wrapper.__name__ = method.__name__ + wrapper.__doc__ = method.__doc__ + return wrapper + + +def __set(collection, item, _sa_initiator=None): + """Run set events, may eventually be inlined into decorators.""" + + if _sa_initiator is not False: + executor = collection._sa_adapter + if executor: + item = executor.fire_append_event(item, _sa_initiator) + return item + + +def __del(collection, item, _sa_initiator=None): + """Run del events, may eventually be inlined into decorators.""" + if _sa_initiator is not False: + executor = collection._sa_adapter + if executor: + executor.fire_remove_event(item, _sa_initiator) + + +def __before_delete(collection, _sa_initiator=None): + """Special method to run 'commit existing value' methods""" + executor = collection._sa_adapter + if executor: + executor.fire_pre_remove_event(_sa_initiator) + + +def _list_decorators(): + """Tailored instrumentation wrappers for any list-like class.""" + + def _tidy(fn): + fn._sa_instrumented = True + fn.__doc__ = getattr(list, fn.__name__).__doc__ + + def append(fn): + def append(self, item, _sa_initiator=None): + item = __set(self, item, _sa_initiator) + fn(self, item) + _tidy(append) + return append + + def remove(fn): + def remove(self, value, _sa_initiator=None): + __before_delete(self, _sa_initiator) + # testlib.pragma exempt:__eq__ + fn(self, value) + __del(self, value, _sa_initiator) + _tidy(remove) + return remove + + def insert(fn): + def insert(self, index, value): + value = __set(self, value) + fn(self, index, value) + _tidy(insert) + return insert + + def __setitem__(fn): + def __setitem__(self, index, value): + if not isinstance(index, slice): + existing = self[index] + if existing is not None: + __del(self, existing) + value = __set(self, value) + fn(self, index, value) + else: + # slice assignment requires __delitem__, insert, __len__ + step = index.step or 1 + start = index.start or 0 + if start < 0: + start += len(self) + 
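# normalize the slice's stop against the current length as well; + # a contiguous (step == 1) slice is then rewritten as per-item + # delete-then-insert so that each member fires its own + # remove/append event +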
if index.stop is not None: + stop = index.stop + else: + stop = len(self) + if stop < 0: + stop += len(self) + + if step == 1: + for i in range(start, stop, step): + if len(self) > start: + del self[start] + + for i, item in enumerate(value): + self.insert(i + start, item) + else: + rng = list(range(start, stop, step)) + if len(value) != len(rng): + raise ValueError( + "attempt to assign sequence of size %s to " + "extended slice of size %s" % (len(value), + len(rng))) + for i, item in zip(rng, value): + self.__setitem__(i, item) + _tidy(__setitem__) + return __setitem__ + + def __delitem__(fn): + def __delitem__(self, index): + if not isinstance(index, slice): + item = self[index] + __del(self, item) + fn(self, index) + else: + # slice deletion requires __getslice__ and a slice-groking + # __getitem__ for stepped deletion + # note: not breaking this into atomic dels + for item in self[index]: + __del(self, item) + fn(self, index) + _tidy(__delitem__) + return __delitem__ + + if util.py2k: + def __setslice__(fn): + def __setslice__(self, start, end, values): + for value in self[start:end]: + __del(self, value) + values = [__set(self, value) for value in values] + fn(self, start, end, values) + _tidy(__setslice__) + return __setslice__ + + def __delslice__(fn): + def __delslice__(self, start, end): + for value in self[start:end]: + __del(self, value) + fn(self, start, end) + _tidy(__delslice__) + return __delslice__ + + def extend(fn): + def extend(self, iterable): + for value in iterable: + self.append(value) + _tidy(extend) + return extend + + def __iadd__(fn): + def __iadd__(self, iterable): + # list.__iadd__ takes any iterable and seems to let TypeError + # raise as-is instead of returning NotImplemented + for value in iterable: + self.append(value) + return self + _tidy(__iadd__) + return __iadd__ + + def pop(fn): + def pop(self, index=-1): + __before_delete(self) + item = fn(self, index) + __del(self, item) + return item + _tidy(pop) + return pop + + if not util.py2k: + def clear(fn): + def clear(self, index=-1): + for item in self: + __del(self, item) + fn(self) + _tidy(clear) + return clear + + # __imul__ : not wrapping this. all members of the collection are already + # present, so no need to fire appends... wrapping it with an explicit + # decorator is still possible, so events on *= can be had if they're + # desired. hard to imagine a use case for __imul__, though. 
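+ + # gather every wrapper defined above by snapshotting locals(); the + # resulting name-to-decorator map is what __interfaces consumes when + # instrumenting list-like classes (_tidy, a helper rather than a + # decorator, is popped back out first)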
+ + l = locals().copy() + l.pop('_tidy') + return l + + +def _dict_decorators(): + """Tailored instrumentation wrappers for any dict-like mapping class.""" + + def _tidy(fn): + fn._sa_instrumented = True + fn.__doc__ = getattr(dict, fn.__name__).__doc__ + + Unspecified = util.symbol('Unspecified') + + def __setitem__(fn): + def __setitem__(self, key, value, _sa_initiator=None): + if key in self: + __del(self, self[key], _sa_initiator) + value = __set(self, value, _sa_initiator) + fn(self, key, value) + _tidy(__setitem__) + return __setitem__ + + def __delitem__(fn): + def __delitem__(self, key, _sa_initiator=None): + if key in self: + __del(self, self[key], _sa_initiator) + fn(self, key) + _tidy(__delitem__) + return __delitem__ + + def clear(fn): + def clear(self): + for key in self: + __del(self, self[key]) + fn(self) + _tidy(clear) + return clear + + def pop(fn): + def pop(self, key, default=Unspecified): + if key in self: + __del(self, self[key]) + if default is Unspecified: + return fn(self, key) + else: + return fn(self, key, default) + _tidy(pop) + return pop + + def popitem(fn): + def popitem(self): + __before_delete(self) + item = fn(self) + __del(self, item[1]) + return item + _tidy(popitem) + return popitem + + def setdefault(fn): + def setdefault(self, key, default=None): + if key not in self: + self.__setitem__(key, default) + return default + else: + return self.__getitem__(key) + _tidy(setdefault) + return setdefault + + def update(fn): + def update(self, __other=Unspecified, **kw): + if __other is not Unspecified: + if hasattr(__other, 'keys'): + for key in list(__other): + if (key not in self or + self[key] is not __other[key]): + self[key] = __other[key] + else: + for key, value in __other: + if key not in self or self[key] is not value: + self[key] = value + for key in kw: + if key not in self or self[key] is not kw[key]: + self[key] = kw[key] + _tidy(update) + return update + + l = locals().copy() + l.pop('_tidy') + l.pop('Unspecified') + return l + +_set_binop_bases = (set, frozenset) + + +def _set_binops_check_strict(self, obj): + """Allow only set, frozenset and self.__class__-derived + objects in binops.""" + return isinstance(obj, _set_binop_bases + (self.__class__,)) + + +def _set_binops_check_loose(self, obj): + """Allow anything set-like to participate in set binops.""" + return (isinstance(obj, _set_binop_bases + (self.__class__,)) or + util.duck_type_collection(obj) == set) + + +def _set_decorators(): + """Tailored instrumentation wrappers for any set-like class.""" + + def _tidy(fn): + fn._sa_instrumented = True + fn.__doc__ = getattr(set, fn.__name__).__doc__ + + Unspecified = util.symbol('Unspecified') + + def add(fn): + def add(self, value, _sa_initiator=None): + if value not in self: + value = __set(self, value, _sa_initiator) + # testlib.pragma exempt:__hash__ + fn(self, value) + _tidy(add) + return add + + def discard(fn): + def discard(self, value, _sa_initiator=None): + # testlib.pragma exempt:__hash__ + if value in self: + __del(self, value, _sa_initiator) + # testlib.pragma exempt:__hash__ + fn(self, value) + _tidy(discard) + return discard + + def remove(fn): + def remove(self, value, _sa_initiator=None): + # testlib.pragma exempt:__hash__ + if value in self: + __del(self, value, _sa_initiator) + # testlib.pragma exempt:__hash__ + fn(self, value) + _tidy(remove) + return remove + + def pop(fn): + def pop(self): + __before_delete(self) + item = fn(self) + __del(self, item) + return item + _tidy(pop) + return pop + + def clear(fn): + def clear(self): 
+ for item in list(self): + self.remove(item) + _tidy(clear) + return clear + + def update(fn): + def update(self, value): + for item in value: + self.add(item) + _tidy(update) + return update + + def __ior__(fn): + def __ior__(self, value): + if not _set_binops_check_strict(self, value): + return NotImplemented + for item in value: + self.add(item) + return self + _tidy(__ior__) + return __ior__ + + def difference_update(fn): + def difference_update(self, value): + for item in value: + self.discard(item) + _tidy(difference_update) + return difference_update + + def __isub__(fn): + def __isub__(self, value): + if not _set_binops_check_strict(self, value): + return NotImplemented + for item in value: + self.discard(item) + return self + _tidy(__isub__) + return __isub__ + + def intersection_update(fn): + def intersection_update(self, other): + want, have = self.intersection(other), set(self) + remove, add = have - want, want - have + + for item in remove: + self.remove(item) + for item in add: + self.add(item) + _tidy(intersection_update) + return intersection_update + + def __iand__(fn): + def __iand__(self, other): + if not _set_binops_check_strict(self, other): + return NotImplemented + want, have = self.intersection(other), set(self) + remove, add = have - want, want - have + + for item in remove: + self.remove(item) + for item in add: + self.add(item) + return self + _tidy(__iand__) + return __iand__ + + def symmetric_difference_update(fn): + def symmetric_difference_update(self, other): + want, have = self.symmetric_difference(other), set(self) + remove, add = have - want, want - have + + for item in remove: + self.remove(item) + for item in add: + self.add(item) + _tidy(symmetric_difference_update) + return symmetric_difference_update + + def __ixor__(fn): + def __ixor__(self, other): + if not _set_binops_check_strict(self, other): + return NotImplemented + want, have = self.symmetric_difference(other), set(self) + remove, add = have - want, want - have + + for item in remove: + self.remove(item) + for item in add: + self.add(item) + return self + _tidy(__ixor__) + return __ixor__ + + l = locals().copy() + l.pop('_tidy') + l.pop('Unspecified') + return l + + +class InstrumentedList(list): + """An instrumented version of the built-in list.""" + + +class InstrumentedSet(set): + """An instrumented version of the built-in set.""" + + +class InstrumentedDict(dict): + """An instrumented version of the built-in dict.""" + + +__canned_instrumentation = { + list: InstrumentedList, + set: InstrumentedSet, + dict: InstrumentedDict, +} + +__interfaces = { + list: ( + {'appender': 'append', 'remover': 'remove', + 'iterator': '__iter__'}, _list_decorators() + ), + + set: ({'appender': 'add', + 'remover': 'remove', + 'iterator': '__iter__'}, _set_decorators() + ), + + # decorators are required for dicts and object collections. + dict: ({'iterator': 'values'}, _dict_decorators()) if util.py3k + else ({'iterator': 'itervalues'}, _dict_decorators()), +} + + +class MappedCollection(dict): + """A basic dictionary-based collection class. + + Extends dict with the minimal bag semantics that collection + classes require. ``set`` and ``remove`` are implemented in terms + of a keying function: any callable that takes an object and + returns an object for use as a dictionary key. + + """ + + def __init__(self, keyfunc): + """Create a new collection with keying provided by keyfunc. + + keyfunc may be any callable that takes an object and returns an object + for use as a dictionary key. 
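+ + A rough usage sketch (``Note`` standing in for any mapped class with + a ``keyword`` attribute):: + + from sqlalchemy.orm.collections import attribute_mapped_collection + + notes = relationship( + "Note", + collection_class=attribute_mapped_collection('keyword')) + + Each loaded ``Note`` is then keyed by its ``keyword`` value.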
+ + The keyfunc will be called every time the ORM needs to add a member by + value-only (such as when loading instances from the database) or + remove a member. The usual cautions about dictionary keying apply- + ``keyfunc(object)`` should return the same output for the life of the + collection. Keying based on mutable properties can result in + unreachable instances "lost" in the collection. + + """ + self.keyfunc = keyfunc + + @collection.appender + @collection.internally_instrumented + def set(self, value, _sa_initiator=None): + """Add an item by value, consulting the keyfunc for the key.""" + + key = self.keyfunc(value) + self.__setitem__(key, value, _sa_initiator) + + @collection.remover + @collection.internally_instrumented + def remove(self, value, _sa_initiator=None): + """Remove an item by value, consulting the keyfunc for the key.""" + + key = self.keyfunc(value) + # Let self[key] raise if key is not in this collection + # testlib.pragma exempt:__ne__ + if self[key] != value: + raise sa_exc.InvalidRequestError( + "Can not remove '%s': collection holds '%s' for key '%s'. " + "Possible cause: is the MappedCollection key function " + "based on mutable properties or properties that only obtain " + "values after flush?" % + (value, self[key], key)) + self.__delitem__(key, _sa_initiator) + + @collection.converter + def _convert(self, dictlike): + """Validate and convert a dict-like object into values for set()ing. + + This is called behind the scenes when a MappedCollection is replaced + entirely by another collection, as in:: + + myobj.mappedcollection = {'a':obj1, 'b': obj2} # ... + + Raises a TypeError if the key in any (key, value) pair in the dictlike + object does not match the key that this collection's keyfunc would + have assigned for that value. + + """ + for incoming_key, value in util.dictlike_iteritems(dictlike): + new_key = self.keyfunc(value) + if incoming_key != new_key: + raise TypeError( + "Found incompatible key %r for value %r; this " + "collection's " + "keying function requires a key of %r for this value." % ( + incoming_key, value, new_key)) + yield value + +# ensure instrumentation is associated with +# these built-in classes; if a user-defined class +# subclasses these and uses @internally_instrumented, +# the superclass is otherwise not instrumented. +# see [ticket:2406]. +_instrument_class(MappedCollection) +_instrument_class(InstrumentedList) +_instrument_class(InstrumentedSet) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/dependency.py b/lib/python3.4/site-packages/sqlalchemy/orm/dependency.py new file mode 100644 index 0000000..a3e5b12 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/dependency.py @@ -0,0 +1,1175 @@ +# orm/dependency.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Relationship dependencies. + +""" + +from .. import sql, util, exc as sa_exc +from . 
import attributes, exc, sync, unitofwork, \ + util as mapperutil +from .interfaces import ONETOMANY, MANYTOONE, MANYTOMANY + + +class DependencyProcessor(object): + def __init__(self, prop): + self.prop = prop + self.cascade = prop.cascade + self.mapper = prop.mapper + self.parent = prop.parent + self.secondary = prop.secondary + self.direction = prop.direction + self.post_update = prop.post_update + self.passive_deletes = prop.passive_deletes + self.passive_updates = prop.passive_updates + self.enable_typechecks = prop.enable_typechecks + if self.passive_deletes: + self._passive_delete_flag = attributes.PASSIVE_NO_INITIALIZE + else: + self._passive_delete_flag = attributes.PASSIVE_OFF + if self.passive_updates: + self._passive_update_flag = attributes.PASSIVE_NO_INITIALIZE + else: + self._passive_update_flag = attributes.PASSIVE_OFF + + self.key = prop.key + if not self.prop.synchronize_pairs: + raise sa_exc.ArgumentError( + "Can't build a DependencyProcessor for relationship %s. " + "No target attributes to populate between parent and " + "child are present" % + self.prop) + + @classmethod + def from_relationship(cls, prop): + return _direction_to_processor[prop.direction](prop) + + def hasparent(self, state): + """return True if the given object instance has a parent, + according to the ``InstrumentedAttribute`` handled by this + ``DependencyProcessor``. + + """ + return self.parent.class_manager.get_impl(self.key).hasparent(state) + + def per_property_preprocessors(self, uow): + """establish actions and dependencies related to a flush. + + These actions will operate on all relevant states in + the aggregate. + + """ + uow.register_preprocessor(self, True) + + def per_property_flush_actions(self, uow): + after_save = unitofwork.ProcessAll(uow, self, False, True) + before_delete = unitofwork.ProcessAll(uow, self, True, True) + + parent_saves = unitofwork.SaveUpdateAll( + uow, + self.parent.primary_base_mapper + ) + child_saves = unitofwork.SaveUpdateAll( + uow, + self.mapper.primary_base_mapper + ) + + parent_deletes = unitofwork.DeleteAll( + uow, + self.parent.primary_base_mapper + ) + child_deletes = unitofwork.DeleteAll( + uow, + self.mapper.primary_base_mapper + ) + + self.per_property_dependencies(uow, + parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, + before_delete + ) + + def per_state_flush_actions(self, uow, states, isdelete): + """establish actions and dependencies related to a flush. + + These actions will operate on all relevant states + individually. This occurs only if there are cycles + in the 'aggregated' version of events. + + """ + + parent_base_mapper = self.parent.primary_base_mapper + child_base_mapper = self.mapper.primary_base_mapper + child_saves = unitofwork.SaveUpdateAll(uow, child_base_mapper) + child_deletes = unitofwork.DeleteAll(uow, child_base_mapper) + + # locate and disable the aggregate processors + # for this dependency + + if isdelete: + before_delete = unitofwork.ProcessAll(uow, self, True, True) + before_delete.disabled = True + else: + after_save = unitofwork.ProcessAll(uow, self, False, True) + after_save.disabled = True + + # check if the "child" side is part of the cycle + + if child_saves not in uow.cycles: + # based on the current dependencies we use, the saves/ + # deletes should always be in the 'cycles' collection + # together. if this changes, we will have to break up + # this method a bit more. 
+ assert child_deletes not in uow.cycles + + # child side is not part of the cycle, so we will link per-state + # actions to the aggregate "saves", "deletes" actions + child_actions = [ + (child_saves, False), (child_deletes, True) + ] + child_in_cycles = False + else: + child_in_cycles = True + + # check if the "parent" side is part of the cycle + if not isdelete: + parent_saves = unitofwork.SaveUpdateAll( + uow, + self.parent.base_mapper) + parent_deletes = before_delete = None + if parent_saves in uow.cycles: + parent_in_cycles = True + else: + parent_deletes = unitofwork.DeleteAll( + uow, + self.parent.base_mapper) + parent_saves = after_save = None + if parent_deletes in uow.cycles: + parent_in_cycles = True + + # now create actions /dependencies for each state. + + for state in states: + # detect if there's anything changed or loaded + # by a preprocessor on this state/attribute. In the + # case of deletes we may try to load missing items here as well. + sum_ = state.manager[self.key].impl.get_all_pending( + state, state.dict, + self._passive_delete_flag + if isdelete + else attributes.PASSIVE_NO_INITIALIZE) + + if not sum_: + continue + + if isdelete: + before_delete = unitofwork.ProcessState(uow, + self, True, state) + if parent_in_cycles: + parent_deletes = unitofwork.DeleteState( + uow, + state, + parent_base_mapper) + else: + after_save = unitofwork.ProcessState(uow, self, False, state) + if parent_in_cycles: + parent_saves = unitofwork.SaveUpdateState( + uow, + state, + parent_base_mapper) + + if child_in_cycles: + child_actions = [] + for child_state, child in sum_: + if child_state not in uow.states: + child_action = (None, None) + else: + (deleted, listonly) = uow.states[child_state] + if deleted: + child_action = ( + unitofwork.DeleteState( + uow, child_state, + child_base_mapper), + True) + else: + child_action = ( + unitofwork.SaveUpdateState( + uow, child_state, + child_base_mapper), + False) + child_actions.append(child_action) + + # establish dependencies between our possibly per-state + # parent action and our possibly per-state child action. 
+ for child_action, childisdelete in child_actions: + self.per_state_dependencies(uow, parent_saves, + parent_deletes, + child_action, + after_save, before_delete, + isdelete, childisdelete) + + def presort_deletes(self, uowcommit, states): + return False + + def presort_saves(self, uowcommit, states): + return False + + def process_deletes(self, uowcommit, states): + pass + + def process_saves(self, uowcommit, states): + pass + + def prop_has_changes(self, uowcommit, states, isdelete): + if not isdelete or self.passive_deletes: + passive = attributes.PASSIVE_NO_INITIALIZE + elif self.direction is MANYTOONE: + passive = attributes.PASSIVE_NO_FETCH_RELATED + else: + passive = attributes.PASSIVE_OFF + + for s in states: + # TODO: add a high speed method + # to InstanceState which returns: attribute + # has a non-None value, or had one + history = uowcommit.get_attribute_history( + s, + self.key, + passive) + if history and not history.empty(): + return True + else: + return states and \ + not self.prop._is_self_referential and \ + self.mapper in uowcommit.mappers + + def _verify_canload(self, state): + if self.prop.uselist and state is None: + raise exc.FlushError( + "Can't flush None value found in " + "collection %s" % (self.prop, )) + elif state is not None and \ + not self.mapper._canload( + state, allow_subtypes=not self.enable_typechecks): + if self.mapper._canload(state, allow_subtypes=True): + raise exc.FlushError('Attempting to flush an item of type ' + '%(x)s as a member of collection ' + '"%(y)s". Expected an object of type ' + '%(z)s or a polymorphic subclass of ' + 'this type. If %(x)s is a subclass of ' + '%(z)s, configure mapper "%(zm)s" to ' + 'load this subtype polymorphically, or ' + 'set enable_typechecks=False to allow ' + 'any subtype to be accepted for flush. ' + % { + 'x': state.class_, + 'y': self.prop, + 'z': self.mapper.class_, + 'zm': self.mapper, + }) + else: + raise exc.FlushError( + 'Attempting to flush an item of type ' + '%(x)s as a member of collection ' + '"%(y)s". Expected an object of type ' + '%(z)s or a polymorphic subclass of ' + 'this type.' 
% { + 'x': state.class_, + 'y': self.prop, + 'z': self.mapper.class_, + }) + + def _synchronize(self, state, child, associationrow, + clearkeys, uowcommit): + raise NotImplementedError() + + def _get_reversed_processed_set(self, uow): + if not self.prop._reverse_property: + return None + + process_key = tuple(sorted( + [self.key] + + [p.key for p in self.prop._reverse_property] + )) + return uow.memo( + ('reverse_key', process_key), + set + ) + + def _post_update(self, state, uowcommit, related, is_m2o_delete=False): + for x in related: + if not is_m2o_delete or x is not None: + uowcommit.issue_post_update( + state, + [r for l, r in self.prop.synchronize_pairs] + ) + break + + def _pks_changed(self, uowcommit, state): + raise NotImplementedError() + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, self.prop) + + +class OneToManyDP(DependencyProcessor): + + def per_property_dependencies(self, uow, parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, + before_delete, + ): + if self.post_update: + child_post_updates = unitofwork.IssuePostUpdate( + uow, + self.mapper.primary_base_mapper, + False) + child_pre_updates = unitofwork.IssuePostUpdate( + uow, + self.mapper.primary_base_mapper, + True) + + uow.dependencies.update([ + (child_saves, after_save), + (parent_saves, after_save), + (after_save, child_post_updates), + + (before_delete, child_pre_updates), + (child_pre_updates, parent_deletes), + (child_pre_updates, child_deletes), + + ]) + else: + uow.dependencies.update([ + (parent_saves, after_save), + (after_save, child_saves), + (after_save, child_deletes), + + (child_saves, parent_deletes), + (child_deletes, parent_deletes), + + (before_delete, child_saves), + (before_delete, child_deletes), + ]) + + def per_state_dependencies(self, uow, + save_parent, + delete_parent, + child_action, + after_save, before_delete, + isdelete, childisdelete): + + if self.post_update: + + child_post_updates = unitofwork.IssuePostUpdate( + uow, + self.mapper.primary_base_mapper, + False) + child_pre_updates = unitofwork.IssuePostUpdate( + uow, + self.mapper.primary_base_mapper, + True) + + # TODO: this whole block is not covered + # by any tests + if not isdelete: + if childisdelete: + uow.dependencies.update([ + (child_action, after_save), + (after_save, child_post_updates), + ]) + else: + uow.dependencies.update([ + (save_parent, after_save), + (child_action, after_save), + (after_save, child_post_updates), + ]) + else: + if childisdelete: + uow.dependencies.update([ + (before_delete, child_pre_updates), + (child_pre_updates, delete_parent), + ]) + else: + uow.dependencies.update([ + (before_delete, child_pre_updates), + (child_pre_updates, delete_parent), + ]) + elif not isdelete: + uow.dependencies.update([ + (save_parent, after_save), + (after_save, child_action), + (save_parent, child_action) + ]) + else: + uow.dependencies.update([ + (before_delete, child_action), + (child_action, delete_parent) + ]) + + def presort_deletes(self, uowcommit, states): + # head object is being deleted, and we manage its list of + # child objects the child objects have to have their + # foreign key to the parent set to NULL + should_null_fks = not self.cascade.delete and \ + not self.passive_deletes == 'all' + + for state in states: + history = uowcommit.get_attribute_history( + state, + self.key, + self._passive_delete_flag) + if history: + for child in history.deleted: + if child is not None and self.hasparent(child) is False: + if self.cascade.delete_orphan: + 
uowcommit.register_object(child, isdelete=True)
+                        else:
+                            uowcommit.register_object(child)
+
+                if should_null_fks:
+                    for child in history.unchanged:
+                        if child is not None:
+                            uowcommit.register_object(
+                                child, operation="delete", prop=self.prop)
+
+    def presort_saves(self, uowcommit, states):
+        children_added = uowcommit.memo(('children_added', self), set)
+
+        for state in states:
+            pks_changed = self._pks_changed(uowcommit, state)
+
+            if not pks_changed or self.passive_updates:
+                passive = attributes.PASSIVE_NO_INITIALIZE
+            else:
+                passive = attributes.PASSIVE_OFF
+
+            history = uowcommit.get_attribute_history(
+                state,
+                self.key,
+                passive)
+            if history:
+                for child in history.added:
+                    if child is not None:
+                        uowcommit.register_object(child, cancel_delete=True,
+                                                  operation="add",
+                                                  prop=self.prop)
+
+                children_added.update(history.added)
+
+                for child in history.deleted:
+                    if not self.cascade.delete_orphan:
+                        uowcommit.register_object(child, isdelete=False,
+                                                  operation='delete',
+                                                  prop=self.prop)
+                    elif self.hasparent(child) is False:
+                        uowcommit.register_object(
+                            child, isdelete=True,
+                            operation="delete", prop=self.prop)
+                        for c, m, st_, dct_ in self.mapper.cascade_iterator(
+                                'delete', child):
+                            uowcommit.register_object(
+                                st_,
+                                isdelete=True)
+
+            if pks_changed:
+                if history:
+                    for child in history.unchanged:
+                        if child is not None:
+                            uowcommit.register_object(
+                                child,
+                                False,
+                                self.passive_updates,
+                                operation="pk change",
+                                prop=self.prop)
+
+    def process_deletes(self, uowcommit, states):
+        # head object is being deleted, and we manage its list of
+        # child objects; the child objects have to have their foreign
+        # key to the parent set to NULL.  this phase can be called
+        # safely for any cascade, but is unnecessary if delete cascade
+        # is on.
+
+        if self.post_update or not self.passive_deletes == 'all':
+            children_added = uowcommit.memo(('children_added', self), set)
+
+            for state in states:
+                history = uowcommit.get_attribute_history(
+                    state,
+                    self.key,
+                    self._passive_delete_flag)
+                if history:
+                    for child in history.deleted:
+                        if child is not None and \
+                                self.hasparent(child) is False:
+                            self._synchronize(
+                                state,
+                                child,
+                                None, True,
+                                uowcommit, False)
+                            if self.post_update and child:
+                                self._post_update(child, uowcommit, [state])
+
+                    if self.post_update or not self.cascade.delete:
+                        for child in set(history.unchanged).\
+                                difference(children_added):
+                            if child is not None:
+                                self._synchronize(
+                                    state,
+                                    child,
+                                    None, True,
+                                    uowcommit, False)
+                                if self.post_update and child:
+                                    self._post_update(child,
+                                                      uowcommit,
+                                                      [state])
+
+                    # technically, we can even remove each child from the
+                    # collection here too.  but this would be a somewhat
+                    # inconsistent behavior since it wouldn't happen
+                    # if the old parent wasn't deleted but child was moved.
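+
+    # An illustrative sketch for orientation (``Parent`` / ``Child`` are
+    # hypothetical mappings, not part of this module): given
+    #
+    #     class Parent(Base):
+    #         children = relationship("Child")
+    #
+    # with no delete cascade configured, deleting a flushed Parent runs
+    # the delete phase above, which de-associates each loaded child by
+    # emitting, per child, something along the lines of
+    #
+    #     UPDATE child SET parent_id=NULL WHERE child.id = :id
+    #
+    # rather than deleting the child rows themselves.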
+ + def process_saves(self, uowcommit, states): + for state in states: + history = uowcommit.get_attribute_history( + state, + self.key, + attributes.PASSIVE_NO_INITIALIZE) + if history: + for child in history.added: + self._synchronize(state, child, None, + False, uowcommit, False) + if child is not None and self.post_update: + self._post_update(child, uowcommit, [state]) + + for child in history.deleted: + if not self.cascade.delete_orphan and \ + not self.hasparent(child): + self._synchronize(state, child, None, True, + uowcommit, False) + + if self._pks_changed(uowcommit, state): + for child in history.unchanged: + self._synchronize(state, child, None, + False, uowcommit, True) + + def _synchronize(self, state, child, + associationrow, clearkeys, uowcommit, + pks_changed): + source = state + dest = child + self._verify_canload(child) + if dest is None or \ + (not self.post_update and uowcommit.is_deleted(dest)): + return + if clearkeys: + sync.clear(dest, self.mapper, self.prop.synchronize_pairs) + else: + sync.populate(source, self.parent, dest, self.mapper, + self.prop.synchronize_pairs, uowcommit, + self.passive_updates and pks_changed) + + def _pks_changed(self, uowcommit, state): + return sync.source_modified( + uowcommit, + state, + self.parent, + self.prop.synchronize_pairs) + + +class ManyToOneDP(DependencyProcessor): + def __init__(self, prop): + DependencyProcessor.__init__(self, prop) + self.mapper._dependency_processors.append(DetectKeySwitch(prop)) + + def per_property_dependencies(self, uow, + parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, + before_delete): + + if self.post_update: + parent_post_updates = unitofwork.IssuePostUpdate( + uow, + self.parent.primary_base_mapper, + False) + parent_pre_updates = unitofwork.IssuePostUpdate( + uow, + self.parent.primary_base_mapper, + True) + + uow.dependencies.update([ + (child_saves, after_save), + (parent_saves, after_save), + (after_save, parent_post_updates), + + (after_save, parent_pre_updates), + (before_delete, parent_pre_updates), + + (parent_pre_updates, child_deletes), + ]) + else: + uow.dependencies.update([ + (child_saves, after_save), + (after_save, parent_saves), + (parent_saves, child_deletes), + (parent_deletes, child_deletes) + ]) + + def per_state_dependencies(self, uow, + save_parent, + delete_parent, + child_action, + after_save, before_delete, + isdelete, childisdelete): + + if self.post_update: + + if not isdelete: + parent_post_updates = unitofwork.IssuePostUpdate( + uow, + self.parent.primary_base_mapper, + False) + if childisdelete: + uow.dependencies.update([ + (after_save, parent_post_updates), + (parent_post_updates, child_action) + ]) + else: + uow.dependencies.update([ + (save_parent, after_save), + (child_action, after_save), + + (after_save, parent_post_updates) + ]) + else: + parent_pre_updates = unitofwork.IssuePostUpdate( + uow, + self.parent.primary_base_mapper, + True) + + uow.dependencies.update([ + (before_delete, parent_pre_updates), + (parent_pre_updates, delete_parent), + (parent_pre_updates, child_action) + ]) + + elif not isdelete: + if not childisdelete: + uow.dependencies.update([ + (child_action, after_save), + (after_save, save_parent), + ]) + else: + uow.dependencies.update([ + (after_save, save_parent), + ]) + + else: + if childisdelete: + uow.dependencies.update([ + (delete_parent, child_action) + ]) + + def presort_deletes(self, uowcommit, states): + if self.cascade.delete or self.cascade.delete_orphan: + for state in states: + history = 
uowcommit.get_attribute_history( + state, + self.key, + self._passive_delete_flag) + if history: + if self.cascade.delete_orphan: + todelete = history.sum() + else: + todelete = history.non_deleted() + for child in todelete: + if child is None: + continue + uowcommit.register_object( + child, isdelete=True, + operation="delete", prop=self.prop) + t = self.mapper.cascade_iterator('delete', child) + for c, m, st_, dct_ in t: + uowcommit.register_object( + st_, isdelete=True) + + def presort_saves(self, uowcommit, states): + for state in states: + uowcommit.register_object(state, operation="add", prop=self.prop) + if self.cascade.delete_orphan: + history = uowcommit.get_attribute_history( + state, + self.key, + self._passive_delete_flag) + if history: + for child in history.deleted: + if self.hasparent(child) is False: + uowcommit.register_object( + child, isdelete=True, + operation="delete", prop=self.prop) + + t = self.mapper.cascade_iterator('delete', child) + for c, m, st_, dct_ in t: + uowcommit.register_object(st_, isdelete=True) + + def process_deletes(self, uowcommit, states): + if self.post_update and \ + not self.cascade.delete_orphan and \ + not self.passive_deletes == 'all': + + # post_update means we have to update our + # row to not reference the child object + # before we can DELETE the row + for state in states: + self._synchronize(state, None, None, True, uowcommit) + if state and self.post_update: + history = uowcommit.get_attribute_history( + state, + self.key, + self._passive_delete_flag) + if history: + self._post_update( + state, uowcommit, history.sum(), + is_m2o_delete=True) + + def process_saves(self, uowcommit, states): + for state in states: + history = uowcommit.get_attribute_history( + state, + self.key, + attributes.PASSIVE_NO_INITIALIZE) + if history: + if history.added: + for child in history.added: + self._synchronize(state, child, None, False, + uowcommit, "add") + if self.post_update: + self._post_update(state, uowcommit, history.sum()) + + def _synchronize(self, state, child, associationrow, + clearkeys, uowcommit, operation=None): + if state is None or \ + (not self.post_update and uowcommit.is_deleted(state)): + return + + if operation is not None and \ + child is not None and \ + not uowcommit.session._contains_state(child): + util.warn( + "Object of type %s not in session, %s " + "operation along '%s' won't proceed" % + (mapperutil.state_class_str(child), operation, self.prop)) + return + + if clearkeys or child is None: + sync.clear(state, self.parent, self.prop.synchronize_pairs) + else: + self._verify_canload(child) + sync.populate(child, self.mapper, state, + self.parent, + self.prop.synchronize_pairs, + uowcommit, + False) + + +class DetectKeySwitch(DependencyProcessor): + """For many-to-one relationships with no one-to-many backref, + searches for parents through the unit of work when a primary + key has changed and updates them. + + Theoretically, this approach could be expanded to support transparent + deletion of objects referenced via many-to-one as well, although + the current attribute system doesn't do enough bookkeeping for this + to be efficient. 
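+
+    For example (an illustrative sketch; the ``User`` / ``Address``
+    mapping here is hypothetical and not part of this module)::
+
+        class Address(Base):
+            __tablename__ = 'address'
+            id = Column(Integer, primary_key=True)
+            user_id = Column(Integer, ForeignKey('user.id'))
+            user = relationship("User")   # many-to-one, no backref
+
+    Changing the primary key of a flushed ``User`` leads this processor
+    to scan the session for ``Address`` states whose ``user`` refers to
+    that instance, and to update their ``user_id`` within the same flush.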
+ + """ + + def per_property_preprocessors(self, uow): + if self.prop._reverse_property: + if self.passive_updates: + return + else: + if False in (prop.passive_updates for + prop in self.prop._reverse_property): + return + + uow.register_preprocessor(self, False) + + def per_property_flush_actions(self, uow): + parent_saves = unitofwork.SaveUpdateAll( + uow, + self.parent.base_mapper) + after_save = unitofwork.ProcessAll(uow, self, False, False) + uow.dependencies.update([ + (parent_saves, after_save) + ]) + + def per_state_flush_actions(self, uow, states, isdelete): + pass + + def presort_deletes(self, uowcommit, states): + pass + + def presort_saves(self, uow, states): + if not self.passive_updates: + # for non-passive updates, register in the preprocess stage + # so that mapper save_obj() gets a hold of changes + self._process_key_switches(states, uow) + + def prop_has_changes(self, uow, states, isdelete): + if not isdelete and self.passive_updates: + d = self._key_switchers(uow, states) + return bool(d) + + return False + + def process_deletes(self, uowcommit, states): + assert False + + def process_saves(self, uowcommit, states): + # for passive updates, register objects in the process stage + # so that we avoid ManyToOneDP's registering the object without + # the listonly flag in its own preprocess stage (results in UPDATE) + # statements being emitted + assert self.passive_updates + self._process_key_switches(states, uowcommit) + + def _key_switchers(self, uow, states): + switched, notswitched = uow.memo( + ('pk_switchers', self), + lambda: (set(), set()) + ) + + allstates = switched.union(notswitched) + for s in states: + if s not in allstates: + if self._pks_changed(uow, s): + switched.add(s) + else: + notswitched.add(s) + return switched + + def _process_key_switches(self, deplist, uowcommit): + switchers = self._key_switchers(uowcommit, deplist) + if switchers: + # if primary key values have actually changed somewhere, perform + # a linear search through the UOW in search of a parent. 
+ for state in uowcommit.session.identity_map.all_states(): + if not issubclass(state.class_, self.parent.class_): + continue + dict_ = state.dict + related = state.get_impl(self.key).get( + state, dict_, passive=self._passive_update_flag) + if related is not attributes.PASSIVE_NO_RESULT and \ + related is not None: + related_state = attributes.instance_state(dict_[self.key]) + if related_state in switchers: + uowcommit.register_object(state, + False, + self.passive_updates) + sync.populate( + related_state, + self.mapper, state, + self.parent, self.prop.synchronize_pairs, + uowcommit, self.passive_updates) + + def _pks_changed(self, uowcommit, state): + return bool(state.key) and sync.source_modified( + uowcommit, state, self.mapper, self.prop.synchronize_pairs) + + +class ManyToManyDP(DependencyProcessor): + + def per_property_dependencies(self, uow, parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, + before_delete + ): + + uow.dependencies.update([ + (parent_saves, after_save), + (child_saves, after_save), + (after_save, child_deletes), + + # a rowswitch on the parent from deleted to saved + # can make this one occur, as the "save" may remove + # an element from the + # "deleted" list before we have a chance to + # process its child rows + (before_delete, parent_saves), + + (before_delete, parent_deletes), + (before_delete, child_deletes), + (before_delete, child_saves), + ]) + + def per_state_dependencies(self, uow, + save_parent, + delete_parent, + child_action, + after_save, before_delete, + isdelete, childisdelete): + if not isdelete: + if childisdelete: + uow.dependencies.update([ + (save_parent, after_save), + (after_save, child_action), + ]) + else: + uow.dependencies.update([ + (save_parent, after_save), + (child_action, after_save), + ]) + else: + uow.dependencies.update([ + (before_delete, child_action), + (before_delete, delete_parent) + ]) + + def presort_deletes(self, uowcommit, states): + # TODO: no tests fail if this whole + # thing is removed !!!! + if not self.passive_deletes: + # if no passive deletes, load history on + # the collection, so that prop_has_changes() + # returns True + for state in states: + uowcommit.get_attribute_history( + state, + self.key, + self._passive_delete_flag) + + def presort_saves(self, uowcommit, states): + if not self.passive_updates: + # if no passive updates, load history on + # each collection where parent has changed PK, + # so that prop_has_changes() returns True + for state in states: + if self._pks_changed(uowcommit, state): + history = uowcommit.get_attribute_history( + state, + self.key, + attributes.PASSIVE_OFF) + + if not self.cascade.delete_orphan: + return + + # check for child items removed from the collection + # if delete_orphan check is turned on. 
+ for state in states: + history = uowcommit.get_attribute_history( + state, + self.key, + attributes.PASSIVE_NO_INITIALIZE) + if history: + for child in history.deleted: + if self.hasparent(child) is False: + uowcommit.register_object( + child, isdelete=True, + operation="delete", prop=self.prop) + for c, m, st_, dct_ in self.mapper.cascade_iterator( + 'delete', + child): + uowcommit.register_object( + st_, isdelete=True) + + def process_deletes(self, uowcommit, states): + secondary_delete = [] + secondary_insert = [] + secondary_update = [] + + processed = self._get_reversed_processed_set(uowcommit) + tmp = set() + for state in states: + # this history should be cached already, as + # we loaded it in preprocess_deletes + history = uowcommit.get_attribute_history( + state, + self.key, + self._passive_delete_flag) + if history: + for child in history.non_added(): + if child is None or \ + (processed is not None and + (state, child) in processed): + continue + associationrow = {} + if not self._synchronize( + state, + child, + associationrow, + False, uowcommit, "delete"): + continue + secondary_delete.append(associationrow) + + tmp.update((c, state) for c in history.non_added()) + + if processed is not None: + processed.update(tmp) + + self._run_crud(uowcommit, secondary_insert, + secondary_update, secondary_delete) + + def process_saves(self, uowcommit, states): + secondary_delete = [] + secondary_insert = [] + secondary_update = [] + + processed = self._get_reversed_processed_set(uowcommit) + tmp = set() + + for state in states: + need_cascade_pks = not self.passive_updates and \ + self._pks_changed(uowcommit, state) + if need_cascade_pks: + passive = attributes.PASSIVE_OFF + else: + passive = attributes.PASSIVE_NO_INITIALIZE + history = uowcommit.get_attribute_history(state, self.key, + passive) + if history: + for child in history.added: + if (processed is not None and + (state, child) in processed): + continue + associationrow = {} + if not self._synchronize(state, + child, + associationrow, + False, uowcommit, "add"): + continue + secondary_insert.append(associationrow) + for child in history.deleted: + if (processed is not None and + (state, child) in processed): + continue + associationrow = {} + if not self._synchronize(state, + child, + associationrow, + False, uowcommit, "delete"): + continue + secondary_delete.append(associationrow) + + tmp.update((c, state) + for c in history.added + history.deleted) + + if need_cascade_pks: + + for child in history.unchanged: + associationrow = {} + sync.update(state, + self.parent, + associationrow, + "old_", + self.prop.synchronize_pairs) + sync.update(child, + self.mapper, + associationrow, + "old_", + self.prop.secondary_synchronize_pairs) + + secondary_update.append(associationrow) + + if processed is not None: + processed.update(tmp) + + self._run_crud(uowcommit, secondary_insert, + secondary_update, secondary_delete) + + def _run_crud(self, uowcommit, secondary_insert, + secondary_update, secondary_delete): + connection = uowcommit.transaction.connection(self.mapper) + + if secondary_delete: + associationrow = secondary_delete[0] + statement = self.secondary.delete(sql.and_(*[ + c == sql.bindparam(c.key, type_=c.type) + for c in self.secondary.c + if c.key in associationrow + ])) + result = connection.execute(statement, secondary_delete) + + if result.supports_sane_multi_rowcount() and \ + result.rowcount != len(secondary_delete): + raise exc.StaleDataError( + "DELETE statement on table '%s' expected to delete " + "%d row(s); Only %d 
were matched." % + (self.secondary.description, len(secondary_delete), + result.rowcount) + ) + + if secondary_update: + associationrow = secondary_update[0] + statement = self.secondary.update(sql.and_(*[ + c == sql.bindparam("old_" + c.key, type_=c.type) + for c in self.secondary.c + if c.key in associationrow + ])) + result = connection.execute(statement, secondary_update) + + if result.supports_sane_multi_rowcount() and \ + result.rowcount != len(secondary_update): + raise exc.StaleDataError( + "UPDATE statement on table '%s' expected to update " + "%d row(s); Only %d were matched." % + (self.secondary.description, len(secondary_update), + result.rowcount) + ) + + if secondary_insert: + statement = self.secondary.insert() + connection.execute(statement, secondary_insert) + + def _synchronize(self, state, child, associationrow, + clearkeys, uowcommit, operation): + + # this checks for None if uselist=True + self._verify_canload(child) + + # but if uselist=False we get here. If child is None, + # no association row can be generated, so return. + if child is None: + return False + + if child is not None and not uowcommit.session._contains_state(child): + if not child.deleted: + util.warn( + "Object of type %s not in session, %s " + "operation along '%s' won't proceed" % + (mapperutil.state_class_str(child), operation, self.prop)) + return False + + sync.populate_dict(state, self.parent, associationrow, + self.prop.synchronize_pairs) + sync.populate_dict(child, self.mapper, associationrow, + self.prop.secondary_synchronize_pairs) + + return True + + def _pks_changed(self, uowcommit, state): + return sync.source_modified( + uowcommit, + state, + self.parent, + self.prop.synchronize_pairs) + +_direction_to_processor = { + ONETOMANY: OneToManyDP, + MANYTOONE: ManyToOneDP, + MANYTOMANY: ManyToManyDP, +} diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/deprecated_interfaces.py b/lib/python3.4/site-packages/sqlalchemy/orm/deprecated_interfaces.py new file mode 100644 index 0000000..6477e82 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/deprecated_interfaces.py @@ -0,0 +1,487 @@ +# orm/deprecated_interfaces.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from .. import event, util +from .interfaces import EXT_CONTINUE + + +@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces") +class MapperExtension(object): + """Base implementation for :class:`.Mapper` event hooks. + + .. note:: + + :class:`.MapperExtension` is deprecated. Please + refer to :func:`.event.listen` as well as + :class:`.MapperEvents`. + + New extension classes subclass :class:`.MapperExtension` and are specified + using the ``extension`` mapper() argument, which is a single + :class:`.MapperExtension` or a list of such:: + + from sqlalchemy.orm.interfaces import MapperExtension + + class MyExtension(MapperExtension): + def before_insert(self, mapper, connection, instance): + print "instance %s before insert !" % instance + + m = mapper(User, users_table, extension=MyExtension()) + + A single mapper can maintain a chain of ``MapperExtension`` + objects. 
When a particular mapping event occurs, the + corresponding method on each ``MapperExtension`` is invoked + serially, and each method has the ability to halt the chain + from proceeding further:: + + m = mapper(User, users_table, extension=[ext1, ext2, ext3]) + + Each ``MapperExtension`` method returns the symbol + EXT_CONTINUE by default. This symbol generally means "move + to the next ``MapperExtension`` for processing". For methods + that return objects like translated rows or new object + instances, EXT_CONTINUE means the result of the method + should be ignored. In some cases it's required for a + default mapper activity to be performed, such as adding a + new instance to a result list. + + The symbol EXT_STOP has significance within a chain + of ``MapperExtension`` objects that the chain will be stopped + when this symbol is returned. Like EXT_CONTINUE, it also + has additional significance in some cases that a default + mapper activity will not be performed. + + """ + + @classmethod + def _adapt_instrument_class(cls, self, listener): + cls._adapt_listener_methods(self, listener, ('instrument_class',)) + + @classmethod + def _adapt_listener(cls, self, listener): + cls._adapt_listener_methods( + self, listener, + ( + 'init_instance', + 'init_failed', + 'reconstruct_instance', + 'before_insert', + 'after_insert', + 'before_update', + 'after_update', + 'before_delete', + 'after_delete' + )) + + @classmethod + def _adapt_listener_methods(cls, self, listener, methods): + + for meth in methods: + me_meth = getattr(MapperExtension, meth) + ls_meth = getattr(listener, meth) + + if not util.methods_equivalent(me_meth, ls_meth): + if meth == 'reconstruct_instance': + def go(ls_meth): + def reconstruct(instance, ctx): + ls_meth(self, instance) + return reconstruct + event.listen(self.class_manager, 'load', + go(ls_meth), raw=False, propagate=True) + elif meth == 'init_instance': + def go(ls_meth): + def init_instance(instance, args, kwargs): + ls_meth(self, self.class_, + self.class_manager.original_init, + instance, args, kwargs) + return init_instance + event.listen(self.class_manager, 'init', + go(ls_meth), raw=False, propagate=True) + elif meth == 'init_failed': + def go(ls_meth): + def init_failed(instance, args, kwargs): + util.warn_exception( + ls_meth, self, self.class_, + self.class_manager.original_init, + instance, args, kwargs) + + return init_failed + event.listen(self.class_manager, 'init_failure', + go(ls_meth), raw=False, propagate=True) + else: + event.listen(self, "%s" % meth, ls_meth, + raw=False, retval=True, propagate=True) + + def instrument_class(self, mapper, class_): + """Receive a class when the mapper is first constructed, and has + applied instrumentation to the mapped class. + + The return value is only significant within the ``MapperExtension`` + chain; the parent mapper's behavior isn't modified by this method. + + """ + return EXT_CONTINUE + + def init_instance(self, mapper, class_, oldinit, instance, args, kwargs): + """Receive an instance when its constructor is called. + + This method is only called during a userland construction of + an object. It is not called when an object is loaded from the + database. + + The return value is only significant within the ``MapperExtension`` + chain; the parent mapper's behavior isn't modified by this method. + + """ + return EXT_CONTINUE + + def init_failed(self, mapper, class_, oldinit, instance, args, kwargs): + """Receive an instance when its constructor has been called, + and raised an exception. 
+ + This method is only called during a userland construction of + an object. It is not called when an object is loaded from the + database. + + The return value is only significant within the ``MapperExtension`` + chain; the parent mapper's behavior isn't modified by this method. + + """ + return EXT_CONTINUE + + def reconstruct_instance(self, mapper, instance): + """Receive an object instance after it has been created via + ``__new__``, and after initial attribute population has + occurred. + + This typically occurs when the instance is created based on + incoming result rows, and is only called once for that + instance's lifetime. + + Note that during a result-row load, this method is called upon + the first row received for this instance. Note that some + attributes and collections may or may not be loaded or even + initialized, depending on what's present in the result rows. + + The return value is only significant within the ``MapperExtension`` + chain; the parent mapper's behavior isn't modified by this method. + + """ + return EXT_CONTINUE + + def before_insert(self, mapper, connection, instance): + """Receive an object instance before that instance is inserted + into its table. + + This is a good place to set up primary key values and such + that aren't handled otherwise. + + Column-based attributes can be modified within this method + which will result in the new value being inserted. However + *no* changes to the overall flush plan can be made, and + manipulation of the ``Session`` will not have the desired effect. + To manipulate the ``Session`` within an extension, use + ``SessionExtension``. + + The return value is only significant within the ``MapperExtension`` + chain; the parent mapper's behavior isn't modified by this method. + + """ + + return EXT_CONTINUE + + def after_insert(self, mapper, connection, instance): + """Receive an object instance after that instance is inserted. + + The return value is only significant within the ``MapperExtension`` + chain; the parent mapper's behavior isn't modified by this method. + + """ + + return EXT_CONTINUE + + def before_update(self, mapper, connection, instance): + """Receive an object instance before that instance is updated. + + Note that this method is called for all instances that are marked as + "dirty", even those which have no net changes to their column-based + attributes. An object is marked as dirty when any of its column-based + attributes have a "set attribute" operation called or when any of its + collections are modified. If, at update time, no column-based + attributes have any net changes, no UPDATE statement will be issued. + This means that an instance being sent to before_update is *not* a + guarantee that an UPDATE statement will be issued (although you can + affect the outcome here). + + To detect if the column-based attributes on the object have net + changes, and will therefore generate an UPDATE statement, use + ``object_session(instance).is_modified(instance, + include_collections=False)``. + + Column-based attributes can be modified within this method + which will result in the new value being updated. However + *no* changes to the overall flush plan can be made, and + manipulation of the ``Session`` will not have the desired effect. + To manipulate the ``Session`` within an extension, use + ``SessionExtension``. + + The return value is only significant within the ``MapperExtension`` + chain; the parent mapper's behavior isn't modified by this method. 
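+
+        For example, a sketch of an extension which only does its work
+        when net column changes are present (the ``updated_at`` attribute
+        is hypothetical)::
+
+            import datetime
+
+            from sqlalchemy.orm import object_session
+            from sqlalchemy.orm.interfaces import (
+                EXT_CONTINUE, MapperExtension)
+
+            class TimestampExtension(MapperExtension):
+                def before_update(self, mapper, connection, instance):
+                    # only stamp the row if an UPDATE will actually occur
+                    if object_session(instance).is_modified(
+                            instance, include_collections=False):
+                        instance.updated_at = datetime.datetime.utcnow()
+                    return EXT_CONTINUE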
+ + """ + + return EXT_CONTINUE + + def after_update(self, mapper, connection, instance): + """Receive an object instance after that instance is updated. + + The return value is only significant within the ``MapperExtension`` + chain; the parent mapper's behavior isn't modified by this method. + + """ + + return EXT_CONTINUE + + def before_delete(self, mapper, connection, instance): + """Receive an object instance before that instance is deleted. + + Note that *no* changes to the overall flush plan can be made + here; and manipulation of the ``Session`` will not have the + desired effect. To manipulate the ``Session`` within an + extension, use ``SessionExtension``. + + The return value is only significant within the ``MapperExtension`` + chain; the parent mapper's behavior isn't modified by this method. + + """ + + return EXT_CONTINUE + + def after_delete(self, mapper, connection, instance): + """Receive an object instance after that instance is deleted. + + The return value is only significant within the ``MapperExtension`` + chain; the parent mapper's behavior isn't modified by this method. + + """ + + return EXT_CONTINUE + + +@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces") +class SessionExtension(object): + + """Base implementation for :class:`.Session` event hooks. + + .. note:: + + :class:`.SessionExtension` is deprecated. Please + refer to :func:`.event.listen` as well as + :class:`.SessionEvents`. + + Subclasses may be installed into a :class:`.Session` (or + :class:`.sessionmaker`) using the ``extension`` keyword + argument:: + + from sqlalchemy.orm.interfaces import SessionExtension + + class MySessionExtension(SessionExtension): + def before_commit(self, session): + print "before commit!" + + Session = sessionmaker(extension=MySessionExtension()) + + The same :class:`.SessionExtension` instance can be used + with any number of sessions. + + """ + + @classmethod + def _adapt_listener(cls, self, listener): + for meth in [ + 'before_commit', + 'after_commit', + 'after_rollback', + 'before_flush', + 'after_flush', + 'after_flush_postexec', + 'after_begin', + 'after_attach', + 'after_bulk_update', + 'after_bulk_delete', + ]: + me_meth = getattr(SessionExtension, meth) + ls_meth = getattr(listener, meth) + + if not util.methods_equivalent(me_meth, ls_meth): + event.listen(self, meth, getattr(listener, meth)) + + def before_commit(self, session): + """Execute right before commit is called. + + Note that this may not be per-flush if a longer running + transaction is ongoing.""" + + def after_commit(self, session): + """Execute after a commit has occurred. + + Note that this may not be per-flush if a longer running + transaction is ongoing.""" + + def after_rollback(self, session): + """Execute after a rollback has occurred. + + Note that this may not be per-flush if a longer running + transaction is ongoing.""" + + def before_flush(self, session, flush_context, instances): + """Execute before flush process has started. + + `instances` is an optional list of objects which were passed to + the ``flush()`` method. """ + + def after_flush(self, session, flush_context): + """Execute after flush has completed, but before commit has been + called. + + Note that the session's state is still in pre-flush, i.e. 'new', + 'dirty', and 'deleted' lists still show pre-flush state as well + as the history settings on instance attributes.""" + + def after_flush_postexec(self, session, flush_context): + """Execute after flush has completed, and after the post-exec + state occurs. 
+ + This will be when the 'new', 'dirty', and 'deleted' lists are in + their final state. An actual commit() may or may not have + occurred, depending on whether or not the flush started its own + transaction or participated in a larger transaction. """ + + def after_begin(self, session, transaction, connection): + """Execute after a transaction is begun on a connection + + `transaction` is the SessionTransaction. This method is called + after an engine level transaction is begun on a connection. """ + + def after_attach(self, session, instance): + """Execute after an instance is attached to a session. + + This is called after an add, delete or merge. """ + + def after_bulk_update(self, session, query, query_context, result): + """Execute after a bulk update operation to the session. + + This is called after a session.query(...).update() + + `query` is the query object that this update operation was + called on. `query_context` was the query context object. + `result` is the result object returned from the bulk operation. + """ + + def after_bulk_delete(self, session, query, query_context, result): + """Execute after a bulk delete operation to the session. + + This is called after a session.query(...).delete() + + `query` is the query object that this delete operation was + called on. `query_context` was the query context object. + `result` is the result object returned from the bulk operation. + """ + + +@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces") +class AttributeExtension(object): + """Base implementation for :class:`.AttributeImpl` event hooks, events + that fire upon attribute mutations in user code. + + .. note:: + + :class:`.AttributeExtension` is deprecated. Please + refer to :func:`.event.listen` as well as + :class:`.AttributeEvents`. + + :class:`.AttributeExtension` is used to listen for set, + remove, and append events on individual mapped attributes. + It is established on an individual mapped attribute using + the `extension` argument, available on + :func:`.column_property`, :func:`.relationship`, and + others:: + + from sqlalchemy.orm.interfaces import AttributeExtension + from sqlalchemy.orm import mapper, relationship, column_property + + class MyAttrExt(AttributeExtension): + def append(self, state, value, initiator): + print "append event !" + return value + + def set(self, state, value, oldvalue, initiator): + print "set event !" + return value + + mapper(SomeClass, sometable, properties={ + 'foo':column_property(sometable.c.foo, extension=MyAttrExt()), + 'bar':relationship(Bar, extension=MyAttrExt()) + }) + + Note that the :class:`.AttributeExtension` methods + :meth:`~.AttributeExtension.append` and + :meth:`~.AttributeExtension.set` need to return the + ``value`` parameter. The returned value is used as the + effective value, and allows the extension to change what is + ultimately persisted. + + AttributeExtension is assembled within the descriptors associated + with a mapped class. + + """ + + active_history = True + """indicates that the set() method would like to receive the 'old' value, + even if it means firing lazy callables. + + Note that ``active_history`` can also be set directly via + :func:`.column_property` and :func:`.relationship`. 
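+
+    As a sketch of the modern equivalent, the same "load the old value"
+    behavior can be requested through :func:`.event.listen`, here using
+    the hypothetical ``SomeClass.foo`` attribute from the example above::
+
+        from sqlalchemy import event
+
+        @event.listens_for(SomeClass.foo, 'set',
+                           retval=True, active_history=True)
+        def on_set(target, value, oldvalue, initiator):
+            # oldvalue is loaded even if it requires a lazy fetch
+            return value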
+ + """ + + @classmethod + def _adapt_listener(cls, self, listener): + event.listen(self, 'append', listener.append, + active_history=listener.active_history, + raw=True, retval=True) + event.listen(self, 'remove', listener.remove, + active_history=listener.active_history, + raw=True, retval=True) + event.listen(self, 'set', listener.set, + active_history=listener.active_history, + raw=True, retval=True) + + def append(self, state, value, initiator): + """Receive a collection append event. + + The returned value will be used as the actual value to be + appended. + + """ + return value + + def remove(self, state, value, initiator): + """Receive a remove event. + + No return value is defined. + + """ + pass + + def set(self, state, value, oldvalue, initiator): + """Receive a set event. + + The returned value will be used as the actual value to be + set. + + """ + return value diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/descriptor_props.py b/lib/python3.4/site-packages/sqlalchemy/orm/descriptor_props.py new file mode 100644 index 0000000..6c87ef9 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/descriptor_props.py @@ -0,0 +1,699 @@ +# orm/descriptor_props.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Descriptor properties are more "auxiliary" properties +that exist as configurational elements, but don't participate +as actively in the load/persist ORM loop. + +""" + +from .interfaces import MapperProperty, PropComparator +from .util import _none_set +from . import attributes +from .. import util, sql, exc as sa_exc, event, schema +from ..sql import expression +from . import properties +from . import query + + +class DescriptorProperty(MapperProperty): + """:class:`.MapperProperty` which proxies access to a + user-defined descriptor.""" + + doc = None + + def instrument_class(self, mapper): + prop = self + + class _ProxyImpl(object): + accepts_scalar_loader = False + expire_missing = True + collection = False + + def __init__(self, key): + self.key = key + + if hasattr(prop, 'get_history'): + def get_history(self, state, dict_, + passive=attributes.PASSIVE_OFF): + return prop.get_history(state, dict_, passive) + + if self.descriptor is None: + desc = getattr(mapper.class_, self.key, None) + if mapper._is_userland_descriptor(desc): + self.descriptor = desc + + if self.descriptor is None: + def fset(obj, value): + setattr(obj, self.name, value) + + def fdel(obj): + delattr(obj, self.name) + + def fget(obj): + return getattr(obj, self.name) + + self.descriptor = property( + fget=fget, + fset=fset, + fdel=fdel, + ) + + proxy_attr = attributes.create_proxied_attribute( + self.descriptor)( + self.parent.class_, + self.key, + self.descriptor, + lambda: self._comparator_factory(mapper), + doc=self.doc, + original_property=self + ) + proxy_attr.impl = _ProxyImpl(self.key) + mapper.class_manager.instrument_attribute(self.key, proxy_attr) + + +@util.langhelpers.dependency_for("sqlalchemy.orm.properties") +class CompositeProperty(DescriptorProperty): + """Defines a "composite" mapped attribute, representing a collection + of columns as one attribute. + + :class:`.CompositeProperty` is constructed using the :func:`.composite` + function. + + .. seealso:: + + :ref:`mapper_composite` + + """ + + def __init__(self, class_, *attrs, **kwargs): + """Return a composite column-based property for use with a Mapper. 
+ + See the mapping documentation section :ref:`mapper_composite` for a + full usage example. + + The :class:`.MapperProperty` returned by :func:`.composite` + is the :class:`.CompositeProperty`. + + :param class\_: + The "composite type" class. + + :param \*cols: + List of Column objects to be mapped. + + :param active_history=False: + When ``True``, indicates that the "previous" value for a + scalar attribute should be loaded when replaced, if not + already loaded. See the same flag on :func:`.column_property`. + + .. versionchanged:: 0.7 + This flag specifically becomes meaningful + - previously it was a placeholder. + + :param group: + A group name for this property when marked as deferred. + + :param deferred: + When True, the column property is "deferred", meaning that it does + not load immediately, and is instead loaded when the attribute is + first accessed on an instance. See also + :func:`~sqlalchemy.orm.deferred`. + + :param comparator_factory: a class which extends + :class:`.CompositeProperty.Comparator` which provides custom SQL + clause generation for comparison operations. + + :param doc: + optional string that will be applied as the doc on the + class-bound descriptor. + + :param info: Optional data dictionary which will be populated into the + :attr:`.MapperProperty.info` attribute of this object. + + .. versionadded:: 0.8 + + :param extension: + an :class:`.AttributeExtension` instance, + or list of extensions, which will be prepended to the list of + attribute listeners for the resulting descriptor placed on the + class. **Deprecated.** Please see :class:`.AttributeEvents`. + + """ + super(CompositeProperty, self).__init__() + + self.attrs = attrs + self.composite_class = class_ + self.active_history = kwargs.get('active_history', False) + self.deferred = kwargs.get('deferred', False) + self.group = kwargs.get('group', None) + self.comparator_factory = kwargs.pop('comparator_factory', + self.__class__.Comparator) + if 'info' in kwargs: + self.info = kwargs.pop('info') + + util.set_creation_order(self) + self._create_descriptor() + + def instrument_class(self, mapper): + super(CompositeProperty, self).instrument_class(mapper) + self._setup_event_handlers() + + def do_init(self): + """Initialization which occurs after the :class:`.CompositeProperty` + has been associated with its parent mapper. + + """ + self._setup_arguments_on_columns() + + def _create_descriptor(self): + """Create the Python descriptor that will serve as + the access point on instances of the mapped class. + + """ + + def fget(instance): + dict_ = attributes.instance_dict(instance) + state = attributes.instance_state(instance) + + if self.key not in dict_: + # key not present. Iterate through related + # attributes, retrieve their values. This + # ensures they all load. + values = [ + getattr(instance, key) + for key in self._attribute_keys + ] + + # current expected behavior here is that the composite is + # created on access if the object is persistent or if + # col attributes have non-None. This would be better + # if the composite were created unconditionally, + # but that would be a behavioral change. 
+ if self.key not in dict_ and ( + state.key is not None or + not _none_set.issuperset(values) + ): + dict_[self.key] = self.composite_class(*values) + state.manager.dispatch.refresh(state, None, [self.key]) + + return dict_.get(self.key, None) + + def fset(instance, value): + dict_ = attributes.instance_dict(instance) + state = attributes.instance_state(instance) + attr = state.manager[self.key] + previous = dict_.get(self.key, attributes.NO_VALUE) + for fn in attr.dispatch.set: + value = fn(state, value, previous, attr.impl) + dict_[self.key] = value + if value is None: + for key in self._attribute_keys: + setattr(instance, key, None) + else: + for key, value in zip( + self._attribute_keys, + value.__composite_values__()): + setattr(instance, key, value) + + def fdel(instance): + state = attributes.instance_state(instance) + dict_ = attributes.instance_dict(instance) + previous = dict_.pop(self.key, attributes.NO_VALUE) + attr = state.manager[self.key] + attr.dispatch.remove(state, previous, attr.impl) + for key in self._attribute_keys: + setattr(instance, key, None) + + self.descriptor = property(fget, fset, fdel) + + @util.memoized_property + def _comparable_elements(self): + return [ + getattr(self.parent.class_, prop.key) + for prop in self.props + ] + + @util.memoized_property + def props(self): + props = [] + for attr in self.attrs: + if isinstance(attr, str): + prop = self.parent.get_property( + attr, _configure_mappers=False) + elif isinstance(attr, schema.Column): + prop = self.parent._columntoproperty[attr] + elif isinstance(attr, attributes.InstrumentedAttribute): + prop = attr.property + else: + raise sa_exc.ArgumentError( + "Composite expects Column objects or mapped " + "attributes/attribute names as arguments, got: %r" + % (attr,)) + props.append(prop) + return props + + @property + def columns(self): + return [a for a in self.attrs if isinstance(a, schema.Column)] + + def _setup_arguments_on_columns(self): + """Propagate configuration arguments made on this composite + to the target columns, for those that apply. + + """ + for prop in self.props: + prop.active_history = self.active_history + if self.deferred: + prop.deferred = self.deferred + prop.strategy_class = prop._strategy_lookup( + ("deferred", True), + ("instrument", True)) + prop.group = self.group + + def _setup_event_handlers(self): + """Establish events that populate/expire the composite attribute.""" + + def load_handler(state, *args): + dict_ = state.dict + + if self.key in dict_: + return + + # if column elements aren't loaded, skip. + # __get__() will initiate a load for those + # columns + for k in self._attribute_keys: + if k not in dict_: + return + + # assert self.key not in dict_ + dict_[self.key] = self.composite_class( + *[state.dict[key] for key in + self._attribute_keys] + ) + + def expire_handler(state, keys): + if keys is None or set(self._attribute_keys).intersection(keys): + state.dict.pop(self.key, None) + + def insert_update_handler(mapper, connection, state): + """After an insert or update, some columns may be expired due + to server side defaults, or re-populated due to client side + defaults. Pop out the composite value here so that it + recreates. 
+ + """ + + state.dict.pop(self.key, None) + + event.listen(self.parent, 'after_insert', + insert_update_handler, raw=True) + event.listen(self.parent, 'after_update', + insert_update_handler, raw=True) + event.listen(self.parent, 'load', + load_handler, raw=True, propagate=True) + event.listen(self.parent, 'refresh', + load_handler, raw=True, propagate=True) + event.listen(self.parent, 'expire', + expire_handler, raw=True, propagate=True) + + # TODO: need a deserialize hook here + + @util.memoized_property + def _attribute_keys(self): + return [ + prop.key for prop in self.props + ] + + def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF): + """Provided for userland code that uses attributes.get_history().""" + + added = [] + deleted = [] + + has_history = False + for prop in self.props: + key = prop.key + hist = state.manager[key].impl.get_history(state, dict_) + if hist.has_changes(): + has_history = True + + non_deleted = hist.non_deleted() + if non_deleted: + added.extend(non_deleted) + else: + added.append(None) + if hist.deleted: + deleted.extend(hist.deleted) + else: + deleted.append(None) + + if has_history: + return attributes.History( + [self.composite_class(*added)], + (), + [self.composite_class(*deleted)] + ) + else: + return attributes.History( + (), [self.composite_class(*added)], () + ) + + def _comparator_factory(self, mapper): + return self.comparator_factory(self, mapper) + + class CompositeBundle(query.Bundle): + def __init__(self, property, expr): + self.property = property + super(CompositeProperty.CompositeBundle, self).__init__( + property.key, *expr) + + def create_row_processor(self, query, procs, labels): + def proc(row): + return self.property.composite_class( + *[proc(row) for proc in procs]) + return proc + + class Comparator(PropComparator): + """Produce boolean, comparison, and other operators for + :class:`.CompositeProperty` attributes. + + See the example in :ref:`composite_operations` for an overview + of usage , as well as the documentation for :class:`.PropComparator`. + + See also: + + :class:`.PropComparator` + + :class:`.ColumnOperators` + + :ref:`types_operators` + + :attr:`.TypeEngine.comparator_factory` + + """ + + __hash__ = None + + @property + def clauses(self): + return self.__clause_element__() + + def __clause_element__(self): + return expression.ClauseList( + group=False, *self._comparable_elements) + + def _query_clause_element(self): + return CompositeProperty.CompositeBundle( + self.prop, self.__clause_element__()) + + @util.memoized_property + def _comparable_elements(self): + if self._adapt_to_entity: + return [ + getattr( + self._adapt_to_entity.entity, + prop.key + ) for prop in self.prop._comparable_elements + ] + else: + return self.prop._comparable_elements + + def __eq__(self, other): + if other is None: + values = [None] * len(self.prop._comparable_elements) + else: + values = other.__composite_values__() + comparisons = [ + a == b + for a, b in zip(self.prop._comparable_elements, values) + ] + if self._adapt_to_entity: + comparisons = [self.adapter(x) for x in comparisons] + return sql.and_(*comparisons) + + def __ne__(self, other): + return sql.not_(self.__eq__(other)) + + def __str__(self): + return str(self.parent.class_.__name__) + "." 
+ self.key + + +@util.langhelpers.dependency_for("sqlalchemy.orm.properties") +class ConcreteInheritedProperty(DescriptorProperty): + """A 'do nothing' :class:`.MapperProperty` that disables + an attribute on a concrete subclass that is only present + on the inherited mapper, not the concrete classes' mapper. + + Cases where this occurs include: + + * When the superclass mapper is mapped against a + "polymorphic union", which includes all attributes from + all subclasses. + * When a relationship() is configured on an inherited mapper, + but not on the subclass mapper. Concrete mappers require + that relationship() is configured explicitly on each + subclass. + + """ + + def _comparator_factory(self, mapper): + comparator_callable = None + + for m in self.parent.iterate_to_root(): + p = m._props[self.key] + if not isinstance(p, ConcreteInheritedProperty): + comparator_callable = p.comparator_factory + break + return comparator_callable + + def __init__(self): + super(ConcreteInheritedProperty, self).__init__() + def warn(): + raise AttributeError("Concrete %s does not implement " + "attribute %r at the instance level. Add " + "this property explicitly to %s." % + (self.parent, self.key, self.parent)) + + class NoninheritedConcreteProp(object): + def __set__(s, obj, value): + warn() + + def __delete__(s, obj): + warn() + + def __get__(s, obj, owner): + if obj is None: + return self.descriptor + warn() + self.descriptor = NoninheritedConcreteProp() + + +@util.langhelpers.dependency_for("sqlalchemy.orm.properties") +class SynonymProperty(DescriptorProperty): + + def __init__(self, name, map_column=None, + descriptor=None, comparator_factory=None, + doc=None, info=None): + """Denote an attribute name as a synonym to a mapped property, + in that the attribute will mirror the value and expression behavior + of another attribute. + + :param name: the name of the existing mapped property. This + can refer to the string name of any :class:`.MapperProperty` + configured on the class, including column-bound attributes + and relationships. + + :param descriptor: a Python :term:`descriptor` that will be used + as a getter (and potentially a setter) when this attribute is + accessed at the instance level. + + :param map_column: if ``True``, the :func:`.synonym` construct will + locate the existing named :class:`.MapperProperty` based on the + attribute name of this :func:`.synonym`, and assign it to a new + attribute linked to the name of this :func:`.synonym`. + That is, given a mapping like:: + + class MyClass(Base): + __tablename__ = 'my_table' + + id = Column(Integer, primary_key=True) + job_status = Column(String(50)) + + job_status = synonym("_job_status", map_column=True) + + The above class ``MyClass`` will now have the ``job_status`` + :class:`.Column` object mapped to the attribute named + ``_job_status``, and the attribute named ``job_status`` will refer + to the synonym itself. This feature is typically used in + conjunction with the ``descriptor`` argument in order to link a + user-defined descriptor as a "wrapper" for an existing column. + + :param info: Optional data dictionary which will be populated into the + :attr:`.InspectionAttr.info` attribute of this object. + + .. versionadded:: 1.0.0 + + :param comparator_factory: A subclass of :class:`.PropComparator` + that will provide custom comparison behavior at the SQL expression + level. + + .. 
note::
+
+            For the use case of providing an attribute which redefines both
+            Python-level and SQL-expression level behavior of an attribute,
+            please refer to the Hybrid attribute introduced at
+            :ref:`mapper_hybrids` for a more effective technique.
+
+        .. seealso::
+
+            :ref:`synonyms` - examples of functionality.
+
+            :ref:`mapper_hybrids` - Hybrids provide a better approach for
+            more complicated attribute-wrapping schemes than synonyms.
+
+        """
+        super(SynonymProperty, self).__init__()
+
+        self.name = name
+        self.map_column = map_column
+        self.descriptor = descriptor
+        self.comparator_factory = comparator_factory
+        self.doc = doc or (descriptor and descriptor.__doc__) or None
+        if info:
+            self.info = info
+
+        util.set_creation_order(self)
+
+    # TODO: when initialized, check _proxied_property,
+    # emit a warning if it's not a column-based property
+
+    @util.memoized_property
+    def _proxied_property(self):
+        return getattr(self.parent.class_, self.name).property
+
+    def _comparator_factory(self, mapper):
+        prop = self._proxied_property
+
+        if self.comparator_factory:
+            comp = self.comparator_factory(prop, mapper)
+        else:
+            comp = prop.comparator_factory(prop, mapper)
+        return comp
+
+    def set_parent(self, parent, init):
+        if self.map_column:
+            # implement the 'map_column' option.
+            if self.key not in parent.mapped_table.c:
+                raise sa_exc.ArgumentError(
+                    "Can't compile synonym '%s': no column on table "
+                    "'%s' named '%s'"
+                    % (self.name, parent.mapped_table.description, self.key))
+            elif parent.mapped_table.c[self.key] in \
+                    parent._columntoproperty and \
+                    parent._columntoproperty[
+                        parent.mapped_table.c[self.key]
+                    ].key == self.name:
+                raise sa_exc.ArgumentError(
+                    "Can't call map_column=True for synonym %r=%r, "
+                    "a ColumnProperty already exists keyed to the name "
+                    "%r for column %r" %
+                    (self.key, self.name, self.name, self.key)
+                )
+            p = properties.ColumnProperty(parent.mapped_table.c[self.key])
+            parent._configure_property(
+                self.name, p,
+                init=init,
+                setparent=True)
+            p._mapped_by_synonym = self.key
+
+        self.parent = parent
+
+
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
+class ComparableProperty(DescriptorProperty):
+    """Instruments a Python property for use in query expressions."""
+
+    def __init__(
+            self, comparator_factory, descriptor=None, doc=None, info=None):
+        """Provides a method of applying a :class:`.PropComparator`
+        to any Python descriptor attribute.
+
+        .. versionchanged:: 0.7
+            :func:`.comparable_property` is superseded by
+            the :mod:`~sqlalchemy.ext.hybrid` extension.  See the example
+            at :ref:`hybrid_custom_comparators`.
+
+        Allows any Python descriptor to behave like a SQL-enabled
+        attribute when used at the class level in queries, allowing
+        redefinition of expression operator behavior.
+ + In the example below we redefine :meth:`.PropComparator.operate` + to wrap both sides of an expression in ``func.lower()`` to produce + case-insensitive comparison:: + + from sqlalchemy.orm import comparable_property + from sqlalchemy.orm.interfaces import PropComparator + from sqlalchemy.sql import func + from sqlalchemy import Integer, String, Column + from sqlalchemy.ext.declarative import declarative_base + + class CaseInsensitiveComparator(PropComparator): + def __clause_element__(self): + return self.prop + + def operate(self, op, other): + return op( + func.lower(self.__clause_element__()), + func.lower(other) + ) + + Base = declarative_base() + + class SearchWord(Base): + __tablename__ = 'search_word' + id = Column(Integer, primary_key=True) + word = Column(String) + word_insensitive = comparable_property(lambda prop, mapper: + CaseInsensitiveComparator( + mapper.c.word, mapper) + ) + + + A mapping like the above allows the ``word_insensitive`` attribute + to render an expression like:: + + >>> print SearchWord.word_insensitive == "Trucks" + lower(search_word.word) = lower(:lower_1) + + :param comparator_factory: + A PropComparator subclass or factory that defines operator behavior + for this property. + + :param descriptor: + Optional when used in a ``properties={}`` declaration. The Python + descriptor or property to layer comparison behavior on top of. + + The like-named descriptor will be automatically retrieved from the + mapped class if left blank in a ``properties`` declaration. + + :param info: Optional data dictionary which will be populated into the + :attr:`.InspectionAttr.info` attribute of this object. + + .. versionadded:: 1.0.0 + + """ + super(ComparableProperty, self).__init__() + self.descriptor = descriptor + self.comparator_factory = comparator_factory + self.doc = doc or (descriptor and descriptor.__doc__) or None + if info: + self.info = info + util.set_creation_order(self) + + def _comparator_factory(self, mapper): + return self.comparator_factory(self, mapper) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/dynamic.py b/lib/python3.4/site-packages/sqlalchemy/orm/dynamic.py new file mode 100644 index 0000000..88187cd --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/dynamic.py @@ -0,0 +1,370 @@ +# orm/dynamic.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Dynamic collection API. + +Dynamic collections act like Query() objects for read operations and support +basic add/delete mutation. + +""" + +from .. import log, util, exc +from ..sql import operators +from . import ( + attributes, object_session, util as orm_util, strategies, + object_mapper, exc as orm_exc, properties +) +from .query import Query + + +@log.class_logger +@properties.RelationshipProperty.strategy_for(lazy="dynamic") +class DynaLoader(strategies.AbstractRelationshipLoader): + def init_class_attribute(self, mapper): + self.is_class_level = True + if not self.uselist: + raise exc.InvalidRequestError( + "On relationship %s, 'dynamic' loaders cannot be used with " + "many-to-one/one-to-one relationships and/or " + "uselist=False." 
% self.parent_property) + strategies._register_attribute( + self, + mapper, + useobject=True, + uselist=True, + impl_class=DynamicAttributeImpl, + target_mapper=self.parent_property.mapper, + order_by=self.parent_property.order_by, + query_class=self.parent_property.query_class, + backref=self.parent_property.back_populates, + ) + + +class DynamicAttributeImpl(attributes.AttributeImpl): + uses_objects = True + accepts_scalar_loader = False + supports_population = False + collection = False + + def __init__(self, class_, key, typecallable, + dispatch, + target_mapper, order_by, query_class=None, **kw): + super(DynamicAttributeImpl, self).\ + __init__(class_, key, typecallable, dispatch, **kw) + self.target_mapper = target_mapper + self.order_by = order_by + if not query_class: + self.query_class = AppenderQuery + elif AppenderMixin in query_class.mro(): + self.query_class = query_class + else: + self.query_class = mixin_user_query(query_class) + + def get(self, state, dict_, passive=attributes.PASSIVE_OFF): + if not passive & attributes.SQL_OK: + return self._get_collection_history( + state, attributes.PASSIVE_NO_INITIALIZE).added_items + else: + return self.query_class(self, state) + + def get_collection(self, state, dict_, user_data=None, + passive=attributes.PASSIVE_NO_INITIALIZE): + if not passive & attributes.SQL_OK: + return self._get_collection_history(state, + passive).added_items + else: + history = self._get_collection_history(state, passive) + return history.added_plus_unchanged + + @util.memoized_property + def _append_token(self): + return attributes.Event(self, attributes.OP_APPEND) + + @util.memoized_property + def _remove_token(self): + return attributes.Event(self, attributes.OP_REMOVE) + + def fire_append_event(self, state, dict_, value, initiator, + collection_history=None): + if collection_history is None: + collection_history = self._modified_event(state, dict_) + + collection_history.add_added(value) + + for fn in self.dispatch.append: + value = fn(state, value, initiator or self._append_token) + + if self.trackparent and value is not None: + self.sethasparent(attributes.instance_state(value), state, True) + + def fire_remove_event(self, state, dict_, value, initiator, + collection_history=None): + if collection_history is None: + collection_history = self._modified_event(state, dict_) + + collection_history.add_removed(value) + + if self.trackparent and value is not None: + self.sethasparent(attributes.instance_state(value), state, False) + + for fn in self.dispatch.remove: + fn(state, value, initiator or self._remove_token) + + def _modified_event(self, state, dict_): + + if self.key not in state.committed_state: + state.committed_state[self.key] = CollectionHistory(self, state) + + state._modified_event(dict_, + self, + attributes.NEVER_SET) + + # this is a hack to allow the fixtures.ComparableEntity fixture + # to work + dict_[self.key] = True + return state.committed_state[self.key] + + def set(self, state, dict_, value, initiator, + passive=attributes.PASSIVE_OFF, + check_old=None, pop=False): + if initiator and initiator.parent_token is self.parent_token: + return + + if pop and value is None: + return + self._set_iterable(state, dict_, value) + + def _set_iterable(self, state, dict_, iterable, adapter=None): + new_values = list(iterable) + if state.has_identity: + old_collection = util.IdentitySet(self.get(state, dict_)) + + collection_history = self._modified_event(state, dict_) + if not state.has_identity: + old_collection = collection_history.added_items + 
else: + old_collection = old_collection.union( + collection_history.added_items) + + idset = util.IdentitySet + constants = old_collection.intersection(new_values) + additions = idset(new_values).difference(constants) + removals = old_collection.difference(constants) + + for member in new_values: + if member in additions: + self.fire_append_event(state, dict_, member, None, + collection_history=collection_history) + + for member in removals: + self.fire_remove_event(state, dict_, member, None, + collection_history=collection_history) + + def delete(self, *args, **kwargs): + raise NotImplementedError() + + def set_committed_value(self, state, dict_, value): + raise NotImplementedError("Dynamic attributes don't support " + "collection population.") + + def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF): + c = self._get_collection_history(state, passive) + return c.as_history() + + def get_all_pending(self, state, dict_, + passive=attributes.PASSIVE_NO_INITIALIZE): + c = self._get_collection_history( + state, passive) + return [ + (attributes.instance_state(x), x) + for x in + c.all_items + ] + + def _get_collection_history(self, state, passive=attributes.PASSIVE_OFF): + if self.key in state.committed_state: + c = state.committed_state[self.key] + else: + c = CollectionHistory(self, state) + + if state.has_identity and (passive & attributes.INIT_OK): + return CollectionHistory(self, state, apply_to=c) + else: + return c + + def append(self, state, dict_, value, initiator, + passive=attributes.PASSIVE_OFF): + if initiator is not self: + self.fire_append_event(state, dict_, value, initiator) + + def remove(self, state, dict_, value, initiator, + passive=attributes.PASSIVE_OFF): + if initiator is not self: + self.fire_remove_event(state, dict_, value, initiator) + + def pop(self, state, dict_, value, initiator, + passive=attributes.PASSIVE_OFF): + self.remove(state, dict_, value, initiator, passive=passive) + + +class AppenderMixin(object): + query_class = None + + def __init__(self, attr, state): + super(AppenderMixin, self).__init__(attr.target_mapper, None) + self.instance = instance = state.obj() + self.attr = attr + + mapper = object_mapper(instance) + prop = mapper._props[self.attr.key] + self._criterion = prop._with_parent( + instance, + alias_secondary=False) + + if self.attr.order_by: + self._order_by = self.attr.order_by + + def session(self): + sess = object_session(self.instance) + if sess is not None and self.autoflush and sess.autoflush \ + and self.instance in sess: + sess.flush() + if not orm_util.has_identity(self.instance): + return None + else: + return sess + session = property(session, lambda s, x: None) + + def __iter__(self): + sess = self.session + if sess is None: + return iter(self.attr._get_collection_history( + attributes.instance_state(self.instance), + attributes.PASSIVE_NO_INITIALIZE).added_items) + else: + return iter(self._clone(sess)) + + def __getitem__(self, index): + sess = self.session + if sess is None: + return self.attr._get_collection_history( + attributes.instance_state(self.instance), + attributes.PASSIVE_NO_INITIALIZE).indexed(index) + else: + return self._clone(sess).__getitem__(index) + + def count(self): + sess = self.session + if sess is None: + return len(self.attr._get_collection_history( + attributes.instance_state(self.instance), + attributes.PASSIVE_NO_INITIALIZE).added_items) + else: + return self._clone(sess).count() + + def _clone(self, sess=None): + # note we're returning an entirely new Query class instance + # here 
without any assignment capabilities; the class of this + # query is determined by the session. + instance = self.instance + if sess is None: + sess = object_session(instance) + if sess is None: + raise orm_exc.DetachedInstanceError( + "Parent instance %s is not bound to a Session, and no " + "contextual session is established; lazy load operation " + "of attribute '%s' cannot proceed" % ( + orm_util.instance_str(instance), self.attr.key)) + + if self.query_class: + query = self.query_class(self.attr.target_mapper, session=sess) + else: + query = sess.query(self.attr.target_mapper) + + query._criterion = self._criterion + query._order_by = self._order_by + + return query + + def extend(self, iterator): + for item in iterator: + self.attr.append( + attributes.instance_state(self.instance), + attributes.instance_dict(self.instance), item, None) + + def append(self, item): + self.attr.append( + attributes.instance_state(self.instance), + attributes.instance_dict(self.instance), item, None) + + def remove(self, item): + self.attr.remove( + attributes.instance_state(self.instance), + attributes.instance_dict(self.instance), item, None) + + +class AppenderQuery(AppenderMixin, Query): + """A dynamic query that supports basic collection storage operations.""" + + +def mixin_user_query(cls): + """Return a new class with AppenderQuery functionality layered over.""" + name = 'Appender' + cls.__name__ + return type(name, (AppenderMixin, cls), {'query_class': cls}) + + +class CollectionHistory(object): + """Overrides AttributeHistory to receive append/remove events directly.""" + + def __init__(self, attr, state, apply_to=None): + if apply_to: + coll = AppenderQuery(attr, state).autoflush(False) + self.unchanged_items = util.OrderedIdentitySet(coll) + self.added_items = apply_to.added_items + self.deleted_items = apply_to.deleted_items + self._reconcile_collection = True + else: + self.deleted_items = util.OrderedIdentitySet() + self.added_items = util.OrderedIdentitySet() + self.unchanged_items = util.OrderedIdentitySet() + self._reconcile_collection = False + + @property + def added_plus_unchanged(self): + return list(self.added_items.union(self.unchanged_items)) + + @property + def all_items(self): + return list(self.added_items.union( + self.unchanged_items).union(self.deleted_items)) + + def as_history(self): + if self._reconcile_collection: + added = self.added_items.difference(self.unchanged_items) + deleted = self.deleted_items.intersection(self.unchanged_items) + unchanged = self.unchanged_items.difference(deleted) + else: + added, unchanged, deleted = self.added_items,\ + self.unchanged_items,\ + self.deleted_items + return attributes.History( + list(added), + list(unchanged), + list(deleted), + ) + + def indexed(self, index): + return list(self.added_items)[index] + + def add_added(self, value): + self.added_items.add(value) + + def add_removed(self, value): + if value in self.added_items: + self.added_items.remove(value) + else: + self.deleted_items.add(value) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/evaluator.py b/lib/python3.4/site-packages/sqlalchemy/orm/evaluator.py new file mode 100644 index 0000000..534e7fa --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/evaluator.py @@ -0,0 +1,134 @@ +# orm/evaluator.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import operator +from ..sql import operators + + 
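A minimal usage sketch of the dynamic loader defined in dynamic.py above; the User/Address models and the in-memory SQLite URL are invented for illustration. With lazy="dynamic", the relationship attribute yields an AppenderQuery: reads behave like a Query, while append() and remove() perform the basic add/delete mutation described in the module docstring.

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, relationship

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        # lazy="dynamic" selects the DynaLoader strategy defined above
        addresses = relationship("Address", lazy="dynamic")

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))
        email = Column(String)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    session = Session(engine)

    user = User(id=1)
    user.addresses.append(Address(email="a@example.com"))  # basic mutation
    session.add(user)
    session.commit()

    # reads act like Query(): further criteria and count() are available
    print(user.addresses.filter(Address.email.endswith("example.com")).count())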
+class UnevaluatableError(Exception):
+    pass
+
+_straight_ops = set(getattr(operators, op)
+                    for op in ('add', 'mul', 'sub',
+                               'div',
+                               'mod', 'truediv',
+                               'lt', 'le', 'ne', 'gt', 'ge', 'eq'))
+
+
+_notimplemented_ops = set(getattr(operators, op)
+                          for op in ('like_op', 'notlike_op', 'ilike_op',
+                                     'notilike_op', 'between_op', 'in_op',
+                                     'notin_op', 'endswith_op', 'concat_op'))
+
+
+class EvaluatorCompiler(object):
+    def __init__(self, target_cls=None):
+        self.target_cls = target_cls
+
+    def process(self, clause):
+        meth = getattr(self, "visit_%s" % clause.__visit_name__, None)
+        if not meth:
+            raise UnevaluatableError(
+                "Cannot evaluate %s" % type(clause).__name__)
+        return meth(clause)
+
+    def visit_grouping(self, clause):
+        return self.process(clause.element)
+
+    def visit_null(self, clause):
+        return lambda obj: None
+
+    def visit_false(self, clause):
+        return lambda obj: False
+
+    def visit_true(self, clause):
+        return lambda obj: True
+
+    def visit_column(self, clause):
+        if 'parentmapper' in clause._annotations:
+            parentmapper = clause._annotations['parentmapper']
+            if self.target_cls and not issubclass(
+                    self.target_cls, parentmapper.class_):
+                raise UnevaluatableError(
+                    "Can't evaluate criteria against alternate class %s" %
+                    parentmapper.class_
+                )
+            key = parentmapper._columntoproperty[clause].key
+        else:
+            key = clause.key
+
+        get_corresponding_attr = operator.attrgetter(key)
+        return lambda obj: get_corresponding_attr(obj)
+
+    def visit_clauselist(self, clause):
+        evaluators = list(map(self.process, clause.clauses))
+        if clause.operator is operators.or_:
+            def evaluate(obj):
+                has_null = False
+                for sub_evaluate in evaluators:
+                    value = sub_evaluate(obj)
+                    if value:
+                        return True
+                    has_null = has_null or value is None
+                if has_null:
+                    return None
+                return False
+        elif clause.operator is operators.and_:
+            def evaluate(obj):
+                for sub_evaluate in evaluators:
+                    value = sub_evaluate(obj)
+                    if not value:
+                        if value is None:
+                            return None
+                        return False
+                return True
+        else:
+            raise UnevaluatableError(
+                "Cannot evaluate clauselist with operator %s" %
+                clause.operator)
+
+        return evaluate
+
+    def visit_binary(self, clause):
+        eval_left, eval_right = list(map(self.process,
+                                         [clause.left, clause.right]))
+        operator = clause.operator
+        if operator is operators.is_:
+            def evaluate(obj):
+                return eval_left(obj) == eval_right(obj)
+        elif operator is operators.isnot:
+            def evaluate(obj):
+                return eval_left(obj) != eval_right(obj)
+        elif operator in _straight_ops:
+            def evaluate(obj):
+                left_val = eval_left(obj)
+                right_val = eval_right(obj)
+                if left_val is None or right_val is None:
+                    return None
+                # evaluate each side once and reuse the results rather
+                # than calling eval_left()/eval_right() a second time
+                return operator(left_val, right_val)
+        else:
+            raise UnevaluatableError(
+                "Cannot evaluate %s with operator %s" %
+                (type(clause).__name__, clause.operator))
+        return evaluate
+
+    def visit_unary(self, clause):
+        eval_inner = self.process(clause.element)
+        if clause.operator is operators.inv:
+            def evaluate(obj):
+                value = eval_inner(obj)
+                if value is None:
+                    return None
+                return not value
+            return evaluate
+        raise UnevaluatableError(
+            "Cannot evaluate %s with operator %s" %
+            (type(clause).__name__, clause.operator))
+
+    def visit_bindparam(self, clause):
+        val = clause.value
+        return lambda obj: val
diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/events.py b/lib/python3.4/site-packages/sqlalchemy/orm/events.py
new file mode 100644
index 0000000..67ce46e
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/orm/events.py
@@ -0,0 +1,1801 @@
+# orm/events.py
+#
Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""ORM event interfaces. + +""" +from .. import event, exc, util +from .base import _mapper_or_none +import inspect +import weakref +from . import interfaces +from . import mapperlib, instrumentation +from .session import Session, sessionmaker +from .scoping import scoped_session +from .attributes import QueryableAttribute +from .query import Query +from sqlalchemy.util.compat import inspect_getargspec + +class InstrumentationEvents(event.Events): + """Events related to class instrumentation events. + + The listeners here support being established against + any new style class, that is any object that is a subclass + of 'type'. Events will then be fired off for events + against that class. If the "propagate=True" flag is passed + to event.listen(), the event will fire off for subclasses + of that class as well. + + The Python ``type`` builtin is also accepted as a target, + which when used has the effect of events being emitted + for all classes. + + Note the "propagate" flag here is defaulted to ``True``, + unlike the other class level events where it defaults + to ``False``. This means that new subclasses will also + be the subject of these events, when a listener + is established on a superclass. + + .. versionchanged:: 0.8 - events here will emit based + on comparing the incoming class to the type of class + passed to :func:`.event.listen`. Previously, the + event would fire for any class unconditionally regardless + of what class was sent for listening, despite + documentation which stated the contrary. + + """ + + _target_class_doc = "SomeBaseClass" + _dispatch_target = instrumentation.InstrumentationFactory + + @classmethod + def _accept_with(cls, target): + if isinstance(target, type): + return _InstrumentationEventsHold(target) + else: + return None + + @classmethod + def _listen(cls, event_key, propagate=True, **kw): + target, identifier, fn = \ + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn + + def listen(target_cls, *arg): + listen_cls = target() + if propagate and issubclass(target_cls, listen_cls): + return fn(target_cls, *arg) + elif not propagate and target_cls is listen_cls: + return fn(target_cls, *arg) + + def remove(ref): + key = event.registry._EventKey( + None, identifier, listen, + instrumentation._instrumentation_factory) + getattr(instrumentation._instrumentation_factory.dispatch, + identifier).remove(key) + + target = weakref.ref(target.class_, remove) + + event_key.\ + with_dispatch_target(instrumentation._instrumentation_factory).\ + with_wrapper(listen).base_listen(**kw) + + @classmethod + def _clear(cls): + super(InstrumentationEvents, cls)._clear() + instrumentation._instrumentation_factory.dispatch._clear() + + def class_instrument(self, cls): + """Called after the given class is instrumented. + + To get at the :class:`.ClassManager`, use + :func:`.manager_of_class`. + + """ + + def class_uninstrument(self, cls): + """Called before the given class is uninstrumented. + + To get at the :class:`.ClassManager`, use + :func:`.manager_of_class`. + + """ + + def attribute_instrument(self, cls, key, inst): + """Called when an attribute is instrumented.""" + + +class _InstrumentationEventsHold(object): + """temporary marker object used to transfer from _accept_with() to + _listen() on the InstrumentationEvents class. 
+
+    """
+
+    def __init__(self, class_):
+        self.class_ = class_
+
+    dispatch = event.dispatcher(InstrumentationEvents)
+
+
+class InstanceEvents(event.Events):
+    """Define events specific to object lifecycle.
+
+    e.g.::
+
+        from sqlalchemy import event
+
+        def my_load_listener(target, context):
+            print("on load!")
+
+        event.listen(SomeClass, 'load', my_load_listener)
+
+    Available targets include:
+
+    * mapped classes
+    * unmapped superclasses of mapped or to-be-mapped classes
+      (using the ``propagate=True`` flag)
+    * :class:`.Mapper` objects
+    * the :class:`.Mapper` class itself and the :func:`.mapper`
+      function indicate listening for all mappers.
+
+    .. versionchanged:: 0.8.0 instance events can be associated with
+       unmapped superclasses of mapped classes.
+
+    Instance events are closely related to mapper events, but
+    are more specific to the instance and its instrumentation,
+    rather than its system of persistence.
+
+    When using :class:`.InstanceEvents`, several modifiers are
+    available to the :func:`.event.listen` function.
+
+    :param propagate=False: When True, the event listener should
+       be applied to all inheriting classes as well as the
+       class which is the target of this listener.
+    :param raw=False: When True, the "target" argument passed
+       to applicable event listener functions will be the
+       instance's :class:`.InstanceState` management
+       object, rather than the mapped instance itself.
+
+    """
+
+    _target_class_doc = "SomeClass"
+
+    _dispatch_target = instrumentation.ClassManager
+
+    @classmethod
+    def _new_classmanager_instance(cls, class_, classmanager):
+        _InstanceEventsHold.populate(class_, classmanager)
+
+    @classmethod
+    @util.dependencies("sqlalchemy.orm")
+    def _accept_with(cls, orm, target):
+        if isinstance(target, instrumentation.ClassManager):
+            return target
+        elif isinstance(target, mapperlib.Mapper):
+            return target.class_manager
+        elif target is orm.mapper:
+            return instrumentation.ClassManager
+        elif isinstance(target, type):
+            if issubclass(target, mapperlib.Mapper):
+                return instrumentation.ClassManager
+            else:
+                manager = instrumentation.manager_of_class(target)
+                if manager:
+                    return manager
+                else:
+                    return _InstanceEventsHold(target)
+        return None
+
+    @classmethod
+    def _listen(cls, event_key, raw=False, propagate=False, **kw):
+        target, identifier, fn = \
+            event_key.dispatch_target, event_key.identifier, \
+            event_key._listen_fn
+
+        if not raw:
+            def wrap(state, *arg, **kw):
+                return fn(state.obj(), *arg, **kw)
+            event_key = event_key.with_wrapper(wrap)
+
+        event_key.base_listen(propagate=propagate, **kw)
+
+        if propagate:
+            for mgr in target.subclass_managers(True):
+                event_key.with_dispatch_target(mgr).base_listen(
+                    propagate=True)
+
+    @classmethod
+    def _clear(cls):
+        super(InstanceEvents, cls)._clear()
+        _InstanceEventsHold._clear()
+
+    def first_init(self, manager, cls):
+        """Called when the first instance of a particular mapping is called.
+
+        This event is called when the ``__init__`` method of a class
+        is called the first time for that particular class.  The event
+        invokes before ``__init__`` actually proceeds as well as before
+        the :meth:`.InstanceEvents.init` event is invoked.
+
+        """
+
+    def init(self, target, args, kwargs):
+        """Receive an instance when its constructor is called.
+
+        This method is only called during a userland construction of
+        an object, in conjunction with the object's constructor, e.g.
+        its ``__init__`` method.
It is not called when an object is + loaded from the database; see the :meth:`.InstanceEvents.load` + event in order to intercept a database load. + + The event is called before the actual ``__init__`` constructor + of the object is called. The ``kwargs`` dictionary may be + modified in-place in order to affect what is passed to + ``__init__``. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param args: positional arguments passed to the ``__init__`` method. + This is passed as a tuple and is currently immutable. + :param kwargs: keyword arguments passed to the ``__init__`` method. + This structure *can* be altered in place. + + .. seealso:: + + :meth:`.InstanceEvents.init_failure` + + :meth:`.InstanceEvents.load` + + """ + + def init_failure(self, target, args, kwargs): + """Receive an instance when its constructor has been called, + and raised an exception. + + This method is only called during a userland construction of + an object, in conjunction with the object's constructor, e.g. + its ``__init__`` method. It is not called when an object is loaded + from the database. + + The event is invoked after an exception raised by the ``__init__`` + method is caught. After the event + is invoked, the original exception is re-raised outwards, so that + the construction of the object still raises an exception. The + actual exception and stack trace raised should be present in + ``sys.exc_info()``. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param args: positional arguments that were passed to the ``__init__`` + method. + :param kwargs: keyword arguments that were passed to the ``__init__`` + method. + + .. seealso:: + + :meth:`.InstanceEvents.init` + + :meth:`.InstanceEvents.load` + + """ + + def load(self, target, context): + """Receive an object instance after it has been created via + ``__new__``, and after initial attribute population has + occurred. + + This typically occurs when the instance is created based on + incoming result rows, and is only called once for that + instance's lifetime. + + Note that during a result-row load, this method is called upon + the first row received for this instance. Note that some + attributes and collections may or may not be loaded or even + initialized, depending on what's present in the result rows. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param context: the :class:`.QueryContext` corresponding to the + current :class:`.Query` in progress. This argument may be + ``None`` if the load does not correspond to a :class:`.Query`, + such as during :meth:`.Session.merge`. + + .. seealso:: + + :meth:`.InstanceEvents.init` + + :meth:`.InstanceEvents.refresh` + + """ + + def refresh(self, target, context, attrs): + """Receive an object instance after one or more attributes have + been refreshed from a query. + + Contrast this to the :meth:`.InstanceEvents.load` method, which + is invoked when the object is first loaded from a query. + + :param target: the mapped instance. 
If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param context: the :class:`.QueryContext` corresponding to the + current :class:`.Query` in progress. + :param attrs: sequence of attribute names which + were populated, or None if all column-mapped, non-deferred + attributes were populated. + + .. seealso:: + + :meth:`.InstanceEvents.load` + + """ + + def refresh_flush(self, target, flush_context, attrs): + """Receive an object instance after one or more attributes have + been refreshed within the persistence of the object. + + This event is the same as :meth:`.InstanceEvents.refresh` except + it is invoked within the unit of work flush process, and the values + here typically come from the process of handling an INSERT or + UPDATE, such as via the RETURNING clause or from Python-side default + values. + + .. versionadded:: 1.0.5 + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param flush_context: Internal :class:`.UOWTransaction` object + which handles the details of the flush. + :param attrs: sequence of attribute names which + were populated. + + """ + + def expire(self, target, attrs): + """Receive an object instance after its attributes or some subset + have been expired. + + 'keys' is a list of attribute names. If None, the entire + state was expired. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param attrs: sequence of attribute + names which were expired, or None if all attributes were + expired. + + """ + + def pickle(self, target, state_dict): + """Receive an object instance when its associated state is + being pickled. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param state_dict: the dictionary returned by + :class:`.InstanceState.__getstate__`, containing the state + to be pickled. + + """ + + def unpickle(self, target, state_dict): + """Receive an object instance after its associated state has + been unpickled. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param state_dict: the dictionary sent to + :class:`.InstanceState.__setstate__`, containing the state + dictionary which was pickled. + + """ + + +class _EventsHold(event.RefCollection): + """Hold onto listeners against unmapped, uninstrumented classes. + + Establish _listen() for that class' mapper/instrumentation when + those objects are created for that class. 
+ + """ + + def __init__(self, class_): + self.class_ = class_ + + @classmethod + def _clear(cls): + cls.all_holds.clear() + + class HoldEvents(object): + _dispatch_target = None + + @classmethod + def _listen(cls, event_key, raw=False, propagate=False, **kw): + target, identifier, fn = \ + event_key.dispatch_target, event_key.identifier, event_key.fn + + if target.class_ in target.all_holds: + collection = target.all_holds[target.class_] + else: + collection = target.all_holds[target.class_] = {} + + event.registry._stored_in_collection(event_key, target) + collection[event_key._key] = (event_key, raw, propagate) + + if propagate: + stack = list(target.class_.__subclasses__()) + while stack: + subclass = stack.pop(0) + stack.extend(subclass.__subclasses__()) + subject = target.resolve(subclass) + if subject is not None: + # we are already going through __subclasses__() + # so leave generic propagate flag False + event_key.with_dispatch_target(subject).\ + listen(raw=raw, propagate=False, **kw) + + def remove(self, event_key): + target, identifier, fn = \ + event_key.dispatch_target, event_key.identifier, event_key.fn + + if isinstance(target, _EventsHold): + collection = target.all_holds[target.class_] + del collection[event_key._key] + + @classmethod + def populate(cls, class_, subject): + for subclass in class_.__mro__: + if subclass in cls.all_holds: + collection = cls.all_holds[subclass] + for event_key, raw, propagate in collection.values(): + if propagate or subclass is class_: + # since we can't be sure in what order different + # classes in a hierarchy are triggered with + # populate(), we rely upon _EventsHold for all event + # assignment, instead of using the generic propagate + # flag. + event_key.with_dispatch_target(subject).\ + listen(raw=raw, propagate=False) + + +class _InstanceEventsHold(_EventsHold): + all_holds = weakref.WeakKeyDictionary() + + def resolve(self, class_): + return instrumentation.manager_of_class(class_) + + class HoldInstanceEvents(_EventsHold.HoldEvents, InstanceEvents): + pass + + dispatch = event.dispatcher(HoldInstanceEvents) + + +class MapperEvents(event.Events): + """Define events specific to mappings. + + e.g.:: + + from sqlalchemy import event + + def my_before_insert_listener(mapper, connection, target): + # execute a stored procedure upon INSERT, + # apply the value to the row to be inserted + target.calculated_value = connection.scalar( + "select my_special_function(%d)" + % target.special_number) + + # associate the listener function with SomeClass, + # to execute during the "before_insert" hook + event.listen( + SomeClass, 'before_insert', my_before_insert_listener) + + Available targets include: + + * mapped classes + * unmapped superclasses of mapped or to-be-mapped classes + (using the ``propagate=True`` flag) + * :class:`.Mapper` objects + * the :class:`.Mapper` class itself and the :func:`.mapper` + function indicate listening for all mappers. + + .. versionchanged:: 0.8.0 mapper events can be associated with + unmapped superclasses of mapped classes. + + Mapper events provide hooks into critical sections of the + mapper, including those related to object instrumentation, + object loading, and object persistence. In particular, the + persistence methods :meth:`~.MapperEvents.before_insert`, + and :meth:`~.MapperEvents.before_update` are popular + places to augment the state being persisted - however, these + methods operate with several significant restrictions. 
The + user is encouraged to evaluate the + :meth:`.SessionEvents.before_flush` and + :meth:`.SessionEvents.after_flush` methods as more + flexible and user-friendly hooks in which to apply + additional database state during a flush. + + When using :class:`.MapperEvents`, several modifiers are + available to the :func:`.event.listen` function. + + :param propagate=False: When True, the event listener should + be applied to all inheriting mappers and/or the mappers of + inheriting classes, as well as any + mapper which is the target of this listener. + :param raw=False: When True, the "target" argument passed + to applicable event listener functions will be the + instance's :class:`.InstanceState` management + object, rather than the mapped instance itself. + :param retval=False: when True, the user-defined event function + must have a return value, the purpose of which is either to + control subsequent event propagation, or to otherwise alter + the operation in progress by the mapper. Possible return + values are: + + * ``sqlalchemy.orm.interfaces.EXT_CONTINUE`` - continue event + processing normally. + * ``sqlalchemy.orm.interfaces.EXT_STOP`` - cancel all subsequent + event handlers in the chain. + * other values - the return value specified by specific listeners. + + """ + + _target_class_doc = "SomeClass" + _dispatch_target = mapperlib.Mapper + + @classmethod + def _new_mapper_instance(cls, class_, mapper): + _MapperEventsHold.populate(class_, mapper) + + @classmethod + @util.dependencies("sqlalchemy.orm") + def _accept_with(cls, orm, target): + if target is orm.mapper: + return mapperlib.Mapper + elif isinstance(target, type): + if issubclass(target, mapperlib.Mapper): + return target + else: + mapper = _mapper_or_none(target) + if mapper is not None: + return mapper + else: + return _MapperEventsHold(target) + else: + return target + + @classmethod + def _listen( + cls, event_key, raw=False, retval=False, propagate=False, **kw): + target, identifier, fn = \ + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn + + if identifier in ("before_configured", "after_configured") and \ + target is not mapperlib.Mapper: + util.warn( + "'before_configured' and 'after_configured' ORM events " + "only invoke with the mapper() function or Mapper class " + "as the target.") + + if not raw or not retval: + if not raw: + meth = getattr(cls, identifier) + try: + target_index = \ + inspect_getargspec(meth)[0].index('target') - 1 + except ValueError: + target_index = None + + def wrap(*arg, **kw): + if not raw and target_index is not None: + arg = list(arg) + arg[target_index] = arg[target_index].obj() + if not retval: + fn(*arg, **kw) + return interfaces.EXT_CONTINUE + else: + return fn(*arg, **kw) + event_key = event_key.with_wrapper(wrap) + + if propagate: + for mapper in target.self_and_descendants: + event_key.with_dispatch_target(mapper).base_listen( + propagate=True, **kw) + else: + event_key.base_listen(**kw) + + @classmethod + def _clear(cls): + super(MapperEvents, cls)._clear() + _MapperEventsHold._clear() + + def instrument_class(self, mapper, class_): + """Receive a class when the mapper is first constructed, + before instrumentation is applied to the mapped class. + + This event is the earliest phase of mapper construction. + Most attributes of the mapper are not yet initialized. 
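A hedged sketch of the ``retval`` modifier described in the :class:`.MapperEvents` docstring above; the listener body is invented. With ``retval=True``, the wrapper installed by ``_listen()`` no longer supplies ``EXT_CONTINUE`` automatically, so the listener must return an ``EXT_*`` constant itself.

    from sqlalchemy import event
    from sqlalchemy.orm import interfaces, mapper

    @event.listens_for(mapper, "before_insert", retval=True)
    def guard_insert(mapper_, connection, target):
        # must return an EXT_* constant when registered with retval=True
        return interfaces.EXT_CONTINUE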
+ + This listener can either be applied to the :class:`.Mapper` + class overall, or to any un-mapped class which serves as a base + for classes that will be mapped (using the ``propagate=True`` flag):: + + Base = declarative_base() + + @event.listens_for(Base, "instrument_class", propagate=True) + def on_new_class(mapper, cls_): + " ... " + + :param mapper: the :class:`.Mapper` which is the target + of this event. + :param class\_: the mapped class. + + """ + + def mapper_configured(self, mapper, class_): + """Called when a specific mapper has completed its own configuration + within the scope of the :func:`.configure_mappers` call. + + The :meth:`.MapperEvents.mapper_configured` event is invoked + for each mapper that is encountered when the + :func:`.orm.configure_mappers` function proceeds through the current + list of not-yet-configured mappers. + :func:`.orm.configure_mappers` is typically invoked + automatically as mappings are first used, as well as each time + new mappers have been made available and new mapper use is + detected. + + When the event is called, the mapper should be in its final + state, but **not including backrefs** that may be invoked from + other mappers; they might still be pending within the + configuration operation. Bidirectional relationships that + are instead configured via the + :paramref:`.orm.relationship.back_populates` argument + *will* be fully available, since this style of relationship does not + rely upon other possibly-not-configured mappers to know that they + exist. + + For an event that is guaranteed to have **all** mappers ready + to go including backrefs that are defined only on other + mappings, use the :meth:`.MapperEvents.after_configured` + event; this event invokes only after all known mappings have been + fully configured. + + The :meth:`.MapperEvents.mapper_configured` event, unlike + :meth:`.MapperEvents.before_configured` or + :meth:`.MapperEvents.after_configured`, + is called for each mapper/class individually, and the mapper is + passed to the event itself. It also is called exactly once for + a particular mapper. The event is therefore useful for + configurational steps that benefit from being invoked just once + on a specific mapper basis, which don't require that "backref" + configurations are necessarily ready yet. + + :param mapper: the :class:`.Mapper` which is the target + of this event. + :param class\_: the mapped class. + + .. seealso:: + + :meth:`.MapperEvents.before_configured` + + :meth:`.MapperEvents.after_configured` + + """ + # TODO: need coverage for this event + + def before_configured(self): + """Called before a series of mappers have been configured. + + The :meth:`.MapperEvents.before_configured` event is invoked + each time the :func:`.orm.configure_mappers` function is + invoked, before the function has done any of its work. + :func:`.orm.configure_mappers` is typically invoked + automatically as mappings are first used, as well as each time + new mappers have been made available and new mapper use is + detected. + + This event can **only** be applied to the :class:`.Mapper` class + or :func:`.mapper` function, and not to individual mappings or + mapped classes. It is only invoked for all mappings as a whole:: + + from sqlalchemy.orm import mapper + + @event.listens_for(mapper, "before_configured") + def go(): + # ... 
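For orientation, a hedged sketch of how the configuration events fire (the listener is invented): an explicit :func:`.orm.configure_mappers` call emits ``before_configured``, then ``mapper_configured`` once per newly configured mapper, then ``after_configured``.

    from sqlalchemy import event
    from sqlalchemy.orm import configure_mappers, mapper

    @event.listens_for(mapper, "mapper_configured")
    def note_configured(mapper_, class_):
        # invoked once per mapper as configuration proceeds
        print("configured: %s" % class_.__name__)

    configure_mappers()  # fires before_configured, then after_configured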
+
+        Contrast this event to :meth:`.MapperEvents.after_configured`,
+        which is invoked after the series of mappers has been configured,
+        as well as :meth:`.MapperEvents.mapper_configured`, which is invoked
+        on a per-mapper basis as each one is configured to the extent possible.
+
+        Theoretically this event is called once per
+        application, but is actually called any time new mappers
+        are to be affected by a :func:`.orm.configure_mappers`
+        call.  If new mappings are constructed after existing ones have
+        already been used, this event will likely be called again.  To ensure
+        that a particular event is only called once and no further, the
+        ``once=True`` argument (new in 0.9.4) can be applied::
+
+            from sqlalchemy.orm import mapper
+
+            @event.listens_for(mapper, "before_configured", once=True)
+            def go():
+                # ...
+
+
+        .. versionadded:: 0.9.3
+
+
+        .. seealso::
+
+            :meth:`.MapperEvents.mapper_configured`
+
+            :meth:`.MapperEvents.after_configured`
+
+        """
+
+    def after_configured(self):
+        """Called after a series of mappers have been configured.
+
+        The :meth:`.MapperEvents.after_configured` event is invoked
+        each time the :func:`.orm.configure_mappers` function is
+        invoked, after the function has completed its work.
+        :func:`.orm.configure_mappers` is typically invoked
+        automatically as mappings are first used, as well as each time
+        new mappers have been made available and new mapper use is
+        detected.
+
+        Contrast this event to the :meth:`.MapperEvents.mapper_configured`
+        event, which is called on a per-mapper basis while the configuration
+        operation proceeds; unlike that event, when this event is invoked,
+        all cross-configurations (e.g. backrefs) will also have been made
+        available for any mappers that were pending.
+        Also contrast to :meth:`.MapperEvents.before_configured`,
+        which is invoked before the series of mappers has been configured.
+
+        This event can **only** be applied to the :class:`.Mapper` class
+        or :func:`.mapper` function, and not to individual mappings or
+        mapped classes.  It is only invoked for all mappings as a whole::
+
+            from sqlalchemy.orm import mapper
+
+            @event.listens_for(mapper, "after_configured")
+            def go():
+                # ...
+
+        Theoretically this event is called once per
+        application, but is actually called any time new mappers
+        have been affected by a :func:`.orm.configure_mappers`
+        call.  If new mappings are constructed after existing ones have
+        already been used, this event will likely be called again.  To ensure
+        that a particular event is only called once and no further, the
+        ``once=True`` argument (new in 0.9.4) can be applied::
+
+            from sqlalchemy.orm import mapper
+
+            @event.listens_for(mapper, "after_configured", once=True)
+            def go():
+                # ...
+
+        .. seealso::
+
+            :meth:`.MapperEvents.mapper_configured`
+
+            :meth:`.MapperEvents.before_configured`
+
+        """
+
+    def before_insert(self, mapper, connection, target):
+        """Receive an object instance before an INSERT statement
+        is emitted corresponding to that instance.
+
+        This event is used to modify local, non-object related
+        attributes on the instance before an INSERT occurs, as well
+        as to emit additional SQL statements on the given
+        connection.
+
+        The event is often called for a batch of objects of the
+        same class before their INSERT statements are emitted at
+        once in a later step.
In the extremely rare case that + this is not desirable, the :func:`.mapper` can be + configured with ``batch=False``, which will cause + batches of instances to be broken up into individual + (and more poorly performing) event->persist->event + steps. + + .. warning:: + + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. + + :param mapper: the :class:`.Mapper` which is the target + of this event. + :param connection: the :class:`.Connection` being used to + emit INSERT statements for this instance. This + provides a handle into the current transaction on the + target database specific to this instance. + :param target: the mapped instance being persisted. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :return: No return value is supported by this event. + + .. seealso:: + + :ref:`session_persistence_events` + + """ + + def after_insert(self, mapper, connection, target): + """Receive an object instance after an INSERT statement + is emitted corresponding to that instance. + + This event is used to modify in-Python-only + state on the instance after an INSERT occurs, as well + as to emit additional SQL statements on the given + connection. + + The event is often called for a batch of objects of the + same class after their INSERT statements have been + emitted at once in a previous step. In the extremely + rare case that this is not desirable, the + :func:`.mapper` can be configured with ``batch=False``, + which will cause batches of instances to be broken up + into individual (and more poorly performing) + event->persist->event steps. + + .. warning:: + + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. + + :param mapper: the :class:`.Mapper` which is the target + of this event. + :param connection: the :class:`.Connection` being used to + emit INSERT statements for this instance. This + provides a handle into the current transaction on the + target database specific to this instance. + :param target: the mapped instance being persisted. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :return: No return value is supported by this event. + + .. seealso:: + + :ref:`session_persistence_events` + + """ + + def before_update(self, mapper, connection, target): + """Receive an object instance before an UPDATE statement + is emitted corresponding to that instance. + + This event is used to modify local, non-object related + attributes on the instance before an UPDATE occurs, as well + as to emit additional SQL statements on the given + connection. 
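A hedged sketch of the row-local mutation that :meth:`~.MapperEvents.before_insert` permits, where ``SomeClass`` is the docstring's placeholder target and ``created_at`` an assumed mapped column:

    import datetime

    from sqlalchemy import event

    @event.listens_for(SomeClass, "before_insert")
    def stamp_created(mapper, connection, target):
        # touch only column attributes local to the row being inserted
        target.created_at = datetime.datetime.utcnow()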
+ + This method is called for all instances that are + marked as "dirty", *even those which have no net changes + to their column-based attributes*. An object is marked + as dirty when any of its column-based attributes have a + "set attribute" operation called or when any of its + collections are modified. If, at update time, no + column-based attributes have any net changes, no UPDATE + statement will be issued. This means that an instance + being sent to :meth:`~.MapperEvents.before_update` is + *not* a guarantee that an UPDATE statement will be + issued, although you can affect the outcome here by + modifying attributes so that a net change in value does + exist. + + To detect if the column-based attributes on the object have net + changes, and will therefore generate an UPDATE statement, use + ``object_session(instance).is_modified(instance, + include_collections=False)``. + + The event is often called for a batch of objects of the + same class before their UPDATE statements are emitted at + once in a later step. In the extremely rare case that + this is not desirable, the :func:`.mapper` can be + configured with ``batch=False``, which will cause + batches of instances to be broken up into individual + (and more poorly performing) event->persist->event + steps. + + .. warning:: + + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. + + :param mapper: the :class:`.Mapper` which is the target + of this event. + :param connection: the :class:`.Connection` being used to + emit UPDATE statements for this instance. This + provides a handle into the current transaction on the + target database specific to this instance. + :param target: the mapped instance being persisted. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :return: No return value is supported by this event. + + .. seealso:: + + :ref:`session_persistence_events` + + """ + + def after_update(self, mapper, connection, target): + """Receive an object instance after an UPDATE statement + is emitted corresponding to that instance. + + This event is used to modify in-Python-only + state on the instance after an UPDATE occurs, as well + as to emit additional SQL statements on the given + connection. + + This method is called for all instances that are + marked as "dirty", *even those which have no net changes + to their column-based attributes*, and for which + no UPDATE statement has proceeded. An object is marked + as dirty when any of its column-based attributes have a + "set attribute" operation called or when any of its + collections are modified. If, at update time, no + column-based attributes have any net changes, no UPDATE + statement will be issued. This means that an instance + being sent to :meth:`~.MapperEvents.after_update` is + *not* a guarantee that an UPDATE statement has been + issued. + + To detect if the column-based attributes on the object have net + changes, and therefore resulted in an UPDATE statement, use + ``object_session(instance).is_modified(instance, + include_collections=False)``. 
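The net-change check quoted above can be applied directly inside a :meth:`~.MapperEvents.before_update` listener; a sketch under the same assumptions (``SomeClass`` placeholder, invented ``updated_at`` column):

    import datetime

    from sqlalchemy import event
    from sqlalchemy.orm import object_session

    @event.listens_for(SomeClass, "before_update")
    def stamp_updated(mapper, connection, target):
        # stamp only when column-level net changes will produce an UPDATE
        if object_session(target).is_modified(
                target, include_collections=False):
            target.updated_at = datetime.datetime.utcnow()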
+ + The event is often called for a batch of objects of the + same class after their UPDATE statements have been emitted at + once in a previous step. In the extremely rare case that + this is not desirable, the :func:`.mapper` can be + configured with ``batch=False``, which will cause + batches of instances to be broken up into individual + (and more poorly performing) event->persist->event + steps. + + .. warning:: + + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. + + :param mapper: the :class:`.Mapper` which is the target + of this event. + :param connection: the :class:`.Connection` being used to + emit UPDATE statements for this instance. This + provides a handle into the current transaction on the + target database specific to this instance. + :param target: the mapped instance being persisted. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :return: No return value is supported by this event. + + .. seealso:: + + :ref:`session_persistence_events` + + """ + + def before_delete(self, mapper, connection, target): + """Receive an object instance before a DELETE statement + is emitted corresponding to that instance. + + This event is used to emit additional SQL statements on + the given connection as well as to perform application + specific bookkeeping related to a deletion event. + + The event is often called for a batch of objects of the + same class before their DELETE statements are emitted at + once in a later step. + + .. warning:: + + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. + + :param mapper: the :class:`.Mapper` which is the target + of this event. + :param connection: the :class:`.Connection` being used to + emit DELETE statements for this instance. This + provides a handle into the current transaction on the + target database specific to this instance. + :param target: the mapped instance being deleted. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :return: No return value is supported by this event. + + .. seealso:: + + :ref:`session_persistence_events` + + """ + + def after_delete(self, mapper, connection, target): + """Receive an object instance after a DELETE statement + has been emitted corresponding to that instance. + + This event is used to emit additional SQL statements on + the given connection as well as to perform application + specific bookkeeping related to a deletion event. + + The event is often called for a batch of objects of the + same class after their DELETE statements have been emitted at + once in a previous step. + + .. 
warning:: + + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. + + :param mapper: the :class:`.Mapper` which is the target + of this event. + :param connection: the :class:`.Connection` being used to + emit DELETE statements for this instance. This + provides a handle into the current transaction on the + target database specific to this instance. + :param target: the mapped instance being deleted. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :return: No return value is supported by this event. + + .. seealso:: + + :ref:`session_persistence_events` + + """ + + +class _MapperEventsHold(_EventsHold): + all_holds = weakref.WeakKeyDictionary() + + def resolve(self, class_): + return _mapper_or_none(class_) + + class HoldMapperEvents(_EventsHold.HoldEvents, MapperEvents): + pass + + dispatch = event.dispatcher(HoldMapperEvents) + + +class SessionEvents(event.Events): + """Define events specific to :class:`.Session` lifecycle. + + e.g.:: + + from sqlalchemy import event + from sqlalchemy.orm import sessionmaker + + def my_before_commit(session): + print "before commit!" + + Session = sessionmaker() + + event.listen(Session, "before_commit", my_before_commit) + + The :func:`~.event.listen` function will accept + :class:`.Session` objects as well as the return result + of :class:`~.sessionmaker()` and :class:`~.scoped_session()`. + + Additionally, it accepts the :class:`.Session` class which + will apply listeners to all :class:`.Session` instances + globally. + + """ + + _target_class_doc = "SomeSessionOrFactory" + + _dispatch_target = Session + + @classmethod + def _accept_with(cls, target): + if isinstance(target, scoped_session): + + target = target.session_factory + if not isinstance(target, sessionmaker) and \ + ( + not isinstance(target, type) or + not issubclass(target, Session) + ): + raise exc.ArgumentError( + "Session event listen on a scoped_session " + "requires that its creation callable " + "is associated with the Session class.") + + if isinstance(target, sessionmaker): + return target.class_ + elif isinstance(target, type): + if issubclass(target, scoped_session): + return Session + elif issubclass(target, Session): + return target + elif isinstance(target, Session): + return target + else: + return None + + def after_transaction_create(self, session, transaction): + """Execute when a new :class:`.SessionTransaction` is created. + + This event differs from :meth:`~.SessionEvents.after_begin` + in that it occurs for each :class:`.SessionTransaction` + overall, as opposed to when transactions are begun + on individual database connections. It is also invoked + for nested transactions and subtransactions, and is always + matched by a corresponding + :meth:`~.SessionEvents.after_transaction_end` event + (assuming normal operation of the :class:`.Session`). + + :param session: the target :class:`.Session`. + :param transaction: the target :class:`.SessionTransaction`. + + .. versionadded:: 0.8 + + .. 
seealso:: + + :meth:`~.SessionEvents.after_transaction_end` + + """ + + def after_transaction_end(self, session, transaction): + """Execute when the span of a :class:`.SessionTransaction` ends. + + This event differs from :meth:`~.SessionEvents.after_commit` + in that it corresponds to all :class:`.SessionTransaction` + objects in use, including those for nested transactions + and subtransactions, and is always matched by a corresponding + :meth:`~.SessionEvents.after_transaction_create` event. + + :param session: the target :class:`.Session`. + :param transaction: the target :class:`.SessionTransaction`. + + .. versionadded:: 0.8 + + .. seealso:: + + :meth:`~.SessionEvents.after_transaction_create` + + """ + + def before_commit(self, session): + """Execute before commit is called. + + .. note:: + + The :meth:`~.SessionEvents.before_commit` hook is *not* per-flush, + that is, the :class:`.Session` can emit SQL to the database + many times within the scope of a transaction. + For interception of these events, use the + :meth:`~.SessionEvents.before_flush`, + :meth:`~.SessionEvents.after_flush`, or + :meth:`~.SessionEvents.after_flush_postexec` + events. + + :param session: The target :class:`.Session`. + + .. seealso:: + + :meth:`~.SessionEvents.after_commit` + + :meth:`~.SessionEvents.after_begin` + + :meth:`~.SessionEvents.after_transaction_create` + + :meth:`~.SessionEvents.after_transaction_end` + + """ + + def after_commit(self, session): + """Execute after a commit has occurred. + + .. note:: + + The :meth:`~.SessionEvents.after_commit` hook is *not* per-flush, + that is, the :class:`.Session` can emit SQL to the database + many times within the scope of a transaction. + For interception of these events, use the + :meth:`~.SessionEvents.before_flush`, + :meth:`~.SessionEvents.after_flush`, or + :meth:`~.SessionEvents.after_flush_postexec` + events. + + .. note:: + + The :class:`.Session` is not in an active transaction + when the :meth:`~.SessionEvents.after_commit` event is invoked, + and therefore can not emit SQL. To emit SQL corresponding to + every transaction, use the :meth:`~.SessionEvents.before_commit` + event. + + :param session: The target :class:`.Session`. + + .. seealso:: + + :meth:`~.SessionEvents.before_commit` + + :meth:`~.SessionEvents.after_begin` + + :meth:`~.SessionEvents.after_transaction_create` + + :meth:`~.SessionEvents.after_transaction_end` + + """ + + def after_rollback(self, session): + """Execute after a real DBAPI rollback has occurred. + + Note that this event only fires when the *actual* rollback against + the database occurs - it does *not* fire each time the + :meth:`.Session.rollback` method is called, if the underlying + DBAPI transaction has already been rolled back. In many + cases, the :class:`.Session` will not be in + an "active" state during this event, as the current + transaction is not valid. To acquire a :class:`.Session` + which is active after the outermost rollback has proceeded, + use the :meth:`.SessionEvents.after_soft_rollback` event, checking the + :attr:`.Session.is_active` flag. + + :param session: The target :class:`.Session`. + + """ + + def after_soft_rollback(self, session, previous_transaction): + """Execute after any rollback has occurred, including "soft" + rollbacks that don't actually emit at the DBAPI level. + + This corresponds to both nested and outer rollbacks, i.e. 
+ the innermost rollback that calls the DBAPI's + rollback() method, as well as the enclosing rollback + calls that only pop themselves from the transaction stack. + + The given :class:`.Session` can be used to invoke SQL and + :meth:`.Session.query` operations after an outermost rollback + by first checking the :attr:`.Session.is_active` flag:: + + @event.listens_for(Session, "after_soft_rollback") + def do_something(session, previous_transaction): + if session.is_active: + session.execute("select * from some_table") + + :param session: The target :class:`.Session`. + :param previous_transaction: The :class:`.SessionTransaction` + transactional marker object which was just closed. The current + :class:`.SessionTransaction` for the given :class:`.Session` is + available via the :attr:`.Session.transaction` attribute. + + .. versionadded:: 0.7.3 + + """ + + def before_flush(self, session, flush_context, instances): + """Execute before flush process has started. + + :param session: The target :class:`.Session`. + :param flush_context: Internal :class:`.UOWTransaction` object + which handles the details of the flush. + :param instances: Usually ``None``, this is the collection of + objects which can be passed to the :meth:`.Session.flush` method + (note this usage is deprecated). + + .. seealso:: + + :meth:`~.SessionEvents.after_flush` + + :meth:`~.SessionEvents.after_flush_postexec` + + :ref:`session_persistence_events` + + """ + + def after_flush(self, session, flush_context): + """Execute after flush has completed, but before commit has been + called. + + Note that the session's state is still in pre-flush, i.e. 'new', + 'dirty', and 'deleted' lists still show pre-flush state as well + as the history settings on instance attributes. + + :param session: The target :class:`.Session`. + :param flush_context: Internal :class:`.UOWTransaction` object + which handles the details of the flush. + + .. seealso:: + + :meth:`~.SessionEvents.before_flush` + + :meth:`~.SessionEvents.after_flush_postexec` + + :ref:`session_persistence_events` + + """ + + def after_flush_postexec(self, session, flush_context): + """Execute after flush has completed, and after the post-exec + state occurs. + + This will be when the 'new', 'dirty', and 'deleted' lists are in + their final state. An actual commit() may or may not have + occurred, depending on whether or not the flush started its own + transaction or participated in a larger transaction. + + :param session: The target :class:`.Session`. + :param flush_context: Internal :class:`.UOWTransaction` object + which handles the details of the flush. + + + .. seealso:: + + :meth:`~.SessionEvents.before_flush` + + :meth:`~.SessionEvents.after_flush` + + :ref:`session_persistence_events` + + """ + + def after_begin(self, session, transaction, connection): + """Execute after a transaction is begun on a connection + + :param session: The target :class:`.Session`. + :param transaction: The :class:`.SessionTransaction`. + :param connection: The :class:`~.engine.Connection` object + which will be used for SQL statements. + + .. seealso:: + + :meth:`~.SessionEvents.before_commit` + + :meth:`~.SessionEvents.after_commit` + + :meth:`~.SessionEvents.after_transaction_create` + + :meth:`~.SessionEvents.after_transaction_end` + + """ + + def before_attach(self, session, instance): + """Execute before an instance is attached to a session. + + This is called before an add, delete or merge causes + the object to be part of the session. + + .. versionadded:: 0.8. 
Note that :meth:`~.SessionEvents.after_attach` + now fires off after the item is part of the session. + :meth:`.before_attach` is provided for those cases where + the item should not yet be part of the session state. + + .. seealso:: + + :meth:`~.SessionEvents.after_attach` + + :ref:`session_lifecycle_events` + + """ + + def after_attach(self, session, instance): + """Execute after an instance is attached to a session. + + This is called after an add, delete or merge. + + .. note:: + + As of 0.8, this event fires off *after* the item + has been fully associated with the session, which is + different than previous releases. For event + handlers that require the object not yet + be part of session state (such as handlers which + may autoflush while the target object is not + yet complete) consider the + new :meth:`.before_attach` event. + + .. seealso:: + + :meth:`~.SessionEvents.before_attach` + + :ref:`session_lifecycle_events` + + """ + + @event._legacy_signature("0.9", + ["session", "query", "query_context", "result"], + lambda update_context: ( + update_context.session, + update_context.query, + update_context.context, + update_context.result)) + def after_bulk_update(self, update_context): + """Execute after a bulk update operation to the session. + + This is called as a result of the :meth:`.Query.update` method. + + :param update_context: an "update context" object which contains + details about the update, including these attributes: + + * ``session`` - the :class:`.Session` involved + * ``query`` -the :class:`.Query` object that this update operation + was called upon. + * ``context`` The :class:`.QueryContext` object, corresponding + to the invocation of an ORM query. + * ``result`` the :class:`.ResultProxy` returned as a result of the + bulk UPDATE operation. + + + """ + + @event._legacy_signature("0.9", + ["session", "query", "query_context", "result"], + lambda delete_context: ( + delete_context.session, + delete_context.query, + delete_context.context, + delete_context.result)) + def after_bulk_delete(self, delete_context): + """Execute after a bulk delete operation to the session. + + This is called as a result of the :meth:`.Query.delete` method. + + :param delete_context: a "delete context" object which contains + details about the update, including these attributes: + + * ``session`` - the :class:`.Session` involved + * ``query`` -the :class:`.Query` object that this update operation + was called upon. + * ``context`` The :class:`.QueryContext` object, corresponding + to the invocation of an ORM query. + * ``result`` the :class:`.ResultProxy` returned as a result of the + bulk DELETE operation. + + + """ + + +class AttributeEvents(event.Events): + """Define events for object attributes. + + These are typically defined on the class-bound descriptor for the + target class. 
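A minimal sketch of listening for the bulk hooks documented above, assuming the 0.9-style context-object signature::

    from sqlalchemy import event
    from sqlalchemy.orm import Session

    @event.listens_for(Session, 'after_bulk_update')
    def receive_after_bulk_update(update_context):
        # update_context.result is the ResultProxy of the bulk UPDATE
        print("bulk UPDATE matched %d rows" % update_context.result.rowcount)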
+ + e.g.:: + + from sqlalchemy import event + + def my_append_listener(target, value, initiator): + print "received append event for target: %s" % target + + event.listen(MyClass.collection, 'append', my_append_listener) + + Listeners have the option to return a possibly modified version + of the value, when the ``retval=True`` flag is passed + to :func:`~.event.listen`:: + + def validate_phone(target, value, oldvalue, initiator): + "Strip non-numeric characters from a phone number" + + return re.sub(r'(?![0-9])', '', value) + + # setup listener on UserContact.phone attribute, instructing + # it to use the return value + listen(UserContact.phone, 'set', validate_phone, retval=True) + + A validation function like the above can also raise an exception + such as :exc:`ValueError` to halt the operation. + + Several modifiers are available to the :func:`~.event.listen` function. + + :param active_history=False: When True, indicates that the + "set" event would like to receive the "old" value being + replaced unconditionally, even if this requires firing off + database loads. Note that ``active_history`` can also be + set directly via :func:`.column_property` and + :func:`.relationship`. + + :param propagate=False: When True, the listener function will + be established not just for the class attribute given, but + for attributes of the same name on all current subclasses + of that class, as well as all future subclasses of that + class, using an additional listener that listens for + instrumentation events. + :param raw=False: When True, the "target" argument to the + event will be the :class:`.InstanceState` management + object, rather than the mapped instance itself. + :param retval=False: when True, the user-defined event + listening must return the "value" argument from the + function. This gives the listening function the opportunity + to change the value that is ultimately used for a "set" + or "append" event. + + """ + + _target_class_doc = "SomeClass.some_attribute" + _dispatch_target = QueryableAttribute + + @staticmethod + def _set_dispatch(cls, dispatch_cls): + dispatch = event.Events._set_dispatch(cls, dispatch_cls) + dispatch_cls._active_history = False + return dispatch + + @classmethod + def _accept_with(cls, target): + # TODO: coverage + if isinstance(target, interfaces.MapperProperty): + return getattr(target.parent.class_, target.key) + else: + return target + + @classmethod + def _listen(cls, event_key, active_history=False, + raw=False, retval=False, + propagate=False): + + target, identifier, fn = \ + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn + + if active_history: + target.dispatch._active_history = True + + if not raw or not retval: + def wrap(target, value, *arg): + if not raw: + target = target.obj() + if not retval: + fn(target, value, *arg) + return value + else: + return fn(target, value, *arg) + event_key = event_key.with_wrapper(wrap) + + event_key.base_listen(propagate=propagate) + + if propagate: + manager = instrumentation.manager_of_class(target.class_) + + for mgr in manager.subclass_managers(True): + event_key.with_dispatch_target( + mgr[target.key]).base_listen(propagate=True) + + def append(self, target, value, initiator): + """Receive a collection append event. + + :param target: the object instance receiving the event. + If the listener is registered with ``raw=True``, this will + be the :class:`.InstanceState` object. + :param value: the value being appended. 
If this listener + is registered with ``retval=True``, the listener + function must return this value, or a new value which + replaces it. + :param initiator: An instance of :class:`.attributes.Event` + representing the initiation of the event. May be modified + from its original value by backref handlers in order to control + chained event propagation. + + .. versionchanged:: 0.9.0 the ``initiator`` argument is now + passed as a :class:`.attributes.Event` object, and may be + modified by backref handlers within a chain of backref-linked + events. + + :return: if the event was registered with ``retval=True``, + the given value, or a new effective value, should be returned. + + """ + + def remove(self, target, value, initiator): + """Receive a collection remove event. + + :param target: the object instance receiving the event. + If the listener is registered with ``raw=True``, this will + be the :class:`.InstanceState` object. + :param value: the value being removed. + :param initiator: An instance of :class:`.attributes.Event` + representing the initiation of the event. May be modified + from its original value by backref handlers in order to control + chained event propagation. + + .. versionchanged:: 0.9.0 the ``initiator`` argument is now + passed as a :class:`.attributes.Event` object, and may be + modified by backref handlers within a chain of backref-linked + events. + + :return: No return value is defined for this event. + """ + + def set(self, target, value, oldvalue, initiator): + """Receive a scalar set event. + + :param target: the object instance receiving the event. + If the listener is registered with ``raw=True``, this will + be the :class:`.InstanceState` object. + :param value: the value being set. If this listener + is registered with ``retval=True``, the listener + function must return this value, or a new value which + replaces it. + :param oldvalue: the previous value being replaced. This + may also be the symbol ``NEVER_SET`` or ``NO_VALUE``. + If the listener is registered with ``active_history=True``, + the previous value of the attribute will be loaded from + the database if the existing value is currently unloaded + or expired. + :param initiator: An instance of :class:`.attributes.Event` + representing the initiation of the event. May be modified + from its original value by backref handlers in order to control + chained event propagation. + + .. versionchanged:: 0.9.0 the ``initiator`` argument is now + passed as a :class:`.attributes.Event` object, and may be + modified by backref handlers within a chain of backref-linked + events. + + :return: if the event was registered with ``retval=True``, + the given value, or a new effective value, should be returned. + + """ + + def init_collection(self, target, collection, collection_adapter): + """Receive a 'collection init' event. + + This event is triggered for a collection-based attribute, when + the initial "empty collection" is first generated for a blank + attribute, as well as for when the collection is replaced with + a new one, such as via a set event. + + E.g., given that ``User.addresses`` is a relationship-based + collection, the event is triggered here:: + + u1 = User() + u1.addresses.append(a1) # <- new collection + + and also during replace operations:: + + u1.addresses = [a2, a3] # <- new collection + + :param target: the object instance receiving the event. + If the listener is registered with ``raw=True``, this will + be the :class:`.InstanceState` object. + :param collection: the new collection. 
This will always be generated + from what was specified as + :paramref:`.RelationshipProperty.collection_class`, and will always + be empty. + :param collection_adpater: the :class:`.CollectionAdapter` that will + mediate internal access to the collection. + + .. versionadded:: 1.0.0 the :meth:`.AttributeEvents.init_collection` + and :meth:`.AttributeEvents.dispose_collection` events supersede + the :class:`.collection.linker` hook. + + """ + + def dispose_collection(self, target, collection, collection_adpater): + """Receive a 'collection dispose' event. + + This event is triggered for a collection-based attribute when + a collection is replaced, that is:: + + u1.addresses.append(a1) + + u1.addresses = [a2, a3] # <- old collection is disposed + + The mechanics of the event will typically include that the given + collection is empty, even if it stored objects while being replaced. + + .. versionadded:: 1.0.0 the :meth:`.AttributeEvents.init_collection` + and :meth:`.AttributeEvents.dispose_collection` events supersede + the :class:`.collection.linker` hook. + + """ + + +class QueryEvents(event.Events): + """Represent events within the construction of a :class:`.Query` object. + + The events here are intended to be used with an as-yet-unreleased + inspection system for :class:`.Query`. Some very basic operations + are possible now, however the inspection system is intended to allow + complex query manipulations to be automated. + + .. versionadded:: 1.0.0 + + """ + + _target_class_doc = "SomeQuery" + _dispatch_target = Query + + def before_compile(self, query): + """Receive the :class:`.Query` object before it is composed into a + core :class:`.Select` object. + + This event is intended to allow changes to the query given:: + + @event.listens_for(Query, "before_compile", retval=True) + def no_deleted(query): + for desc in query.column_descriptions: + if desc['type'] is User: + entity = desc['entity'] + query = query.filter(entity.deleted == False) + return query + + The event should normally be listened with the ``retval=True`` + parameter set, so that the modified query may be returned. + + + """ + + @classmethod + def _listen( + cls, event_key, retval=False, **kw): + fn = event_key._listen_fn + + if not retval: + def wrap(*arg, **kw): + if not retval: + query = arg[0] + fn(*arg, **kw) + return query + else: + return fn(*arg, **kw) + event_key = event_key.with_wrapper(wrap) + + event_key.base_listen(**kw) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/exc.py b/lib/python3.4/site-packages/sqlalchemy/orm/exc.py new file mode 100644 index 0000000..db99322 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/exc.py @@ -0,0 +1,165 @@ +# orm/exc.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""SQLAlchemy ORM exceptions.""" +from .. import exc as sa_exc, util + +NO_STATE = (AttributeError, KeyError) +"""Exception types that may be raised by instrumentation implementations.""" + + +class StaleDataError(sa_exc.SQLAlchemyError): + """An operation encountered database state that is unaccounted for. + + Conditions which cause this to happen include: + + * A flush may have attempted to update or delete rows + and an unexpected number of rows were matched during + the UPDATE or DELETE statement. 
Note that when + version_id_col is used, rows in UPDATE or DELETE statements + are also matched against the current known version + identifier. + + * A mapped object with version_id_col was refreshed, + and the version number coming back from the database does + not match that of the object itself. + + * An object is detached from its parent object, however + the object was previously attached to a different parent + identity which was garbage collected, and a decision + cannot be made if the new parent was really the most + recent "parent". + + .. versionadded:: 0.7.4 + + """ + +ConcurrentModificationError = StaleDataError + + +class FlushError(sa_exc.SQLAlchemyError): + """An invalid condition was detected during flush().""" + + +class UnmappedError(sa_exc.InvalidRequestError): + """Base for exceptions that involve expected mappings not present.""" + + +class ObjectDereferencedError(sa_exc.SQLAlchemyError): + """An operation cannot complete due to an object being garbage + collected. + + """ + + +class DetachedInstanceError(sa_exc.SQLAlchemyError): + """An attempt to access unloaded attributes on a + mapped instance that is detached.""" + + +class UnmappedInstanceError(UnmappedError): + """A mapping operation was requested for an unknown instance.""" + + @util.dependencies("sqlalchemy.orm.base") + def __init__(self, base, obj, msg=None): + if not msg: + try: + base.class_mapper(type(obj)) + name = _safe_cls_name(type(obj)) + msg = ("Class %r is mapped, but this instance lacks " + "instrumentation. This occurs when the instance " + "is created before sqlalchemy.orm.mapper(%s) " + "was called." % (name, name)) + except UnmappedClassError: + msg = _default_unmapped(type(obj)) + if isinstance(obj, type): + msg += ( + '; was a class (%s) supplied where an instance was ' + 'required?' % _safe_cls_name(obj)) + UnmappedError.__init__(self, msg) + + def __reduce__(self): + return self.__class__, (None, self.args[0]) + + +class UnmappedClassError(UnmappedError): + """A mapping operation was requested for an unknown class.""" + + def __init__(self, cls, msg=None): + if not msg: + msg = _default_unmapped(cls) + UnmappedError.__init__(self, msg) + + def __reduce__(self): + return self.__class__, (None, self.args[0]) + + +class ObjectDeletedError(sa_exc.InvalidRequestError): + """A refresh operation failed to retrieve the database + row corresponding to an object's known primary key identity. + + A refresh operation proceeds when an expired attribute is + accessed on an object, or when :meth:`.Query.get` is + used to retrieve an object which is, upon retrieval, detected + as expired. A SELECT is emitted for the target row + based on primary key; if no row is returned, this + exception is raised. + + The true meaning of this exception is simply that + no row exists for the primary key identifier associated + with a persistent object. The row may have been + deleted, or in some cases the primary key updated + to a new value, outside of the ORM's management of the target + object. + + """ + @util.dependencies("sqlalchemy.orm.base") + def __init__(self, base, state, msg=None): + if not msg: + msg = "Instance '%s' has been deleted, or its "\ + "row is otherwise not present."
% base.state_str(state) + + sa_exc.InvalidRequestError.__init__(self, msg) + + def __reduce__(self): + return self.__class__, (None, self.args[0]) + + +class UnmappedColumnError(sa_exc.InvalidRequestError): + """Mapping operation was requested on an unknown column.""" + + +class NoResultFound(sa_exc.InvalidRequestError): + """A database result was required but none was found.""" + + +class MultipleResultsFound(sa_exc.InvalidRequestError): + """A single database result was required but more than one were found.""" + + +def _safe_cls_name(cls): + try: + cls_name = '.'.join((cls.__module__, cls.__name__)) + except AttributeError: + cls_name = getattr(cls, '__name__', None) + if cls_name is None: + cls_name = repr(cls) + return cls_name + + +@util.dependencies("sqlalchemy.orm.base") +def _default_unmapped(base, cls): + try: + mappers = base.manager_of_class(cls).mappers + except NO_STATE: + mappers = {} + except TypeError: + mappers = {} + name = _safe_cls_name(cls) + + if not mappers: + return "Class '%s' is not mapped" % name diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/identity.py b/lib/python3.4/site-packages/sqlalchemy/orm/identity.py new file mode 100644 index 0000000..5646732 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/identity.py @@ -0,0 +1,314 @@ +# orm/identity.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import weakref +from . import attributes +from .. import util + + +class IdentityMap(object): + def __init__(self): + self._dict = {} + self._modified = set() + self._wr = weakref.ref(self) + + def keys(self): + return self._dict.keys() + + def replace(self, state): + raise NotImplementedError() + + def add(self, state): + raise NotImplementedError() + + def _add_unpresent(self, state, key): + """optional inlined form of add() which can assume item isn't present + in the map""" + self.add(state) + + def update(self, dict): + raise NotImplementedError("IdentityMap uses add() to insert data") + + def clear(self): + raise NotImplementedError("IdentityMap uses remove() to remove data") + + def _manage_incoming_state(self, state): + state._instance_dict = self._wr + + if state.modified: + self._modified.add(state) + + def _manage_removed_state(self, state): + del state._instance_dict + if state.modified: + self._modified.discard(state) + + def _dirty_states(self): + return self._modified + + def check_modified(self): + """return True if any InstanceStates present have been marked + as 'modified'. 
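For the result-related exceptions above, a typical usage sketch; ``session`` and the mapped ``User`` class are assumed::

    from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound

    try:
        user = session.query(User).filter_by(name='ed').one()
    except NoResultFound:
        user = None   # no row matched
    except MultipleResultsFound:
        raise         # .one() requires exactly one row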
+ + """ + return bool(self._modified) + + def has_key(self, key): + return key in self + + def popitem(self): + raise NotImplementedError("IdentityMap uses remove() to remove data") + + def pop(self, key, *args): + raise NotImplementedError("IdentityMap uses remove() to remove data") + + def setdefault(self, key, default=None): + raise NotImplementedError("IdentityMap uses add() to insert data") + + def __len__(self): + return len(self._dict) + + def copy(self): + raise NotImplementedError() + + def __setitem__(self, key, value): + raise NotImplementedError("IdentityMap uses add() to insert data") + + def __delitem__(self, key): + raise NotImplementedError("IdentityMap uses remove() to remove data") + + +class WeakInstanceDict(IdentityMap): + + def __getitem__(self, key): + state = self._dict[key] + o = state.obj() + if o is None: + raise KeyError(key) + return o + + def __contains__(self, key): + try: + if key in self._dict: + state = self._dict[key] + o = state.obj() + else: + return False + except KeyError: + return False + else: + return o is not None + + def contains_state(self, state): + return state.key in self._dict and self._dict[state.key] is state + + def replace(self, state): + if state.key in self._dict: + existing = self._dict[state.key] + if existing is not state: + self._manage_removed_state(existing) + else: + return + + self._dict[state.key] = state + self._manage_incoming_state(state) + + def add(self, state): + key = state.key + # inline of self.__contains__ + if key in self._dict: + try: + existing_state = self._dict[key] + if existing_state is not state: + o = existing_state.obj() + if o is not None: + raise AssertionError( + "A conflicting state is already " + "present in the identity map for key %r" + % (key, )) + else: + return + except KeyError: + pass + self._dict[key] = state + self._manage_incoming_state(state) + + def _add_unpresent(self, state, key): + # inlined form of add() called by loading.py + self._dict[key] = state + state._instance_dict = self._wr + + def get(self, key, default=None): + if key not in self._dict: + return default + state = self._dict[key] + o = state.obj() + if o is None: + return default + return o + + def items(self): + values = self.all_states() + result = [] + for state in values: + value = state.obj() + if value is not None: + result.append((state.key, value)) + return result + + def values(self): + values = self.all_states() + result = [] + for state in values: + value = state.obj() + if value is not None: + result.append(value) + + return result + + def __iter__(self): + return iter(self.keys()) + + if util.py2k: + + def iteritems(self): + return iter(self.items()) + + def itervalues(self): + return iter(self.values()) + + def all_states(self): + if util.py2k: + return self._dict.values() + else: + return list(self._dict.values()) + + def _fast_discard(self, state): + self._dict.pop(state.key, None) + + def discard(self, state): + st = self._dict.pop(state.key, None) + if st: + assert st is state + self._manage_removed_state(state) + + def safe_discard(self, state): + if state.key in self._dict: + st = self._dict[state.key] + if st is state: + self._dict.pop(state.key, None) + self._manage_removed_state(state) + + def prune(self): + return 0 + + +class StrongInstanceDict(IdentityMap): + """A 'strong-referencing' version of the identity map. + + .. deprecated:: this object is present in order to fulfill + the ``weak_identity_map=False`` option of the Session. 
+ This option is present to allow compatibility with older applications, + but it is recommended that strong references to objects + be maintained by the calling application + externally to the :class:`.Session` itself, to the degree + that is needed by the application. + + """ + + if util.py2k: + def itervalues(self): + return self._dict.itervalues() + + def iteritems(self): + return self._dict.iteritems() + + def __iter__(self): + return iter(self.dict_) + + def __getitem__(self, key): + return self._dict[key] + + def __contains__(self, key): + return key in self._dict + + def get(self, key, default=None): + return self._dict.get(key, default) + + def values(self): + return self._dict.values() + + def items(self): + return self._dict.items() + + def all_states(self): + return [attributes.instance_state(o) for o in self.values()] + + def contains_state(self, state): + return ( + state.key in self and + attributes.instance_state(self[state.key]) is state) + + def replace(self, state): + if state.key in self._dict: + existing = self._dict[state.key] + existing = attributes.instance_state(existing) + if existing is not state: + self._manage_removed_state(existing) + else: + return + + self._dict[state.key] = state.obj() + self._manage_incoming_state(state) + + def add(self, state): + if state.key in self: + if attributes.instance_state(self._dict[state.key]) is not state: + raise AssertionError('A conflicting state is already ' + 'present in the identity map for key %r' + % (state.key, )) + else: + self._dict[state.key] = state.obj() + self._manage_incoming_state(state) + + def _add_unpresent(self, state, key): + # inlined form of add() called by loading.py + self._dict[key] = state.obj() + state._instance_dict = self._wr + + def _fast_discard(self, state): + self._dict.pop(state.key, None) + + def discard(self, state): + obj = self._dict.pop(state.key, None) + if obj is not None: + self._manage_removed_state(state) + st = attributes.instance_state(obj) + assert st is state + + def safe_discard(self, state): + if state.key in self._dict: + obj = self._dict[state.key] + st = attributes.instance_state(obj) + if st is state: + self._dict.pop(state.key, None) + self._manage_removed_state(state) + + def prune(self): + """prune unreferenced, non-dirty states.""" + + ref_count = len(self) + dirty = [s.obj() for s in self.all_states() if s.modified] + + # work around http://bugs.python.org/issue6149 + keepers = weakref.WeakValueDictionary() + keepers.update(self) + + self._dict.clear() + self._dict.update(keepers) + self.modified = bool(dirty) + return ref_count - len(self) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/instrumentation.py b/lib/python3.4/site-packages/sqlalchemy/orm/instrumentation.py new file mode 100644 index 0000000..d41ee59 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/instrumentation.py @@ -0,0 +1,528 @@ +# orm/instrumentation.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Defines SQLAlchemy's system of class instrumentation. + +This module is usually not directly visible to user applications, but +defines a large part of the ORM's interactivity. + +instrumentation.py deals with registration of end-user classes +for state tracking. It interacts closely with state.py +and attributes.py which establish per-instance and per-class-attribute +instrumentation, respectively. 
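A short sketch of the deprecated strong-map option named in the docstring above; holding strong references in the application itself is the recommended approach::

    from sqlalchemy.orm import sessionmaker

    Session = sessionmaker(weak_identity_map=False)  # deprecated option
    session = Session()
    # ... load and work with objects; StrongInstanceDict keeps them alive
    removed = session.identity_map.prune()  # drop unreferenced, clean states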
+ +The class instrumentation system can be customized on a per-class +or global basis using the :mod:`sqlalchemy.ext.instrumentation` +module, which provides the means to build and specify +alternate instrumentation forms. + +.. versionchanged: 0.8 + The instrumentation extension system was moved out of the + ORM and into the external :mod:`sqlalchemy.ext.instrumentation` + package. When that package is imported, it installs + itself within sqlalchemy.orm so that its more comprehensive + resolution mechanics take effect. + +""" + + +from . import exc, collections, interfaces, state +from .. import util +from . import base + + +_memoized_key_collection = util.group_expirable_memoized_property() + + +class ClassManager(dict): + """tracks state information at the class level.""" + + MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR + STATE_ATTR = base.DEFAULT_STATE_ATTR + + _state_setter = staticmethod(util.attrsetter(STATE_ATTR)) + + deferred_scalar_loader = None + + original_init = object.__init__ + + factory = None + + def __init__(self, class_): + self.class_ = class_ + self.info = {} + self.new_init = None + self.local_attrs = {} + self.originals = {} + + self._bases = [mgr for mgr in [ + manager_of_class(base) + for base in self.class_.__bases__ + if isinstance(base, type) + ] if mgr is not None] + + for base in self._bases: + self.update(base) + + self.dispatch._events._new_classmanager_instance(class_, self) + # events._InstanceEventsHold.populate(class_, self) + + for basecls in class_.__mro__: + mgr = manager_of_class(basecls) + if mgr is not None: + self.dispatch._update(mgr.dispatch) + self.manage() + self._instrument_init() + + if '__del__' in class_.__dict__: + util.warn("__del__() method on class %s will " + "cause unreachable cycles and memory leaks, " + "as SQLAlchemy instrumentation often creates " + "reference cycles. Please remove this method." % + class_) + + def __hash__(self): + return id(self) + + def __eq__(self, other): + return other is self + + @property + def is_mapped(self): + return 'mapper' in self.__dict__ + + @_memoized_key_collection + def _all_key_set(self): + return frozenset(self) + + @_memoized_key_collection + def _collection_impl_keys(self): + return frozenset([ + attr.key for attr in self.values() if attr.impl.collection]) + + @_memoized_key_collection + def _scalar_loader_impls(self): + return frozenset([ + attr.impl for attr in + self.values() if attr.impl.accepts_scalar_loader]) + + @util.memoized_property + def mapper(self): + # raises unless self.mapper has been assigned + raise exc.UnmappedClassError(self.class_) + + def _all_sqla_attributes(self, exclude=None): + """return an iterator of all classbound attributes that are + implement :class:`.InspectionAttr`. + + This includes :class:`.QueryableAttribute` as well as extension + types such as :class:`.hybrid_property` and + :class:`.AssociationProxy`. + + """ + if exclude is None: + exclude = set() + for supercls in self.class_.__mro__: + for key in set(supercls.__dict__).difference(exclude): + exclude.add(key) + val = supercls.__dict__[key] + if isinstance(val, interfaces.InspectionAttr): + yield key, val + + def _attr_has_impl(self, key): + """Return True if the given attribute is fully initialized. + + i.e. has an impl. + """ + + return key in self and self[key].impl is not None + + def _subclass_manager(self, cls): + """Create a new ClassManager for a subclass of this ClassManager's + class. 
+ + This is called automatically when attributes are instrumented so that + the attributes can be propagated to subclasses against their own + class-local manager, without the need for mappers etc. to have already + pre-configured managers for the full class hierarchy. Mappers + can post-configure the auto-generated ClassManager when needed. + + """ + manager = manager_of_class(cls) + if manager is None: + manager = _instrumentation_factory.create_manager_for_cls(cls) + return manager + + def _instrument_init(self): + # TODO: self.class_.__init__ is often the already-instrumented + # __init__ from an instrumented superclass. We still need to make + # our own wrapper, but it would + # be nice to wrap the original __init__ and not our existing wrapper + # of such, since this adds method overhead. + self.original_init = self.class_.__init__ + self.new_init = _generate_init(self.class_, self) + self.install_member('__init__', self.new_init) + + def _uninstrument_init(self): + if self.new_init: + self.uninstall_member('__init__') + self.new_init = None + + @util.memoized_property + def _state_constructor(self): + self.dispatch.first_init(self, self.class_) + return state.InstanceState + + def manage(self): + """Mark this instance as the manager for its class.""" + + setattr(self.class_, self.MANAGER_ATTR, self) + + def dispose(self): + """Dissasociate this manager from its class.""" + + delattr(self.class_, self.MANAGER_ATTR) + + @util.hybridmethod + def manager_getter(self): + return _default_manager_getter + + @util.hybridmethod + def state_getter(self): + """Return a (instance) -> InstanceState callable. + + "state getter" callables should raise either KeyError or + AttributeError if no InstanceState could be found for the + instance. + """ + + return _default_state_getter + + @util.hybridmethod + def dict_getter(self): + return _default_dict_getter + + def instrument_attribute(self, key, inst, propagated=False): + if propagated: + if key in self.local_attrs: + return # don't override local attr with inherited attr + else: + self.local_attrs[key] = inst + self.install_descriptor(key, inst) + _memoized_key_collection.expire_instance(self) + self[key] = inst + + for cls in self.class_.__subclasses__(): + manager = self._subclass_manager(cls) + manager.instrument_attribute(key, inst, True) + + def subclass_managers(self, recursive): + for cls in self.class_.__subclasses__(): + mgr = manager_of_class(cls) + if mgr is not None and mgr is not self: + yield mgr + if recursive: + for m in mgr.subclass_managers(True): + yield m + + def post_configure_attribute(self, key): + _instrumentation_factory.dispatch.\ + attribute_instrument(self.class_, key, self[key]) + + def uninstrument_attribute(self, key, propagated=False): + if key not in self: + return + if propagated: + if key in self.local_attrs: + return # don't get rid of local attr + else: + del self.local_attrs[key] + self.uninstall_descriptor(key) + _memoized_key_collection.expire_instance(self) + del self[key] + for cls in self.class_.__subclasses__(): + manager = manager_of_class(cls) + if manager: + manager.uninstrument_attribute(key, True) + + def unregister(self): + """remove all instrumentation established by this ClassManager.""" + + self._uninstrument_init() + + self.mapper = self.dispatch = None + self.info.clear() + + for key in list(self): + if key in self.local_attrs: + self.uninstrument_attribute(key) + + def install_descriptor(self, key, inst): + if key in (self.STATE_ATTR, self.MANAGER_ATTR): + raise KeyError("%r: requested 
attribute name conflicts with " + "instrumentation attribute of the same name." % + key) + setattr(self.class_, key, inst) + + def uninstall_descriptor(self, key): + delattr(self.class_, key) + + def install_member(self, key, implementation): + if key in (self.STATE_ATTR, self.MANAGER_ATTR): + raise KeyError("%r: requested attribute name conflicts with " + "instrumentation attribute of the same name." % + key) + self.originals.setdefault(key, getattr(self.class_, key, None)) + setattr(self.class_, key, implementation) + + def uninstall_member(self, key): + original = self.originals.pop(key, None) + if original is not None: + setattr(self.class_, key, original) + + def instrument_collection_class(self, key, collection_class): + return collections.prepare_instrumentation(collection_class) + + def initialize_collection(self, key, state, factory): + user_data = factory() + adapter = collections.CollectionAdapter( + self.get_impl(key), state, user_data) + return adapter, user_data + + def is_instrumented(self, key, search=False): + if search: + return key in self + else: + return key in self.local_attrs + + def get_impl(self, key): + return self[key].impl + + @property + def attributes(self): + return iter(self.values()) + + # InstanceState management + + def new_instance(self, state=None): + instance = self.class_.__new__(self.class_) + if state is None: + state = self._state_constructor(instance, self) + self._state_setter(instance, state) + return instance + + def setup_instance(self, instance, state=None): + if state is None: + state = self._state_constructor(instance, self) + self._state_setter(instance, state) + + def teardown_instance(self, instance): + delattr(instance, self.STATE_ATTR) + + def _serialize(self, state, state_dict): + return _SerializeManager(state, state_dict) + + def _new_state_if_none(self, instance): + """Install a default InstanceState if none is present. + + A private convenience method used by the __init__ decorator. + + """ + if hasattr(instance, self.STATE_ATTR): + return False + elif self.class_ is not instance.__class__ and \ + self.is_mapped: + # this will create a new ClassManager for the + # subclass, without a mapper. This is likely a + # user error situation but allow the object + # to be constructed, so that it is usable + # in a non-ORM context at least. + return self._subclass_manager(instance.__class__).\ + _new_state_if_none(instance) + else: + state = self._state_constructor(instance, self) + self._state_setter(instance, state) + return state + + def has_state(self, instance): + return hasattr(instance, self.STATE_ATTR) + + def has_parent(self, state, key, optimistic=False): + """TODO""" + return self.get_impl(key).hasparent(state, optimistic=optimistic) + + def __bool__(self): + """All ClassManagers are non-zero regardless of attribute state.""" + return True + + __nonzero__ = __bool__ + + def __repr__(self): + return '<%s of %r at %x>' % ( + self.__class__.__name__, self.class_, id(self)) + + +class _SerializeManager(object): + """Provide serialization of a :class:`.ClassManager`. + + The :class:`.InstanceState` uses ``__init__()`` on serialize + and ``__call__()`` on deserialize. 
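To illustrate the serialize/deserialize path just described, a hedged sketch; ``some_user`` is an assumed mapped instance, and its mapper must be configured in the process that unpickles::

    import pickle

    payload = pickle.dumps(some_user)   # the "pickle" dispatch event fires
    restored = pickle.loads(payload)    # "unpickle" fires, state re-attached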
+ + """ + + def __init__(self, state, d): + self.class_ = state.class_ + manager = state.manager + manager.dispatch.pickle(state, d) + + def __call__(self, state, inst, state_dict): + state.manager = manager = manager_of_class(self.class_) + if manager is None: + raise exc.UnmappedInstanceError( + inst, + "Cannot deserialize object of type %r - " + "no mapper() has " + "been configured for this class within the current " + "Python process!" % + self.class_) + elif manager.is_mapped and not manager.mapper.configured: + manager.mapper._configure_all() + + # setup _sa_instance_state ahead of time so that + # unpickle events can access the object normally. + # see [ticket:2362] + if inst is not None: + manager.setup_instance(inst, state) + manager.dispatch.unpickle(state, state_dict) + + +class InstrumentationFactory(object): + """Factory for new ClassManager instances.""" + + def create_manager_for_cls(self, class_): + assert class_ is not None + assert manager_of_class(class_) is None + + # give a more complicated subclass + # a chance to do what it wants here + manager, factory = self._locate_extended_factory(class_) + + if factory is None: + factory = ClassManager + manager = factory(class_) + + self._check_conflicts(class_, factory) + + manager.factory = factory + + self.dispatch.class_instrument(class_) + return manager + + def _locate_extended_factory(self, class_): + """Overridden by a subclass to do an extended lookup.""" + return None, None + + def _check_conflicts(self, class_, factory): + """Overridden by a subclass to test for conflicting factories.""" + return + + def unregister(self, class_): + manager = manager_of_class(class_) + manager.unregister() + manager.dispose() + self.dispatch.class_uninstrument(class_) + if ClassManager.MANAGER_ATTR in class_.__dict__: + delattr(class_, ClassManager.MANAGER_ATTR) + +# this attribute is replaced by sqlalchemy.ext.instrumentation +# when importred. +_instrumentation_factory = InstrumentationFactory() + +# these attributes are replaced by sqlalchemy.ext.instrumentation +# when a non-standard InstrumentationManager class is first +# used to instrument a class. +instance_state = _default_state_getter = base.instance_state + +instance_dict = _default_dict_getter = base.instance_dict + +manager_of_class = _default_manager_getter = base.manager_of_class + + +def register_class(class_): + """Register class instrumentation. + + Returns the existing or newly created class manager. + + """ + + manager = manager_of_class(class_) + if manager is None: + manager = _instrumentation_factory.create_manager_for_cls(class_) + return manager + + +def unregister_class(class_): + """Unregister class instrumentation.""" + + _instrumentation_factory.unregister(class_) + + +def is_instrumented(instance, key): + """Return True if the given attribute on the given instance is + instrumented by the attributes package. + + This function may be used regardless of instrumentation + applied directly to the class, i.e. no descriptors are required. + + """ + return manager_of_class(instance.__class__).\ + is_instrumented(key, search=True) + + +def _generate_init(class_, class_manager): + """Build an __init__ decorator that triggers ClassManager events.""" + + # TODO: we should use the ClassManager's notion of the + # original '__init__' method, once ClassManager is fixed + # to always reference that. 
+ original__init__ = class_.__init__ + assert original__init__ + + # Go through some effort here and don't change the user's __init__ + # calling signature, including the unlikely case that it has + # a return value. + # FIXME: need to juggle local names to avoid constructor argument + # clashes. + func_body = """\ +def __init__(%(apply_pos)s): + new_state = class_manager._new_state_if_none(%(self_arg)s) + if new_state: + return new_state._initialize_instance(%(apply_kw)s) + else: + return original__init__(%(apply_kw)s) +""" + func_vars = util.format_argspec_init(original__init__, grouped=False) + func_text = func_body % func_vars + + if util.py2k: + func = getattr(original__init__, 'im_func', original__init__) + func_defaults = getattr(func, 'func_defaults', None) + else: + func_defaults = getattr(original__init__, '__defaults__', None) + func_kw_defaults = getattr(original__init__, '__kwdefaults__', None) + + env = locals().copy() + exec(func_text, env) + __init__ = env['__init__'] + __init__.__doc__ = original__init__.__doc__ + + if func_defaults: + __init__.__defaults__ = func_defaults + if not util.py2k and func_kw_defaults: + __init__.__kwdefaults__ = func_kw_defaults + + return __init__ diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/interfaces.py b/lib/python3.4/site-packages/sqlalchemy/orm/interfaces.py new file mode 100644 index 0000000..2ff00ae --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/interfaces.py @@ -0,0 +1,640 @@ +# orm/interfaces.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" + +Contains various base classes used throughout the ORM. + +Defines some key base classes prominent within the internals, +as well as the now-deprecated ORM extension classes. + +Other than the deprecated extensions, this module and the +classes within are mostly private, though some attributes +are exposed when inspecting mappings. + +""" + +from __future__ import absolute_import + +from .. import util +from ..sql import operators +from .base import (ONETOMANY, MANYTOONE, MANYTOMANY, + EXT_CONTINUE, EXT_STOP, NOT_EXTENSION) +from .base import (InspectionAttr, InspectionAttr, + InspectionAttrInfo, _MappedAttribute) +import collections +from .. import inspect + +# imported later +MapperExtension = SessionExtension = AttributeExtension = None + +__all__ = ( + 'AttributeExtension', + 'EXT_CONTINUE', + 'EXT_STOP', + 'ONETOMANY', + 'MANYTOMANY', + 'MANYTOONE', + 'NOT_EXTENSION', + 'LoaderStrategy', + 'MapperExtension', + 'MapperOption', + 'MapperProperty', + 'PropComparator', + 'SessionExtension', + 'StrategizedProperty', +) + + +class MapperProperty(_MappedAttribute, InspectionAttr, util.MemoizedSlots): + """Represent a particular class attribute mapped by :class:`.Mapper`. + + The most common occurrences of :class:`.MapperProperty` are the + mapped :class:`.Column`, which is represented in a mapping as + an instance of :class:`.ColumnProperty`, + and a reference to another class produced by :func:`.relationship`, + represented in the mapping as an instance of + :class:`.RelationshipProperty`. + + """ + + __slots__ = ( + '_configure_started', '_configure_finished', 'parent', 'key', + 'info' + ) + + cascade = frozenset() + """The set of 'cascade' attribute names. + + This collection is checked before the 'cascade_iterator' method is called. + + The collection typically only applies to a RelationshipProperty. 
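The registration helpers above can also be exercised directly, though normally :func:`.mapper` invokes them; a minimal sketch::

    from sqlalchemy.orm import instrumentation

    class Plain(object):
        pass

    manager = instrumentation.register_class(Plain)   # idempotent
    assert instrumentation.manager_of_class(Plain) is manager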
+ + """ + + is_property = True + """Part of the InspectionAttr interface; states this object is a + mapper property. + + """ + + def _memoized_attr_info(self): + """Info dictionary associated with the object, allowing user-defined + data to be associated with this :class:`.InspectionAttr`. + + The dictionary is generated when first accessed. Alternatively, + it can be specified as a constructor argument to the + :func:`.column_property`, :func:`.relationship`, or :func:`.composite` + functions. + + .. versionadded:: 0.8 Added support for .info to all + :class:`.MapperProperty` subclasses. + + .. versionchanged:: 1.0.0 :attr:`.MapperProperty.info` is also + available on extension types via the + :attr:`.InspectionAttrInfo.info` attribute, so that it can apply + to a wider variety of ORM and extension constructs. + + .. seealso:: + + :attr:`.QueryableAttribute.info` + + :attr:`.SchemaItem.info` + + """ + return {} + + def setup(self, context, entity, path, adapter, **kwargs): + """Called by Query for the purposes of constructing a SQL statement. + + Each MapperProperty associated with the target mapper processes the + statement referenced by the query context, adding columns and/or + criterion as appropriate. + + """ + + def create_row_processor(self, context, path, + mapper, result, adapter, populators): + """Produce row processing functions and append to the given + set of populators lists. + + """ + + def cascade_iterator(self, type_, state, visited_instances=None, + halt_on=None): + """Iterate through instances related to the given instance for + a particular 'cascade', starting with this MapperProperty. + + Return an iterator of 3-tuples (instance, mapper, state). + + Note that the 'cascade' collection on this MapperProperty is + checked first for the given type before cascade_iterator is called. + + This method typically only applies to RelationshipProperty. + + """ + + return iter(()) + + def set_parent(self, parent, init): + """Set the parent mapper that references this MapperProperty. + + This method is overridden by some subclasses to perform extra + setup when the mapper is first known. + + """ + self.parent = parent + + def instrument_class(self, mapper): + """Hook called by the Mapper to the property to initiate + instrumentation of the class attribute managed by this + MapperProperty. + + The MapperProperty here will typically call out to the + attributes module to set up an InstrumentedAttribute. + + This step is the first of two steps to set up an InstrumentedAttribute, + and is called early in the mapper setup process. + + The second step is typically the init_class_attribute step, + called from StrategizedProperty via the post_instrument_class() + hook. This step assigns additional state to the InstrumentedAttribute + (specifically the "impl") which has been determined after the + MapperProperty has determined what kind of persistence + management it needs to do (e.g. scalar, object, collection, etc). + + """ + + def __init__(self): + self._configure_started = False + self._configure_finished = False + + def init(self): + """Called after all mappers are created to assemble + relationships between mappers and perform other post-mapper-creation + initialization steps. + + """ + self._configure_started = True + self.do_init() + self._configure_finished = True + + @property + def class_attribute(self): + """Return the class-bound descriptor corresponding to this + :class:`.MapperProperty`.
+ + This is basically a ``getattr()`` call:: + + return getattr(self.parent.class_, self.key) + + I.e. if this :class:`.MapperProperty` were named ``addresses``, + and the class to which it is mapped is ``User``, this sequence + is possible:: + + >>> from sqlalchemy import inspect + >>> mapper = inspect(User) + >>> addresses_property = mapper.attrs.addresses + >>> addresses_property.class_attribute is User.addresses + True + >>> User.addresses.property is addresses_property + True + + + """ + + return getattr(self.parent.class_, self.key) + + def do_init(self): + """Perform subclass-specific initialization post-mapper-creation + steps. + + This is a template method called by the ``MapperProperty`` + object's init() method. + + """ + + def post_instrument_class(self, mapper): + """Perform instrumentation adjustments that need to occur + after init() has completed. + + The given Mapper is the Mapper invoking the operation, which + may not be the same Mapper as self.parent in an inheritance + scenario; however, Mapper will always at least be a sub-mapper of + self.parent. + + This method is typically used by StrategizedProperty, which delegates + it to LoaderStrategy.init_class_attribute() to perform final setup + on the class-bound InstrumentedAttribute. + + """ + + def merge(self, session, source_state, source_dict, dest_state, + dest_dict, load, _recursive): + """Merge the attribute represented by this ``MapperProperty`` + from source to destination object. + + """ + + def __repr__(self): + return '<%s at 0x%x; %s>' % ( + self.__class__.__name__, + id(self), getattr(self, 'key', 'no key')) + + +class PropComparator(operators.ColumnOperators): + """Defines SQL operators for :class:`.MapperProperty` objects. + + SQLAlchemy allows for operators to + be redefined at both the Core and ORM level. :class:`.PropComparator` + is the base class of operator redefinition for ORM-level operations, + including those of :class:`.ColumnProperty`, + :class:`.RelationshipProperty`, and :class:`.CompositeProperty`. + + .. note:: With the advent of Hybrid properties introduced in SQLAlchemy + 0.7, as well as Core-level operator redefinition in + SQLAlchemy 0.8, the use case for user-defined :class:`.PropComparator` + instances is extremely rare. See :ref:`hybrids_toplevel` as well + as :ref:`types_operators`. + + User-defined subclasses of :class:`.PropComparator` may be created. The + built-in Python comparison and math operator methods, such as + :meth:`.operators.ColumnOperators.__eq__`, + :meth:`.operators.ColumnOperators.__lt__`, and + :meth:`.operators.ColumnOperators.__add__`, can be overridden to provide + new operator behavior. The custom :class:`.PropComparator` is passed to + the :class:`.MapperProperty` instance via the ``comparator_factory`` + argument. In each case, + the appropriate subclass of :class:`.PropComparator` should be used:: + + # definition of custom PropComparator subclasses + + from sqlalchemy.orm.properties import \\ + ColumnProperty,\\ + CompositeProperty,\\ + RelationshipProperty + + class MyColumnComparator(ColumnProperty.Comparator): + def __eq__(self, other): + return self.__clause_element__() == other + + class MyRelationshipComparator(RelationshipProperty.Comparator): + def any(self, expression): + "define the 'any' operation" + # ... 
+ + class MyCompositeComparator(CompositeProperty.Comparator): + def __gt__(self, other): + "redefine the 'greater than' operation" + + return sql.and_(*[a>b for a, b in + zip(self.__clause_element__().clauses, + other.__composite_values__())]) + + + # application of custom PropComparator subclasses + + from sqlalchemy.orm import column_property, relationship, composite + from sqlalchemy import Column, String + + class SomeMappedClass(Base): + some_column = column_property(Column("some_column", String), + comparator_factory=MyColumnComparator) + + some_relationship = relationship(SomeOtherClass, + comparator_factory=MyRelationshipComparator) + + some_composite = composite( + Column("a", String), Column("b", String), + comparator_factory=MyCompositeComparator + ) + + Note that for column-level operator redefinition, it's usually + simpler to define the operators at the Core level, using the + :attr:`.TypeEngine.comparator_factory` attribute. See + :ref:`types_operators` for more detail. + + See also: + + :class:`.ColumnProperty.Comparator` + + :class:`.RelationshipProperty.Comparator` + + :class:`.CompositeProperty.Comparator` + + :class:`.ColumnOperators` + + :ref:`types_operators` + + :attr:`.TypeEngine.comparator_factory` + + """ + + __slots__ = 'prop', 'property', '_parententity', '_adapt_to_entity' + + def __init__(self, prop, parentmapper, adapt_to_entity=None): + self.prop = self.property = prop + self._parententity = adapt_to_entity or parentmapper + self._adapt_to_entity = adapt_to_entity + + def __clause_element__(self): + raise NotImplementedError("%r" % self) + + def _query_clause_element(self): + return self.__clause_element__() + + def adapt_to_entity(self, adapt_to_entity): + """Return a copy of this PropComparator which will use the given + :class:`.AliasedInsp` to produce corresponding expressions. + """ + return self.__class__(self.prop, self._parententity, adapt_to_entity) + + @property + def _parentmapper(self): + """legacy; this is renamed to _parententity to be + compatible with QueryableAttribute.""" + return inspect(self._parententity).mapper + + @property + def adapter(self): + """Produce a callable that adapts column expressions + to suit an aliased version of this comparator. + + """ + if self._adapt_to_entity is None: + return None + else: + return self._adapt_to_entity._adapt_element + + @property + def info(self): + return self.property.info + + @staticmethod + def any_op(a, b, **kwargs): + return a.any(b, **kwargs) + + @staticmethod + def has_op(a, b, **kwargs): + return a.has(b, **kwargs) + + @staticmethod + def of_type_op(a, class_): + return a.of_type(class_) + + def of_type(self, class_): + """Redefine this object in terms of a polymorphic subclass. + + Returns a new PropComparator from which further criterion can be + evaluated. + + e.g.:: + + query.join(Company.employees.of_type(Engineer)).\\ + filter(Engineer.name=='foo') + + :param \class_: a class or mapper indicating that criterion will be + against this specific subclass. + + + """ + + return self.operate(PropComparator.of_type_op, class_) + + def any(self, criterion=None, **kwargs): + """Return true if this collection contains any member that meets the + given criterion. + + The usual implementation of ``any()`` is + :meth:`.RelationshipProperty.Comparator.any`. + + :param criterion: an optional ClauseElement formulated against the + member class' table or attributes. 
+ + :param \**kwargs: key/value pairs corresponding to member class + attribute names which will be compared via equality to the + corresponding values. + + """ + + return self.operate(PropComparator.any_op, criterion, **kwargs) + + def has(self, criterion=None, **kwargs): + """Return true if this element references a member which meets the + given criterion. + + The usual implementation of ``has()`` is + :meth:`.RelationshipProperty.Comparator.has`. + + :param criterion: an optional ClauseElement formulated against the + member class' table or attributes. + + :param \**kwargs: key/value pairs corresponding to member class + attribute names which will be compared via equality to the + corresponding values. + + """ + + return self.operate(PropComparator.has_op, criterion, **kwargs) + + +class StrategizedProperty(MapperProperty): + """A MapperProperty which uses selectable strategies to affect + loading behavior. + + There is a single strategy selected by default. Alternate + strategies can be selected at Query time through the usage of + ``StrategizedOption`` objects via the Query.options() method. + + The mechanics of StrategizedProperty are used for every Query + invocation for every mapped attribute participating in that Query, + to determine first how the attribute will be rendered in SQL + and secondly how the attribute will retrieve a value from a result + row and apply it to a mapped object. The routines here are very + performance-critical. + + """ + + __slots__ = '_strategies', 'strategy' + + strategy_wildcard_key = None + + def _get_context_loader(self, context, path): + load = None + + # use EntityRegistry.__getitem__()->PropRegistry here so + # that the path is stated in terms of our base + search_path = dict.__getitem__(path, self) + + # search among: exact match, "attr.*", "default" strategy + # if any. 
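+ # A rough sketch of the three tiers (the key shapes named here are
+ # an assumption for illustration, not taken from this code): an
+ # option such as Query.options(joinedload(User.addresses)) stores
+ # its entry in context.attributes under the exact per-attribute
+ # loader key; a wildcard option such as lazyload('*') lands under
+ # the "attr.*" wildcard key; a query-wide default strategy falls
+ # back to the "default" key. The first of the three found below
+ # wins.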
+ for path_key in ( + search_path._loader_key, + search_path._wildcard_path_loader_key, + search_path._default_path_loader_key + ): + if path_key in context.attributes: + load = context.attributes[path_key] + break + + return load + + def _get_strategy(self, key): + try: + return self._strategies[key] + except KeyError: + cls = self._strategy_lookup(*key) + self._strategies[key] = self._strategies[ + cls] = strategy = cls(self) + return strategy + + def _get_strategy_by_cls(self, cls): + return self._get_strategy(cls._strategy_keys[0]) + + def setup( + self, context, entity, path, adapter, **kwargs): + loader = self._get_context_loader(context, path) + if loader and loader.strategy: + strat = self._get_strategy(loader.strategy) + else: + strat = self.strategy + strat.setup_query(context, entity, path, loader, adapter, **kwargs) + + def create_row_processor( + self, context, path, mapper, + result, adapter, populators): + loader = self._get_context_loader(context, path) + if loader and loader.strategy: + strat = self._get_strategy(loader.strategy) + else: + strat = self.strategy + strat.create_row_processor( + context, path, loader, + mapper, result, adapter, populators) + + def do_init(self): + self._strategies = {} + self.strategy = self._get_strategy_by_cls(self.strategy_class) + + def post_instrument_class(self, mapper): + if not self.parent.non_primary and \ + not mapper.class_manager._attr_has_impl(self.key): + self.strategy.init_class_attribute(mapper) + + _all_strategies = collections.defaultdict(dict) + + @classmethod + def strategy_for(cls, **kw): + def decorate(dec_cls): + # ensure each subclass of the strategy has its + # own _strategy_keys collection + if '_strategy_keys' not in dec_cls.__dict__: + dec_cls._strategy_keys = [] + key = tuple(sorted(kw.items())) + cls._all_strategies[cls][key] = dec_cls + dec_cls._strategy_keys.append(key) + return dec_cls + return decorate + + @classmethod + def _strategy_lookup(cls, *key): + for prop_cls in cls.__mro__: + if prop_cls in cls._all_strategies: + strategies = cls._all_strategies[prop_cls] + try: + return strategies[key] + except KeyError: + pass + raise Exception("can't locate strategy for %s %s" % (cls, key)) + + +class MapperOption(object): + """Describe a modification to a Query.""" + + propagate_to_loaders = False + """if True, indicate this option should be carried along + to "secondary" Query objects produced during lazy loads + or refresh operations. + + """ + + def process_query(self, query): + """Apply a modification to the given :class:`.Query`.""" + + def process_query_conditionally(self, query): + """same as process_query(), except that this option may not + apply to the given query. + + This is typically used during a lazy load or scalar refresh + operation to propagate options stated in the original Query to the + new Query being used for the load. It occurs for those options that + specify propagate_to_loaders=True. + + """ + + self.process_query(query) + + +class LoaderStrategy(object): + """Describe the loading behavior of a StrategizedProperty object. + + The ``LoaderStrategy`` interacts with the querying process in three + ways: + + * it controls the configuration of the ``InstrumentedAttribute`` + placed on a class to handle the behavior of the attribute. this + may involve setting up class-level callable functions to fire + off a select operation when the attribute is first accessed + (i.e. 
a lazy load) + + * it processes the ``QueryContext`` at statement construction time, + where it can modify the SQL statement that is being produced. + For example, simple column attributes will add their represented + column to the list of selected columns, a joined eager loader + may establish join clauses to add to the statement. + + * It produces "row processor" functions at result fetching time. + These "row processor" functions populate a particular attribute + on a particular mapped instance. + + """ + + __slots__ = 'parent_property', 'is_class_level', 'parent', 'key' + + def __init__(self, parent): + self.parent_property = parent + self.is_class_level = False + self.parent = self.parent_property.parent + self.key = self.parent_property.key + + def init_class_attribute(self, mapper): + pass + + def setup_query(self, context, entity, path, loadopt, adapter, **kwargs): + """Establish column and other state for a given QueryContext. + + This method fulfills the contract specified by MapperProperty.setup(). + + StrategizedProperty delegates its setup() method + directly to this method. + + """ + + def create_row_processor(self, context, path, loadopt, mapper, + result, adapter, populators): + """Establish row processing functions for a given QueryContext. + + This method fulfills the contract specified by + MapperProperty.create_row_processor(). + + StrategizedProperty delegates its create_row_processor() method + directly to this method. + + """ + + def __str__(self): + return str(self.parent_property) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/loading.py b/lib/python3.4/site-packages/sqlalchemy/orm/loading.py new file mode 100644 index 0000000..d3e719d --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/loading.py @@ -0,0 +1,669 @@ +# orm/loading.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""private module containing functions used to convert database +rows into object instances and associated state. + +the functions here are called primarily by Query, Mapper, +as well as some of the attribute loading strategies. + +""" +from __future__ import absolute_import + +from .. import util +from . import attributes, exc as orm_exc +from ..sql import util as sql_util +from . import strategy_options + +from .util import _none_set, state_str +from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE +from .. 
import exc as sa_exc +import collections + +_new_runid = util.counter() + + +def instances(query, cursor, context): + """Return an ORM result as an iterator.""" + + context.runid = _new_runid() + + filter_fns = [ent.filter_fn for ent in query._entities] + filtered = id in filter_fns + + single_entity = len(query._entities) == 1 and \ + query._entities[0].supports_single_entity + + if filtered: + if single_entity: + filter_fn = id + else: + def filter_fn(row): + return tuple(fn(x) for x, fn in zip(row, filter_fns)) + + try: + (process, labels) = \ + list(zip(*[ + query_entity.row_processor(query, + context, cursor) + for query_entity in query._entities + ])) + + if not single_entity: + keyed_tuple = util.lightweight_named_tuple('result', labels) + + while True: + context.partials = {} + + if query._yield_per: + fetch = cursor.fetchmany(query._yield_per) + if not fetch: + break + else: + fetch = cursor.fetchall() + + if single_entity: + proc = process[0] + rows = [proc(row) for row in fetch] + else: + rows = [keyed_tuple([proc(row) for proc in process]) + for row in fetch] + + if filtered: + rows = util.unique_list(rows, filter_fn) + + for row in rows: + yield row + + if not query._yield_per: + break + except Exception as err: + cursor.close() + util.raise_from_cause(err) + + +@util.dependencies("sqlalchemy.orm.query") +def merge_result(querylib, query, iterator, load=True): + """Merge a result into this :class:`.Query` object's Session.""" + + session = query.session + if load: + # flush current contents if we expect to load data + session._autoflush() + + autoflush = session.autoflush + try: + session.autoflush = False + single_entity = len(query._entities) == 1 + if single_entity: + if isinstance(query._entities[0], querylib._MapperEntity): + result = [session._merge( + attributes.instance_state(instance), + attributes.instance_dict(instance), + load=load, _recursive={}) + for instance in iterator] + else: + result = list(iterator) + else: + mapped_entities = [i for i, e in enumerate(query._entities) + if isinstance(e, querylib._MapperEntity)] + result = [] + keys = [ent._label_name for ent in query._entities] + keyed_tuple = util.lightweight_named_tuple('result', keys) + for row in iterator: + newrow = list(row) + for i in mapped_entities: + if newrow[i] is not None: + newrow[i] = session._merge( + attributes.instance_state(newrow[i]), + attributes.instance_dict(newrow[i]), + load=load, _recursive={}) + result.append(keyed_tuple(newrow)) + + return iter(result) + finally: + session.autoflush = autoflush + + +def get_from_identity(session, key, passive): + """Look up the given key in the given session's identity map, + check the object for expired state if found. 
+ + """ + instance = session.identity_map.get(key) + if instance is not None: + + state = attributes.instance_state(instance) + + # expired - ensure it still exists + if state.expired: + if not passive & attributes.SQL_OK: + # TODO: no coverage here + return attributes.PASSIVE_NO_RESULT + elif not passive & attributes.RELATED_OBJECT_OK: + # this mode is used within a flush and the instance's + # expired state will be checked soon enough, if necessary + return instance + try: + state._load_expired(state, passive) + except orm_exc.ObjectDeletedError: + session._remove_newly_deleted([state]) + return None + return instance + else: + return None + + +def load_on_ident(query, key, + refresh_state=None, lockmode=None, + only_load_props=None): + """Load the given identity key from the database.""" + + if key is not None: + ident = key[1] + else: + ident = None + + if refresh_state is None: + q = query._clone() + q._get_condition() + else: + q = query._clone() + + if ident is not None: + mapper = query._mapper_zero() + + (_get_clause, _get_params) = mapper._get_clause + + # None present in ident - turn those comparisons + # into "IS NULL" + if None in ident: + nones = set([ + _get_params[col].key for col, value in + zip(mapper.primary_key, ident) if value is None + ]) + _get_clause = sql_util.adapt_criterion_to_null( + _get_clause, nones) + + _get_clause = q._adapt_clause(_get_clause, True, False) + q._criterion = _get_clause + + params = dict([ + (_get_params[primary_key].key, id_val) + for id_val, primary_key in zip(ident, mapper.primary_key) + ]) + + q._params = params + + if lockmode is not None: + version_check = True + q = q.with_lockmode(lockmode) + elif query._for_update_arg is not None: + version_check = True + q._for_update_arg = query._for_update_arg + else: + version_check = False + + q._get_options( + populate_existing=bool(refresh_state), + version_check=version_check, + only_load_props=only_load_props, + refresh_state=refresh_state) + q._order_by = None + + try: + return q.one() + except orm_exc.NoResultFound: + return None + + +def _setup_entity_query( + context, mapper, query_entity, + path, adapter, column_collection, + with_polymorphic=None, only_load_props=None, + polymorphic_discriminator=None, **kw): + + if with_polymorphic: + poly_properties = mapper._iterate_polymorphic_properties( + with_polymorphic) + else: + poly_properties = mapper._polymorphic_properties + + quick_populators = {} + + path.set( + context.attributes, + "memoized_setups", + quick_populators) + + for value in poly_properties: + if only_load_props and \ + value.key not in only_load_props: + continue + value.setup( + context, + query_entity, + path, + adapter, + only_load_props=only_load_props, + column_collection=column_collection, + memoized_populators=quick_populators, + **kw + ) + + if polymorphic_discriminator is not None and \ + polymorphic_discriminator \ + is not mapper.polymorphic_on: + + if adapter: + pd = adapter.columns[polymorphic_discriminator] + else: + pd = polymorphic_discriminator + column_collection.append(pd) + + +def _instance_processor( + mapper, context, result, path, adapter, + only_load_props=None, refresh_state=None, + polymorphic_discriminator=None, + _polymorphic_from=None): + """Produce a mapper level row processor callable + which processes rows into mapped instances.""" + + # note that this method, most of which exists in a closure + # called _instance(), resists being broken out, as + # attempts to do so tend to add significant function + # call overhead. 
_instance() is the most + # performance-critical section in the whole ORM. + + pk_cols = mapper.primary_key + + if adapter: + pk_cols = [adapter.columns[c] for c in pk_cols] + + identity_class = mapper._identity_class + + populators = collections.defaultdict(list) + + props = mapper._prop_set + if only_load_props is not None: + props = props.intersection( + mapper._props[k] for k in only_load_props) + + quick_populators = path.get( + context.attributes, "memoized_setups", _none_set) + + for prop in props: + if prop in quick_populators: + # this is an inlined path just for column-based attributes. + col = quick_populators[prop] + if col is _DEFER_FOR_STATE: + populators["new"].append( + (prop.key, prop._deferred_column_loader)) + elif col is _SET_DEFERRED_EXPIRED: + # note that in this path, we are no longer + # searching in the result to see if the column might + # be present in some unexpected way. + populators["expire"].append((prop.key, False)) + else: + if adapter: + col = adapter.columns[col] + getter = result._getter(col) + if getter: + populators["quick"].append((prop.key, getter)) + else: + # fall back to the ColumnProperty itself, which + # will iterate through all of its columns + # to see if one fits + prop.create_row_processor( + context, path, mapper, result, adapter, populators) + else: + prop.create_row_processor( + context, path, mapper, result, adapter, populators) + + propagate_options = context.propagate_options + if propagate_options: + load_path = context.query._current_path + path \ + if context.query._current_path.path else path + + session_identity_map = context.session.identity_map + + populate_existing = context.populate_existing or mapper.always_refresh + load_evt = bool(mapper.class_manager.dispatch.load) + refresh_evt = bool(mapper.class_manager.dispatch.refresh) + instance_state = attributes.instance_state + instance_dict = attributes.instance_dict + session_id = context.session.hash_key + version_check = context.version_check + runid = context.runid + + if refresh_state: + refresh_identity_key = refresh_state.key + if refresh_identity_key is None: + # super-rare condition; a refresh is being called + # on a non-instance-key instance; this is meant to only + # occur within a flush() + refresh_identity_key = \ + mapper._identity_key_from_state(refresh_state) + else: + refresh_identity_key = None + + if mapper.allow_partial_pks: + is_not_primary_key = _none_set.issuperset + else: + is_not_primary_key = _none_set.intersection + + def _instance(row): + + # determine the state that we'll be populating + if refresh_identity_key: + # fixed state that we're refreshing + state = refresh_state + instance = state.obj() + dict_ = instance_dict(instance) + isnew = state.runid != runid + currentload = True + loaded_instance = False + else: + # look at the row, see if that identity is in the + # session, or we have to create a new one + identitykey = ( + identity_class, + tuple([row[column] for column in pk_cols]) + ) + + instance = session_identity_map.get(identitykey) + + if instance is not None: + # existing instance + state = instance_state(instance) + dict_ = instance_dict(instance) + + isnew = state.runid != runid + currentload = not isnew + loaded_instance = False + + if version_check and not currentload: + _validate_version_id(mapper, state, dict_, row, adapter) + + else: + # create a new instance + + # check for non-NULL values in the primary key columns, + # else no entity is returned for the row + if is_not_primary_key(identitykey[1]): + return None + + isnew = 
True + currentload = True + loaded_instance = True + + instance = mapper.class_manager.new_instance() + + dict_ = instance_dict(instance) + state = instance_state(instance) + state.key = identitykey + + # attach instance to session. + state.session_id = session_id + session_identity_map._add_unpresent(state, identitykey) + + # populate. this looks at whether this state is new + # for this load or was existing, and whether or not this + # row is the first row with this identity. + if currentload or populate_existing: + # full population routines. Objects here are either + # just created, or we are doing a populate_existing + + if isnew and propagate_options: + state.load_options = propagate_options + state.load_path = load_path + + _populate_full( + context, row, state, dict_, isnew, + loaded_instance, populate_existing, populators) + + if isnew: + if loaded_instance and load_evt: + state.manager.dispatch.load(state, context) + elif refresh_evt: + state.manager.dispatch.refresh( + state, context, only_load_props) + + if populate_existing or state.modified: + if refresh_state and only_load_props: + state._commit(dict_, only_load_props) + else: + state._commit_all(dict_, session_identity_map) + + else: + # partial population routines, for objects that were already + # in the Session, but a row matches them; apply eager loaders + # on existing objects, etc. + unloaded = state.unloaded + isnew = state not in context.partials + + if not isnew or unloaded or populators["eager"]: + # state is having a partial set of its attributes + # refreshed. Populate those attributes, + # and add to the "context.partials" collection. + + to_load = _populate_partial( + context, row, state, dict_, isnew, + unloaded, populators) + + if isnew: + if refresh_evt: + state.manager.dispatch.refresh( + state, context, to_load) + + state._commit(dict_, to_load) + + return instance + + if mapper.polymorphic_map and not _polymorphic_from and not refresh_state: + # if we are doing polymorphic, dispatch to a different _instance() + # method specific to the subclass mapper + _instance = _decorate_polymorphic_switch( + _instance, context, mapper, result, path, + polymorphic_discriminator, adapter) + + return _instance + + +def _populate_full( + context, row, state, dict_, isnew, + loaded_instance, populate_existing, populators): + if isnew: + # first time we are seeing a row with this identity. + state.runid = context.runid + + for key, getter in populators["quick"]: + dict_[key] = getter(row) + if populate_existing: + for key, set_callable in populators["expire"]: + dict_.pop(key, None) + if set_callable: + state.expired_attributes.add(key) + else: + for key, set_callable in populators["expire"]: + if set_callable: + state.expired_attributes.add(key) + for key, populator in populators["new"]: + populator(state, dict_, row) + for key, populator in populators["delayed"]: + populator(state, dict_, row) + else: + # have already seen rows with this identity. 
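+ # for example, a joined eager load of a one-to-many emits one row
+ # per child, so the parent identity repeats across rows; only the
+ # "existing" populators (e.g. collection appenders) run again.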
+ for key, populator in populators["existing"]: + populator(state, dict_, row) + + +def _populate_partial( + context, row, state, dict_, isnew, + unloaded, populators): + if not isnew: + to_load = context.partials[state] + for key, populator in populators["existing"]: + if key in to_load: + populator(state, dict_, row) + else: + to_load = unloaded + context.partials[state] = to_load + + for key, getter in populators["quick"]: + if key in to_load: + dict_[key] = getter(row) + for key, set_callable in populators["expire"]: + if key in to_load: + dict_.pop(key, None) + if set_callable: + state.expired_attributes.add(key) + for key, populator in populators["new"]: + if key in to_load: + populator(state, dict_, row) + for key, populator in populators["delayed"]: + if key in to_load: + populator(state, dict_, row) + for key, populator in populators["eager"]: + if key not in unloaded: + populator(state, dict_, row) + + return to_load + + +def _validate_version_id(mapper, state, dict_, row, adapter): + + version_id_col = mapper.version_id_col + + if version_id_col is None: + return + + if adapter: + version_id_col = adapter.columns[version_id_col] + + if mapper._get_state_attr_by_column( + state, dict_, mapper.version_id_col) != row[version_id_col]: + raise orm_exc.StaleDataError( + "Instance '%s' has version id '%s' which " + "does not match database-loaded version id '%s'." + % (state_str(state), mapper._get_state_attr_by_column( + state, dict_, mapper.version_id_col), + row[version_id_col])) + + +def _decorate_polymorphic_switch( + instance_fn, context, mapper, result, path, + polymorphic_discriminator, adapter): + if polymorphic_discriminator is not None: + polymorphic_on = polymorphic_discriminator + else: + polymorphic_on = mapper.polymorphic_on + if polymorphic_on is None: + return instance_fn + + if adapter: + polymorphic_on = adapter.columns[polymorphic_on] + + def configure_subclass_mapper(discriminator): + try: + sub_mapper = mapper.polymorphic_map[discriminator] + except KeyError: + raise AssertionError( + "No such polymorphic_identity %r is defined" % + discriminator) + else: + if sub_mapper is mapper: + return None + + return _instance_processor( + sub_mapper, context, result, + path, adapter, _polymorphic_from=mapper) + + polymorphic_instances = util.PopulateDict( + configure_subclass_mapper + ) + + def polymorphic_instance(row): + discriminator = row[polymorphic_on] + if discriminator is not None: + _instance = polymorphic_instances[discriminator] + if _instance: + return _instance(row) + return instance_fn(row) + return polymorphic_instance + + +def load_scalar_attributes(mapper, state, attribute_names): + """initiate a column-based attribute refresh operation.""" + + # assert mapper is _state_mapper(state) + session = state.session + if not session: + raise orm_exc.DetachedInstanceError( + "Instance %s is not bound to a Session; " + "attribute refresh operation cannot proceed" % + (state_str(state))) + + has_key = bool(state.key) + + result = False + + if mapper.inherits and not mapper.concrete: + # because we are using Core to produce a select() that we + # pass to the Query, we aren't calling setup() for mapped + # attributes; in 1.0 this means deferred attrs won't get loaded + # by default + statement = mapper._optimized_get_statement(state, attribute_names) + if statement is not None: + result = load_on_ident( + session.query(mapper). 
+ options(
+ strategy_options.Load(mapper).undefer("*")
+ ).from_statement(statement),
+ None,
+ only_load_props=attribute_names,
+ refresh_state=state
+ )
+
+ if result is False:
+ if has_key:
+ identity_key = state.key
+ else:
+ # this codepath is rare - only valid when inside a flush, and the
+ # object is becoming persistent but hasn't yet been assigned
+ # an identity_key.
+ # check here to ensure we have the attrs we need.
+ pk_attrs = [mapper._columntoproperty[col].key
+ for col in mapper.primary_key]
+ if state.expired_attributes.intersection(pk_attrs):
+ raise sa_exc.InvalidRequestError(
+ "Instance %s cannot be refreshed - it's not "
+ "persistent and does not "
+ "contain a full primary key." % state_str(state))
+ identity_key = mapper._identity_key_from_state(state)
+
+ if (_none_set.issubset(identity_key) and
+ not mapper.allow_partial_pks) or \
+ _none_set.issuperset(identity_key):
+ util.warn_limited(
+ "Instance %s to be refreshed doesn't "
+ "contain a full primary key - can't be refreshed "
+ "(and shouldn't be expired, either).",
+ state_str(state))
+ return
+
+ result = load_on_ident(
+ session.query(mapper),
+ identity_key,
+ refresh_state=state,
+ only_load_props=attribute_names)
+
+ # if instance is pending, a refresh operation
+ # may not complete (even if PK attributes are assigned)
+ if has_key and result is None:
+ raise orm_exc.ObjectDeletedError(state) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/mapper.py b/lib/python3.4/site-packages/sqlalchemy/orm/mapper.py new file mode 100644 index 0000000..97e4638 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/mapper.py @@ -0,0 +1,2909 @@ +# orm/mapper.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Logic to map Python classes to and from selectables.
+
+Defines the :class:`~sqlalchemy.orm.mapper.Mapper` class, the central
+configurational unit which associates a class with a database table.
+
+This is a semi-private module; the main configurational API of the ORM is
+available in :mod:`~sqlalchemy.orm`.
+
+"""
+from __future__ import absolute_import
+
+import types
+import weakref
+from itertools import chain
+from collections import deque
+
+from .. import sql, util, log, exc as sa_exc, event, schema, inspection
+from ..sql import expression, visitors, operators, util as sql_util
+from . import instrumentation, attributes, exc as orm_exc, loading
+from . import properties
+from . import util as orm_util
+from .interfaces import MapperProperty, InspectionAttr, _MappedAttribute
+
+from .base import _class_to_mapper, _state_mapper, class_mapper, \
+ state_str, _INSTRUMENTOR
+from .path_registry import PathRegistry
+
+import sys
+
+
+_mapper_registry = weakref.WeakKeyDictionary()
+_already_compiling = False
+
+_memoized_configured_property = util.group_expirable_memoized_property()
+
+
+# a constant returned by _get_attr_by_column to indicate
+# this mapper is not handling an attribute for a particular
+# column
+NO_ATTRIBUTE = util.symbol('NO_ATTRIBUTE')
+
+# lock used to synchronize the "mapper configure" step
+_CONFIGURE_MUTEX = util.threading.RLock()
+
+
+@inspection._self_inspects
+@log.class_logger
+class Mapper(InspectionAttr):
+ """Define the correlation of class attributes to database table
+ columns.
+
+ The :class:`.Mapper` object is instantiated using the
+ :func:`~sqlalchemy.orm.mapper` function.
For information + about instantiating new :class:`.Mapper` objects, see + that function's documentation. + + + When :func:`.mapper` is used + explicitly to link a user defined class with table + metadata, this is referred to as *classical mapping*. + Modern SQLAlchemy usage tends to favor the + :mod:`sqlalchemy.ext.declarative` extension for class + configuration, which + makes usage of :func:`.mapper` behind the scenes. + + Given a particular class known to be mapped by the ORM, + the :class:`.Mapper` which maintains it can be acquired + using the :func:`.inspect` function:: + + from sqlalchemy import inspect + + mapper = inspect(MyClass) + + A class which was mapped by the :mod:`sqlalchemy.ext.declarative` + extension will also have its mapper available via the ``__mapper__`` + attribute. + + + """ + + _new_mappers = False + + def __init__(self, + class_, + local_table=None, + properties=None, + primary_key=None, + non_primary=False, + inherits=None, + inherit_condition=None, + inherit_foreign_keys=None, + extension=None, + order_by=False, + always_refresh=False, + version_id_col=None, + version_id_generator=None, + polymorphic_on=None, + _polymorphic_map=None, + polymorphic_identity=None, + concrete=False, + with_polymorphic=None, + allow_partial_pks=True, + batch=True, + column_prefix=None, + include_properties=None, + exclude_properties=None, + passive_updates=True, + confirm_deleted_rows=True, + eager_defaults=False, + legacy_is_orphan=False, + _compiled_cache_size=100, + ): + """Return a new :class:`~.Mapper` object. + + This function is typically used behind the scenes + via the Declarative extension. When using Declarative, + many of the usual :func:`.mapper` arguments are handled + by the Declarative extension itself, including ``class_``, + ``local_table``, ``properties``, and ``inherits``. + Other options are passed to :func:`.mapper` using + the ``__mapper_args__`` class variable:: + + class MyClass(Base): + __tablename__ = 'my_table' + id = Column(Integer, primary_key=True) + type = Column(String(50)) + alt = Column("some_alt", Integer) + + __mapper_args__ = { + 'polymorphic_on' : type + } + + + Explicit use of :func:`.mapper` + is often referred to as *classical mapping*. The above + declarative example is equivalent in classical form to:: + + my_table = Table("my_table", metadata, + Column('id', Integer, primary_key=True), + Column('type', String(50)), + Column("some_alt", Integer) + ) + + class MyClass(object): + pass + + mapper(MyClass, my_table, + polymorphic_on=my_table.c.type, + properties={ + 'alt':my_table.c.some_alt + }) + + .. seealso:: + + :ref:`classical_mapping` - discussion of direct usage of + :func:`.mapper` + + :param class\_: The class to be mapped. When using Declarative, + this argument is automatically passed as the declared class + itself. + + :param local_table: The :class:`.Table` or other selectable + to which the class is mapped. May be ``None`` if + this mapper inherits from another mapper using single-table + inheritance. When using Declarative, this argument is + automatically passed by the extension, based on what + is configured via the ``__table__`` argument or via the + :class:`.Table` produced as a result of the ``__tablename__`` + and :class:`.Column` arguments present. + + :param always_refresh: If True, all query operations for this mapped + class will overwrite all data within object instances that already + exist within the session, erasing any in-memory changes with + whatever information was loaded from the database. 
Usage of this
+ flag is highly discouraged; as an alternative, see the method
+ :meth:`.Query.populate_existing`.
+
+ :param allow_partial_pks: Defaults to True. Indicates that a
+ composite primary key with some NULL values should be considered as
+ possibly existing within the database. This affects whether a
+ mapper will assign an incoming row to an existing identity, as well
+ as if :meth:`.Session.merge` will check the database first for a
+ particular primary key value. A "partial primary key" can occur if
+ one has mapped to an OUTER JOIN, for example.
+
+ :param batch: Defaults to ``True``, indicating that save operations
+ of multiple entities can be batched together for efficiency.
+ Setting to False indicates
+ that an instance will be fully saved before saving the next
+ instance. This is used in the extremely rare case that a
+ :class:`.MapperEvents` listener requires being called
+ in between individual row persistence operations.
+
+ :param column_prefix: A string which will be prepended
+ to the mapped attribute name when :class:`.Column`
+ objects are automatically assigned as attributes to the
+ mapped class. Does not affect explicitly specified
+ column-based properties.
+
+ See the section :ref:`column_prefix` for an example.
+
+ :param concrete: If True, indicates this mapper should use concrete
+ table inheritance with its parent mapper.
+
+ See the section :ref:`concrete_inheritance` for an example.
+
+ :param confirm_deleted_rows: defaults to True; when a DELETE occurs
+ of one or more rows based on specific primary keys, a warning is
+ emitted when the number of rows matched does not equal the number
+ of rows expected. This parameter may be set to False to handle the
+ case where database ON DELETE CASCADE rules may be deleting some of
+ those rows automatically. The warning may be changed to an
+ exception in a future release.
+
+ .. versionadded:: 0.9.4 - added
+ :paramref:`.mapper.confirm_deleted_rows` as well as conditional
+ matched row checking on delete.
+
+ :param eager_defaults: if True, the ORM will immediately fetch the
+ value of server-generated default values after an INSERT or UPDATE,
+ rather than leaving them as expired to be fetched on next access.
+ This can be used for event schemes where the server-generated values
+ are needed immediately before the flush completes. By default,
+ this scheme will emit an individual ``SELECT`` statement per row
+ inserted or updated, which can add significant performance
+ overhead. However, if the
+ target database supports :term:`RETURNING`, the default values will
+ be returned inline with the INSERT or UPDATE statement, which can
+ greatly enhance performance for an application that needs frequent
+ access to just-generated server defaults.
+
+ .. versionchanged:: 0.9.0 The ``eager_defaults`` option can now
+ make use of :term:`RETURNING` for backends which support it.
+
+ :param exclude_properties: A list or set of string column names to
+ be excluded from mapping.
+
+ See :ref:`include_exclude_cols` for an example.
+
+ :param extension: A :class:`.MapperExtension` instance or
+ list of :class:`.MapperExtension` instances which will be applied
+ to all operations by this :class:`.Mapper`. **Deprecated.**
+ Please see :class:`.MapperEvents`.
+
+ :param include_properties: An inclusive list or set of string column
+ names to map.
+
+ See :ref:`include_exclude_cols` for an example.
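+
+ As a minimal sketch (``User`` and ``user_table`` are hypothetical
+ names here)::
+
+ mapper(User, user_table,
+ include_properties=['user_id', 'user_name'])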
+ + :param inherits: A mapped class or the corresponding :class:`.Mapper` + of one indicating a superclass to which this :class:`.Mapper` + should *inherit* from. The mapped class here must be a subclass + of the other mapper's class. When using Declarative, this argument + is passed automatically as a result of the natural class + hierarchy of the declared classes. + + .. seealso:: + + :ref:`inheritance_toplevel` + + :param inherit_condition: For joined table inheritance, a SQL + expression which will + define how the two tables are joined; defaults to a natural join + between the two tables. + + :param inherit_foreign_keys: When ``inherit_condition`` is used and + the columns present are missing a :class:`.ForeignKey` + configuration, this parameter can be used to specify which columns + are "foreign". In most cases can be left as ``None``. + + :param legacy_is_orphan: Boolean, defaults to ``False``. + When ``True``, specifies that "legacy" orphan consideration + is to be applied to objects mapped by this mapper, which means + that a pending (that is, not persistent) object is auto-expunged + from an owning :class:`.Session` only when it is de-associated + from *all* parents that specify a ``delete-orphan`` cascade towards + this mapper. The new default behavior is that the object is + auto-expunged when it is de-associated with *any* of its parents + that specify ``delete-orphan`` cascade. This behavior is more + consistent with that of a persistent object, and allows behavior to + be consistent in more scenarios independently of whether or not an + orphanable object has been flushed yet or not. + + See the change note and example at :ref:`legacy_is_orphan_addition` + for more detail on this change. + + .. versionadded:: 0.8 - the consideration of a pending object as + an "orphan" has been modified to more closely match the + behavior as that of persistent objects, which is that the object + is expunged from the :class:`.Session` as soon as it is + de-associated from any of its orphan-enabled parents. Previously, + the pending object would be expunged only if de-associated + from all of its orphan-enabled parents. The new flag + ``legacy_is_orphan`` is added to :func:`.orm.mapper` which + re-establishes the legacy behavior. + + :param non_primary: Specify that this :class:`.Mapper` is in addition + to the "primary" mapper, that is, the one used for persistence. + The :class:`.Mapper` created here may be used for ad-hoc + mapping of the class to an alternate selectable, for loading + only. + + :paramref:`.Mapper.non_primary` is not an often used option, but + is useful in some specific :func:`.relationship` cases. + + .. seealso:: + + :ref:`relationship_non_primary_mapper` + + :param order_by: A single :class:`.Column` or list of :class:`.Column` + objects for which selection operations should use as the default + ordering for entities. By default mappers have no pre-defined + ordering. + + :param passive_updates: Indicates UPDATE behavior of foreign key + columns when a primary key column changes on a joined-table + inheritance mapping. Defaults to ``True``. + + When True, it is assumed that ON UPDATE CASCADE is configured on + the foreign key in the database, and that the database will handle + propagation of an UPDATE from a source column to dependent columns + on joined-table rows. + + When False, it is assumed that the database does not enforce + referential integrity and will not be issuing its own CASCADE + operation for an update. 
The unit of work process will + emit an UPDATE statement for the dependent columns during a + primary key change. + + .. seealso:: + + :ref:`passive_updates` - description of a similar feature as + used with :func:`.relationship` + + :param polymorphic_on: Specifies the column, attribute, or + SQL expression used to determine the target class for an + incoming row, when inheriting classes are present. + + This value is commonly a :class:`.Column` object that's + present in the mapped :class:`.Table`:: + + class Employee(Base): + __tablename__ = 'employee' + + id = Column(Integer, primary_key=True) + discriminator = Column(String(50)) + + __mapper_args__ = { + "polymorphic_on":discriminator, + "polymorphic_identity":"employee" + } + + It may also be specified + as a SQL expression, as in this example where we + use the :func:`.case` construct to provide a conditional + approach:: + + class Employee(Base): + __tablename__ = 'employee' + + id = Column(Integer, primary_key=True) + discriminator = Column(String(50)) + + __mapper_args__ = { + "polymorphic_on":case([ + (discriminator == "EN", "engineer"), + (discriminator == "MA", "manager"), + ], else_="employee"), + "polymorphic_identity":"employee" + } + + It may also refer to any attribute + configured with :func:`.column_property`, or to the + string name of one:: + + class Employee(Base): + __tablename__ = 'employee' + + id = Column(Integer, primary_key=True) + discriminator = Column(String(50)) + employee_type = column_property( + case([ + (discriminator == "EN", "engineer"), + (discriminator == "MA", "manager"), + ], else_="employee") + ) + + __mapper_args__ = { + "polymorphic_on":employee_type, + "polymorphic_identity":"employee" + } + + .. versionchanged:: 0.7.4 + ``polymorphic_on`` may be specified as a SQL expression, + or refer to any attribute configured with + :func:`.column_property`, or to the string name of one. + + When setting ``polymorphic_on`` to reference an + attribute or expression that's not present in the + locally mapped :class:`.Table`, yet the value + of the discriminator should be persisted to the database, + the value of the + discriminator is not automatically set on new + instances; this must be handled by the user, + either through manual means or via event listeners. + A typical approach to establishing such a listener + looks like:: + + from sqlalchemy import event + from sqlalchemy.orm import object_mapper + + @event.listens_for(Employee, "init", propagate=True) + def set_identity(instance, *arg, **kw): + mapper = object_mapper(instance) + instance.discriminator = mapper.polymorphic_identity + + Where above, we assign the value of ``polymorphic_identity`` + for the mapped class to the ``discriminator`` attribute, + thus persisting the value to the ``discriminator`` column + in the database. + + .. warning:: + + Currently, **only one discriminator column may be set**, typically + on the base-most class in the hierarchy. "Cascading" polymorphic + columns are not yet supported. + + .. seealso:: + + :ref:`inheritance_toplevel` + + :param polymorphic_identity: Specifies the value which + identifies this particular class as returned by the + column expression referred to by the ``polymorphic_on`` + setting. As rows are received, the value corresponding + to the ``polymorphic_on`` column expression is compared + to this value, indicating which subclass should + be used for the newly reconstructed object. 
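+
+ Continuing the ``Employee`` examples above, a subclass would
+ supply its own value for this setting (sketch)::
+
+ class Engineer(Employee):
+ __mapper_args__ = {
+ "polymorphic_identity":"engineer"
+ }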
+
+ :param properties: A dictionary mapping the string names of object
+ attributes to :class:`.MapperProperty` instances, which define the
+ persistence behavior of that attribute. Note that :class:`.Column`
+ objects present in
+ the mapped :class:`.Table` are automatically placed into
+ ``ColumnProperty`` instances upon mapping, unless overridden.
+ When using Declarative, this argument is passed automatically,
+ based on all those :class:`.MapperProperty` instances declared
+ in the declared class body.
+
+ :param primary_key: A list of :class:`.Column` objects which define
+ the primary key to be used against this mapper's selectable unit.
+ This is normally simply the primary key of the ``local_table``, but
+ can be overridden here.
+
+ :param version_id_col: A :class:`.Column`
+ that will be used to keep a running version id of rows
+ in the table. This is used to detect concurrent updates or
+ the presence of stale data in a flush. The methodology is to
+ detect if an UPDATE statement does not match the last known
+ version id; in that case, a
+ :class:`~sqlalchemy.orm.exc.StaleDataError` exception is
+ thrown.
+ By default, the column must be of :class:`.Integer` type,
+ unless ``version_id_generator`` specifies an alternative version
+ generator.
+
+ .. seealso::
+
+ :ref:`mapper_version_counter` - discussion of version counting
+ and rationale.
+
+ :param version_id_generator: Define how new version ids should
+ be generated. Defaults to ``None``, which indicates that
+ a simple integer counting scheme be employed. To provide a custom
+ versioning scheme, provide a callable function of the form::
+
+ def generate_version(version):
+ return next_version
+
+ Alternatively, server-side versioning functions such as triggers,
+ or programmatic versioning schemes outside of the version id
+ generator may be used, by specifying the value ``False``.
+ Please see :ref:`server_side_version_counter` for a discussion
+ of important points when using this option.
+
+ .. versionadded:: 0.9.0 ``version_id_generator`` supports
+ server-side version number generation.
+
+ .. seealso::
+
+ :ref:`custom_version_counter`
+
+ :ref:`server_side_version_counter`
+
+
+ :param with_polymorphic: A tuple in the form ``(<classes>,
+ <selectable>)`` indicating the default style of "polymorphic"
+ loading, that is, which tables are queried at once. <classes> is
+ any single or list of mappers and/or classes indicating the
+ inherited classes that should be loaded at once. The special value
+ ``'*'`` may be used to indicate all descending classes should be
+ loaded immediately. The second tuple argument <selectable>
+ indicates a selectable that will be used to query for multiple
+ classes.
+
+ .. seealso::
+
+ :ref:`with_polymorphic` - discussion of polymorphic querying
+ techniques.
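+
+ As a minimal sketch, requesting that all subclasses be loaded
+ at once, with no custom selectable::
+
+ __mapper_args__ = {
+ "with_polymorphic": ('*', None)
+ }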
+
+ """
+
+ self.class_ = util.assert_arg_type(class_, type, 'class_')
+
+ self.class_manager = None
+
+ self._primary_key_argument = util.to_list(primary_key)
+ self.non_primary = non_primary
+
+ if order_by is not False:
+ self.order_by = util.to_list(order_by)
+ else:
+ self.order_by = order_by
+
+ self.always_refresh = always_refresh
+
+ if isinstance(version_id_col, MapperProperty):
+ self.version_id_prop = version_id_col
+ self.version_id_col = None
+ else:
+ self.version_id_col = version_id_col
+ if version_id_generator is False:
+ self.version_id_generator = False
+ elif version_id_generator is None:
+ self.version_id_generator = lambda x: (x or 0) + 1
+ else:
+ self.version_id_generator = version_id_generator
+
+ self.concrete = concrete
+ self.single = False
+ self.inherits = inherits
+ self.local_table = local_table
+ self.inherit_condition = inherit_condition
+ self.inherit_foreign_keys = inherit_foreign_keys
+ self._init_properties = properties or {}
+ self._delete_orphans = []
+ self.batch = batch
+ self.eager_defaults = eager_defaults
+ self.column_prefix = column_prefix
+ self.polymorphic_on = expression._clause_element_as_expr(
+ polymorphic_on)
+ self._dependency_processors = []
+ self.validators = util.immutabledict()
+ self.passive_updates = passive_updates
+ self.legacy_is_orphan = legacy_is_orphan
+ self._clause_adapter = None
+ self._requires_row_aliasing = False
+ self._inherits_equated_pairs = None
+ self._memoized_values = {}
+ self._compiled_cache_size = _compiled_cache_size
+ self._reconstructor = None
+ self._deprecated_extensions = util.to_list(extension or [])
+ self.allow_partial_pks = allow_partial_pks
+
+ if self.inherits and not self.concrete:
+ self.confirm_deleted_rows = False
+ else:
+ self.confirm_deleted_rows = confirm_deleted_rows
+
+ self._set_with_polymorphic(with_polymorphic)
+
+ if isinstance(self.local_table, expression.SelectBase):
+ raise sa_exc.InvalidRequestError(
+ "When mapping against a select() construct, map against "
+ "an alias() of the construct instead. "
+ "This is because several databases don't allow a "
+ "SELECT from a subquery that does not have an alias."
+ )
+
+ if self.with_polymorphic and \
+ isinstance(self.with_polymorphic[1],
+ expression.SelectBase):
+ self.with_polymorphic = (self.with_polymorphic[0],
+ self.with_polymorphic[1].alias())
+
+ # our 'polymorphic identity', a string name that when located in a
+ # result set row indicates this Mapper should be used to construct
+ # the object instance for that row.
+ self.polymorphic_identity = polymorphic_identity
+
+ # a dictionary of 'polymorphic identity' names, associating those
+ # names with Mappers that will be used to construct object instances
+ # upon a select operation.
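+ # e.g. for a configured Employee/Engineer hierarchy (hypothetical
+ # classes), the shared map would look roughly like:
+ # {"employee": <Mapper Employee>, "engineer": <Mapper Engineer>}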
+ if _polymorphic_map is None: + self.polymorphic_map = {} + else: + self.polymorphic_map = _polymorphic_map + + if include_properties is not None: + self.include_properties = util.to_set(include_properties) + else: + self.include_properties = None + if exclude_properties: + self.exclude_properties = util.to_set(exclude_properties) + else: + self.exclude_properties = None + + self.configured = False + + # prevent this mapper from being constructed + # while a configure_mappers() is occurring (and defer a + # configure_mappers() until construction succeeds) + _CONFIGURE_MUTEX.acquire() + try: + self.dispatch._events._new_mapper_instance(class_, self) + self._configure_inheritance() + self._configure_legacy_instrument_class() + self._configure_class_instrumentation() + self._configure_listeners() + self._configure_properties() + self._configure_polymorphic_setter() + self._configure_pks() + Mapper._new_mappers = True + self._log("constructed") + self._expire_memoizations() + finally: + _CONFIGURE_MUTEX.release() + + # major attributes initialized at the classlevel so that + # they can be Sphinx-documented. + + is_mapper = True + """Part of the inspection API.""" + + @property + def mapper(self): + """Part of the inspection API. + + Returns self. + + """ + return self + + @property + def entity(self): + """Part of the inspection API. + + Returns self.class\_. + + """ + return self.class_ + + local_table = None + """The :class:`.Selectable` which this :class:`.Mapper` manages. + + Typically is an instance of :class:`.Table` or :class:`.Alias`. + May also be ``None``. + + The "local" table is the + selectable that the :class:`.Mapper` is directly responsible for + managing from an attribute access and flush perspective. For + non-inheriting mappers, the local table is the same as the + "mapped" table. For joined-table inheritance mappers, local_table + will be the particular sub-table of the overall "join" which + this :class:`.Mapper` represents. If this mapper is a + single-table inheriting mapper, local_table will be ``None``. + + .. seealso:: + + :attr:`~.Mapper.mapped_table`. + + """ + + mapped_table = None + """The :class:`.Selectable` to which this :class:`.Mapper` is mapped. + + Typically an instance of :class:`.Table`, :class:`.Join`, or + :class:`.Alias`. + + The "mapped" table is the selectable that + the mapper selects from during queries. For non-inheriting + mappers, the mapped table is the same as the "local" table. + For joined-table inheritance mappers, mapped_table references the + full :class:`.Join` representing full rows for this particular + subclass. For single-table inheritance mappers, mapped_table + references the base table. + + .. seealso:: + + :attr:`~.Mapper.local_table`. + + """ + + inherits = None + """References the :class:`.Mapper` which this :class:`.Mapper` + inherits from, if any. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + configured = None + """Represent ``True`` if this :class:`.Mapper` has been configured. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + .. seealso:: + + :func:`.configure_mappers`. + + """ + + concrete = None + """Represent ``True`` if this :class:`.Mapper` is a concrete + inheritance mapper. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. 
+ + """ + + tables = None + """An iterable containing the collection of :class:`.Table` objects + which this :class:`.Mapper` is aware of. + + If the mapper is mapped to a :class:`.Join`, or an :class:`.Alias` + representing a :class:`.Select`, the individual :class:`.Table` + objects that comprise the full construct will be represented here. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + primary_key = None + """An iterable containing the collection of :class:`.Column` objects + which comprise the 'primary key' of the mapped table, from the + perspective of this :class:`.Mapper`. + + This list is against the selectable in :attr:`~.Mapper.mapped_table`. In + the case of inheriting mappers, some columns may be managed by a + superclass mapper. For example, in the case of a :class:`.Join`, the + primary key is determined by all of the primary key columns across all + tables referenced by the :class:`.Join`. + + The list is also not necessarily the same as the primary key column + collection associated with the underlying tables; the :class:`.Mapper` + features a ``primary_key`` argument that can override what the + :class:`.Mapper` considers as primary key columns. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + class_ = None + """The Python class which this :class:`.Mapper` maps. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + class_manager = None + """The :class:`.ClassManager` which maintains event listeners + and class-bound descriptors for this :class:`.Mapper`. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + single = None + """Represent ``True`` if this :class:`.Mapper` is a single table + inheritance mapper. + + :attr:`~.Mapper.local_table` will be ``None`` if this flag is set. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + non_primary = None + """Represent ``True`` if this :class:`.Mapper` is a "non-primary" + mapper, e.g. a mapper that is used only to selet rows but not for + persistence management. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + polymorphic_on = None + """The :class:`.Column` or SQL expression specified as the + ``polymorphic_on`` argument + for this :class:`.Mapper`, within an inheritance scenario. + + This attribute is normally a :class:`.Column` instance but + may also be an expression, such as one derived from + :func:`.cast`. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + polymorphic_map = None + """A mapping of "polymorphic identity" identifiers mapped to + :class:`.Mapper` instances, within an inheritance scenario. + + The identifiers can be of any type which is comparable to the + type of column represented by :attr:`~.Mapper.polymorphic_on`. + + An inheritance chain of mappers will all reference the same + polymorphic map object. The object is used to correlate incoming + result rows to target mappers. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. 
+ + """ + + polymorphic_identity = None + """Represent an identifier which is matched against the + :attr:`~.Mapper.polymorphic_on` column during result row loading. + + Used only with inheritance, this object can be of any type which is + comparable to the type of column represented by + :attr:`~.Mapper.polymorphic_on`. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + base_mapper = None + """The base-most :class:`.Mapper` in an inheritance chain. + + In a non-inheriting scenario, this attribute will always be this + :class:`.Mapper`. In an inheritance scenario, it references + the :class:`.Mapper` which is parent to all other :class:`.Mapper` + objects in the inheritance chain. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + columns = None + """A collection of :class:`.Column` or other scalar expression + objects maintained by this :class:`.Mapper`. + + The collection behaves the same as that of the ``c`` attribute on + any :class:`.Table` object, except that only those columns included in + this mapping are present, and are keyed based on the attribute name + defined in the mapping, not necessarily the ``key`` attribute of the + :class:`.Column` itself. Additionally, scalar expressions mapped + by :func:`.column_property` are also present here. + + This is a *read only* attribute determined during mapper construction. + Behavior is undefined if directly modified. + + """ + + validators = None + """An immutable dictionary of attributes which have been decorated + using the :func:`~.orm.validates` decorator. + + The dictionary contains string attribute names as keys + mapped to the actual validation method. + + """ + + c = None + """A synonym for :attr:`~.Mapper.columns`.""" + + @util.memoized_property + def _path_registry(self): + return PathRegistry.per_mapper(self) + + def _configure_inheritance(self): + """Configure settings related to inherting and/or inherited mappers + being present.""" + + # a set of all mappers which inherit from this one. + self._inheriting_mappers = util.WeakSequence() + + if self.inherits: + if isinstance(self.inherits, type): + self.inherits = class_mapper(self.inherits, configure=False) + if not issubclass(self.class_, self.inherits.class_): + raise sa_exc.ArgumentError( + "Class '%s' does not inherit from '%s'" % + (self.class_.__name__, self.inherits.class_.__name__)) + if self.non_primary != self.inherits.non_primary: + np = not self.non_primary and "primary" or "non-primary" + raise sa_exc.ArgumentError( + "Inheritance of %s mapper for class '%s' is " + "only allowed from a %s mapper" % + (np, self.class_.__name__, np)) + # inherit_condition is optional. 
+ if self.local_table is None: + self.local_table = self.inherits.local_table + self.mapped_table = self.inherits.mapped_table + self.single = True + elif self.local_table is not self.inherits.local_table: + if self.concrete: + self.mapped_table = self.local_table + for mapper in self.iterate_to_root(): + if mapper.polymorphic_on is not None: + mapper._requires_row_aliasing = True + else: + if self.inherit_condition is None: + # figure out inherit condition from our table to the + # immediate table of the inherited mapper, not its + # full table which could pull in other stuff we don't + # want (allows test/inheritance.InheritTest4 to pass) + self.inherit_condition = sql_util.join_condition( + self.inherits.local_table, + self.local_table) + self.mapped_table = sql.join( + self.inherits.mapped_table, + self.local_table, + self.inherit_condition) + + fks = util.to_set(self.inherit_foreign_keys) + self._inherits_equated_pairs = \ + sql_util.criterion_as_pairs( + self.mapped_table.onclause, + consider_as_foreign_keys=fks) + else: + self.mapped_table = self.local_table + + if self.polymorphic_identity is not None and not self.concrete: + self._identity_class = self.inherits._identity_class + else: + self._identity_class = self.class_ + + if self.version_id_col is None: + self.version_id_col = self.inherits.version_id_col + self.version_id_generator = self.inherits.version_id_generator + elif self.inherits.version_id_col is not None and \ + self.version_id_col is not self.inherits.version_id_col: + util.warn( + "Inheriting version_id_col '%s' does not match inherited " + "version_id_col '%s' and will not automatically populate " + "the inherited versioning column. " + "version_id_col should only be specified on " + "the base-most mapper that includes versioning." % + (self.version_id_col.description, + self.inherits.version_id_col.description) + ) + + if self.order_by is False and \ + not self.concrete and \ + self.inherits.order_by is not False: + self.order_by = self.inherits.order_by + + self.polymorphic_map = self.inherits.polymorphic_map + self.batch = self.inherits.batch + self.inherits._inheriting_mappers.append(self) + self.base_mapper = self.inherits.base_mapper + self.passive_updates = self.inherits.passive_updates + self._all_tables = self.inherits._all_tables + + if self.polymorphic_identity is not None: + if self.polymorphic_identity in self.polymorphic_map: + util.warn( + "Reassigning polymorphic association for identity %r " + "from %r to %r: Check for duplicate use of %r as " + "value for polymorphic_identity." % + (self.polymorphic_identity, + self.polymorphic_map[self.polymorphic_identity], + self, self.polymorphic_identity) + ) + self.polymorphic_map[self.polymorphic_identity] = self + + else: + self._all_tables = set() + self.base_mapper = self + self.mapped_table = self.local_table + if self.polymorphic_identity is not None: + self.polymorphic_map[self.polymorphic_identity] = self + self._identity_class = self.class_ + + if self.mapped_table is None: + raise sa_exc.ArgumentError( + "Mapper '%s' does not have a mapped_table specified." 
+                % self)
+
+    def _set_with_polymorphic(self, with_polymorphic):
+        if with_polymorphic == '*':
+            self.with_polymorphic = ('*', None)
+        elif isinstance(with_polymorphic, (tuple, list)):
+            if isinstance(
+                    with_polymorphic[0], util.string_types + (tuple, list)):
+                self.with_polymorphic = with_polymorphic
+            else:
+                self.with_polymorphic = (with_polymorphic, None)
+        elif with_polymorphic is not None:
+            raise sa_exc.ArgumentError("Invalid setting for with_polymorphic")
+        else:
+            self.with_polymorphic = None
+
+        if isinstance(self.local_table, expression.SelectBase):
+            raise sa_exc.InvalidRequestError(
+                "When mapping against a select() construct, map against "
+                "an alias() of the construct instead. This is because "
+                "several databases don't allow a SELECT from a subquery "
+                "that does not have an alias."
+            )
+
+        if self.with_polymorphic and \
+                isinstance(self.with_polymorphic[1],
+                           expression.SelectBase):
+            self.with_polymorphic = (self.with_polymorphic[0],
+                                     self.with_polymorphic[1].alias())
+        if self.configured:
+            self._expire_memoizations()
+
+    def _set_concrete_base(self, mapper):
+        """Set the given :class:`.Mapper` as the 'inherits' for this
+        :class:`.Mapper`, assuming this :class:`.Mapper` is concrete
+        and does not already have an inherits."""
+
+        assert self.concrete
+        assert not self.inherits
+        assert isinstance(mapper, Mapper)
+        self.inherits = mapper
+        self.inherits.polymorphic_map.update(self.polymorphic_map)
+        self.polymorphic_map = self.inherits.polymorphic_map
+        for mapper in self.iterate_to_root():
+            if mapper.polymorphic_on is not None:
+                mapper._requires_row_aliasing = True
+        self.batch = self.inherits.batch
+        for mp in self.self_and_descendants:
+            mp.base_mapper = self.inherits.base_mapper
+        self.inherits._inheriting_mappers.append(self)
+        self.passive_updates = self.inherits.passive_updates
+        self._all_tables = self.inherits._all_tables
+        for key, prop in mapper._props.items():
+            if key not in self._props and \
+                not self._should_exclude(key, key, local=False,
+                                         column=None):
+                self._adapt_inherited_property(key, prop, False)
+
+    def _set_polymorphic_on(self, polymorphic_on):
+        self.polymorphic_on = polymorphic_on
+        self._configure_polymorphic_setter(True)
+
+    def _configure_legacy_instrument_class(self):
+
+        if self.inherits:
+            self.dispatch._update(self.inherits.dispatch)
+            super_extensions = set(
+                chain(*[m._deprecated_extensions
+                        for m in self.inherits.iterate_to_root()]))
+        else:
+            super_extensions = set()
+
+        for ext in self._deprecated_extensions:
+            if ext not in super_extensions:
+                ext._adapt_instrument_class(self, ext)
+
+    def _configure_listeners(self):
+        if self.inherits:
+            super_extensions = set(
+                chain(*[m._deprecated_extensions
+                        for m in self.inherits.iterate_to_root()]))
+        else:
+            super_extensions = set()
+
+        for ext in self._deprecated_extensions:
+            if ext not in super_extensions:
+                ext._adapt_listener(self, ext)
+
+    def _configure_class_instrumentation(self):
+        """If this mapper is to be a primary mapper (i.e. the
+        non_primary flag is not set), associate this Mapper with the
+        given class_ and entity name.
+
+        Subsequent calls to ``class_mapper()`` for the class_/entity
+        name combination will return this mapper.  Also decorate the
+        `__init__` method on the mapped class to include optional
+        auto-session attachment logic.
+
+        """
+
+        manager = attributes.manager_of_class(self.class_)
+
+        if self.non_primary:
+            if not manager or not manager.is_mapped:
+                raise sa_exc.InvalidRequestError(
+                    "Class %s has no primary mapper configured.  Configure "
+                    "a primary mapper first before setting up a non primary "
+                    "Mapper." % self.class_)
+            self.class_manager = manager
+            self._identity_class = manager.mapper._identity_class
+            _mapper_registry[self] = True
+            return
+
+        if manager is not None:
+            assert manager.class_ is self.class_
+            if manager.is_mapped:
+                raise sa_exc.ArgumentError(
+                    "Class '%s' already has a primary mapper defined. "
+                    "Use non_primary=True to "
+                    "create a non primary Mapper.  clear_mappers() will "
+                    "remove *all* current mappers from all classes." %
+                    self.class_)
+            # else:
+            #     a ClassManager may already exist as
+            #     ClassManager.instrument_attribute() creates
+            #     new managers for each subclass if they don't yet exist.
+
+        _mapper_registry[self] = True
+
+        # note: this *must be called before instrumentation.register_class*
+        # to maintain the documented behavior of instrument_class
+        self.dispatch.instrument_class(self, self.class_)
+
+        if manager is None:
+            manager = instrumentation.register_class(self.class_)
+
+        self.class_manager = manager
+
+        manager.mapper = self
+        manager.deferred_scalar_loader = util.partial(
+            loading.load_scalar_attributes, self)
+
+        # The remaining members can be added by any mapper,
+        # e_name None or not.
+        if manager.info.get(_INSTRUMENTOR, False):
+            return
+
+        event.listen(manager, 'first_init', _event_on_first_init, raw=True)
+        event.listen(manager, 'init', _event_on_init, raw=True)
+
+        for key, method in util.iterate_attributes(self.class_):
+            if isinstance(method, types.FunctionType):
+                if hasattr(method, '__sa_reconstructor__'):
+                    self._reconstructor = method
+                    event.listen(manager, 'load', _event_on_load, raw=True)
+                elif hasattr(method, '__sa_validators__'):
+                    validation_opts = method.__sa_validation_opts__
+                    for name in method.__sa_validators__:
+                        self.validators = self.validators.union(
+                            {name: (method, validation_opts)}
+                        )
+
+        manager.info[_INSTRUMENTOR] = self
+
+    @classmethod
+    def _configure_all(cls):
+        """Class-level path to the :func:`.configure_mappers` call.
+        """
+        configure_mappers()
+
+    def dispose(self):
+        # Disable any attribute-based compilation.
+        self.configured = True
+
+        if hasattr(self, '_configure_failed'):
+            del self._configure_failed
+
+        if not self.non_primary and \
+                self.class_manager is not None and \
+                self.class_manager.is_mapped and \
+                self.class_manager.mapper is self:
+            instrumentation.unregister_class(self.class_)
+
+    def _configure_pks(self):
+
+        self.tables = sql_util.find_tables(self.mapped_table)
+
+        self._pks_by_table = {}
+        self._cols_by_table = {}
+
+        all_cols = util.column_set(chain(*[
+            col.proxy_set for col in
+            self._columntoproperty]))
+
+        pk_cols = util.column_set(c for c in all_cols if c.primary_key)
+
+        # identify primary key columns which are also mapped by this mapper.
+ tables = set(self.tables + [self.mapped_table]) + self._all_tables.update(tables) + for t in tables: + if t.primary_key and pk_cols.issuperset(t.primary_key): + # ordering is important since it determines the ordering of + # mapper.primary_key (and therefore query.get()) + self._pks_by_table[t] = \ + util.ordered_column_set(t.primary_key).\ + intersection(pk_cols) + self._cols_by_table[t] = \ + util.ordered_column_set(t.c).\ + intersection(all_cols) + + # if explicit PK argument sent, add those columns to the + # primary key mappings + if self._primary_key_argument: + for k in self._primary_key_argument: + if k.table not in self._pks_by_table: + self._pks_by_table[k.table] = util.OrderedSet() + self._pks_by_table[k.table].add(k) + + # otherwise, see that we got a full PK for the mapped table + elif self.mapped_table not in self._pks_by_table or \ + len(self._pks_by_table[self.mapped_table]) == 0: + raise sa_exc.ArgumentError( + "Mapper %s could not assemble any primary " + "key columns for mapped table '%s'" % + (self, self.mapped_table.description)) + elif self.local_table not in self._pks_by_table and \ + isinstance(self.local_table, schema.Table): + util.warn("Could not assemble any primary " + "keys for locally mapped table '%s' - " + "no rows will be persisted in this Table." + % self.local_table.description) + + if self.inherits and \ + not self.concrete and \ + not self._primary_key_argument: + # if inheriting, the "primary key" for this mapper is + # that of the inheriting (unless concrete or explicit) + self.primary_key = self.inherits.primary_key + else: + # determine primary key from argument or mapped_table pks - + # reduce to the minimal set of columns + if self._primary_key_argument: + primary_key = sql_util.reduce_columns( + [self.mapped_table.corresponding_column(c) for c in + self._primary_key_argument], + ignore_nonexistent_tables=True) + else: + primary_key = sql_util.reduce_columns( + self._pks_by_table[self.mapped_table], + ignore_nonexistent_tables=True) + + if len(primary_key) == 0: + raise sa_exc.ArgumentError( + "Mapper %s could not assemble any primary " + "key columns for mapped table '%s'" % + (self, self.mapped_table.description)) + + self.primary_key = tuple(primary_key) + self._log("Identified primary key columns: %s", primary_key) + + # determine cols that aren't expressed within our tables; mark these + # as "read only" properties which are refreshed upon INSERT/UPDATE + self._readonly_props = set( + self._columntoproperty[col] + for col in self._columntoproperty + if self._columntoproperty[col] not in self._identity_key_props and + (not hasattr(col, 'table') or + col.table not in self._cols_by_table)) + + def _configure_properties(self): + + # Column and other ClauseElement objects which are mapped + self.columns = self.c = util.OrderedProperties() + + # object attribute names mapped to MapperProperty objects + self._props = util.OrderedDict() + + # table columns mapped to lists of MapperProperty objects + # using a list allows a single column to be defined as + # populating multiple object attributes + self._columntoproperty = _ColumnMapping(self) + + # load custom properties + if self._init_properties: + for key, prop in self._init_properties.items(): + self._configure_property(key, prop, False) + + # pull properties from the inherited mapper if any. 
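+        # e.g. (hypothetical classes): a Manager mapper inheriting from
+        # an Employee mapper picks up Employee's 'name' ColumnProperty
+        # here, unless 'name' is explicitly configured or excluded on
+        # Manager itself.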
+ if self.inherits: + for key, prop in self.inherits._props.items(): + if key not in self._props and \ + not self._should_exclude(key, key, local=False, + column=None): + self._adapt_inherited_property(key, prop, False) + + # create properties for each column in the mapped table, + # for those columns which don't already map to a property + for column in self.mapped_table.columns: + if column in self._columntoproperty: + continue + + column_key = (self.column_prefix or '') + column.key + + if self._should_exclude( + column.key, column_key, + local=self.local_table.c.contains_column(column), + column=column + ): + continue + + # adjust the "key" used for this column to that + # of the inheriting mapper + for mapper in self.iterate_to_root(): + if column in mapper._columntoproperty: + column_key = mapper._columntoproperty[column].key + + self._configure_property(column_key, + column, + init=False, + setparent=True) + + def _configure_polymorphic_setter(self, init=False): + """Configure an attribute on the mapper representing the + 'polymorphic_on' column, if applicable, and not + already generated by _configure_properties (which is typical). + + Also create a setter function which will assign this + attribute to the value of the 'polymorphic_identity' + upon instance construction, also if applicable. This + routine will run when an instance is created. + + """ + setter = False + + if self.polymorphic_on is not None: + setter = True + + if isinstance(self.polymorphic_on, util.string_types): + # polymorphic_on specified as a string - link + # it to mapped ColumnProperty + try: + self.polymorphic_on = self._props[self.polymorphic_on] + except KeyError: + raise sa_exc.ArgumentError( + "Can't determine polymorphic_on " + "value '%s' - no attribute is " + "mapped to this name." % self.polymorphic_on) + + if self.polymorphic_on in self._columntoproperty: + # polymorphic_on is a column that is already mapped + # to a ColumnProperty + prop = self._columntoproperty[self.polymorphic_on] + polymorphic_key = prop.key + self.polymorphic_on = prop.columns[0] + polymorphic_key = prop.key + elif isinstance(self.polymorphic_on, MapperProperty): + # polymorphic_on is directly a MapperProperty, + # ensure it's a ColumnProperty + if not isinstance(self.polymorphic_on, + properties.ColumnProperty): + raise sa_exc.ArgumentError( + "Only direct column-mapped " + "property or SQL expression " + "can be passed for polymorphic_on") + prop = self.polymorphic_on + self.polymorphic_on = prop.columns[0] + polymorphic_key = prop.key + elif not expression._is_column(self.polymorphic_on): + # polymorphic_on is not a Column and not a ColumnProperty; + # not supported right now. + raise sa_exc.ArgumentError( + "Only direct column-mapped " + "property or SQL expression " + "can be passed for polymorphic_on" + ) + else: + # polymorphic_on is a Column or SQL expression and + # doesn't appear to be mapped. this means it can be 1. + # only present in the with_polymorphic selectable or + # 2. a totally standalone SQL expression which we'd + # hope is compatible with this mapper's mapped_table + col = self.mapped_table.corresponding_column( + self.polymorphic_on) + if col is None: + # polymorphic_on doesn't derive from any + # column/expression isn't present in the mapped + # table. we will make a "hidden" ColumnProperty + # for it. 
Just check that if it's directly a
+                    # schema.Column and we have with_polymorphic, it's
+                    # likely a user error if the schema.Column isn't
+                    # represented somehow in either mapped_table or
+                    # with_polymorphic.   Otherwise as of 0.7.4 we
+                    # just go with it and assume the user wants it
+                    # that way (i.e. a CASE statement)
+                    setter = False
+                    instrument = False
+                    col = self.polymorphic_on
+                    if isinstance(col, schema.Column) and (
+                            self.with_polymorphic is None or
+                            self.with_polymorphic[1].
+                            corresponding_column(col) is None):
+                        raise sa_exc.InvalidRequestError(
+                            "Could not map polymorphic_on column "
+                            "'%s' to the mapped table - polymorphic "
+                            "loads will not function properly"
+                            % col.description)
+                else:
+                    # column/expression that polymorphic_on derives from
+                    # is present in our mapped table
+                    # and is probably mapped, but polymorphic_on itself
+                    # is not.  This happens when the polymorphic_on is
+                    # only directly present in the with_polymorphic
+                    # selectable, as when using polymorphic_union.
+                    # we'll make a separate ColumnProperty for it.
+                    instrument = True
+                key = getattr(col, 'key', None)
+                if key:
+                    if self._should_exclude(col.key, col.key, False, col):
+                        raise sa_exc.InvalidRequestError(
+                            "Cannot exclude or override the "
+                            "discriminator column %r" %
+                            col.key)
+                else:
+                    self.polymorphic_on = col = \
+                        col.label("_sa_polymorphic_on")
+                    key = col.key
+
+                self._configure_property(
+                    key,
+                    properties.ColumnProperty(col,
+                                              _instrument=instrument),
+                    init=init, setparent=True)
+                polymorphic_key = key
+        else:
+            # no polymorphic_on was set.
+            # check inheriting mappers for one.
+            for mapper in self.iterate_to_root():
+                # determine if polymorphic_on of the parent
+                # should be propagated here.  If the col
+                # is present in our mapped table, or if our mapped
+                # table is the same as the parent (i.e. single table
+                # inheritance), we can use it
+                if mapper.polymorphic_on is not None:
+                    if self.mapped_table is mapper.mapped_table:
+                        self.polymorphic_on = mapper.polymorphic_on
+                    else:
+                        self.polymorphic_on = \
+                            self.mapped_table.corresponding_column(
+                                mapper.polymorphic_on)
+                    # we can use the parent mapper's _set_polymorphic_identity
+                    # directly; it ensures the polymorphic_identity of the
+                    # instance's mapper is used so is portable to subclasses.
+ if self.polymorphic_on is not None: + self._set_polymorphic_identity = \ + mapper._set_polymorphic_identity + self._validate_polymorphic_identity = \ + mapper._validate_polymorphic_identity + else: + self._set_polymorphic_identity = None + return + + if setter: + def _set_polymorphic_identity(state): + dict_ = state.dict + state.get_impl(polymorphic_key).set( + state, dict_, + state.manager.mapper.polymorphic_identity, + None) + + def _validate_polymorphic_identity(mapper, state, dict_): + if polymorphic_key in dict_ and \ + dict_[polymorphic_key] not in \ + mapper._acceptable_polymorphic_identities: + util.warn_limited( + "Flushing object %s with " + "incompatible polymorphic identity %r; the " + "object may not refresh and/or load correctly", + (state_str(state), dict_[polymorphic_key]) + ) + + self._set_polymorphic_identity = _set_polymorphic_identity + self._validate_polymorphic_identity = \ + _validate_polymorphic_identity + else: + self._set_polymorphic_identity = None + + _validate_polymorphic_identity = None + + @_memoized_configured_property + def _version_id_prop(self): + if self.version_id_col is not None: + return self._columntoproperty[self.version_id_col] + else: + return None + + @_memoized_configured_property + def _acceptable_polymorphic_identities(self): + identities = set() + + stack = deque([self]) + while stack: + item = stack.popleft() + if item.mapped_table is self.mapped_table: + identities.add(item.polymorphic_identity) + stack.extend(item._inheriting_mappers) + + return identities + + @_memoized_configured_property + def _prop_set(self): + return frozenset(self._props.values()) + + def _adapt_inherited_property(self, key, prop, init): + if not self.concrete: + self._configure_property(key, prop, init=False, setparent=False) + elif key not in self._props: + self._configure_property( + key, + properties.ConcreteInheritedProperty(), + init=init, setparent=True) + + def _configure_property(self, key, prop, init=True, setparent=True): + self._log("_configure_property(%s, %s)", key, prop.__class__.__name__) + + if not isinstance(prop, MapperProperty): + prop = self._property_from_column(key, prop) + + if isinstance(prop, properties.ColumnProperty): + col = self.mapped_table.corresponding_column(prop.columns[0]) + + # if the column is not present in the mapped table, + # test if a column has been added after the fact to the + # parent table (or their parent, etc.) [ticket:1570] + if col is None and self.inherits: + path = [self] + for m in self.inherits.iterate_to_root(): + col = m.local_table.corresponding_column(prop.columns[0]) + if col is not None: + for m2 in path: + m2.mapped_table._reset_exported() + col = self.mapped_table.corresponding_column( + prop.columns[0]) + break + path.append(m) + + # subquery expression, column not present in the mapped + # selectable. + if col is None: + col = prop.columns[0] + + # column is coming in after _readonly_props was + # initialized; check for 'readonly' + if hasattr(self, '_readonly_props') and \ + (not hasattr(col, 'table') or + col.table not in self._cols_by_table): + self._readonly_props.add(prop) + + else: + # if column is coming in after _cols_by_table was + # initialized, ensure the col is in the right set + if hasattr(self, '_cols_by_table') and \ + col.table in self._cols_by_table and \ + col not in self._cols_by_table[col.table]: + self._cols_by_table[col.table].add(col) + + # if this properties.ColumnProperty represents the "polymorphic + # discriminator" column, mark it. 
We'll need this when rendering
+            # columns in SELECT statements.
+            if not hasattr(prop, '_is_polymorphic_discriminator'):
+                prop._is_polymorphic_discriminator = \
+                    (col is self.polymorphic_on or
+                     prop.columns[0] is self.polymorphic_on)
+
+            self.columns[key] = col
+            for col in prop.columns + prop._orig_columns:
+                for col in col.proxy_set:
+                    self._columntoproperty[col] = prop
+
+        prop.key = key
+
+        if setparent:
+            prop.set_parent(self, init)
+
+        if key in self._props and \
+                getattr(self._props[key], '_mapped_by_synonym', False):
+            syn = self._props[key]._mapped_by_synonym
+            raise sa_exc.ArgumentError(
+                "Can't call map_column=True for synonym %r=%r, "
+                "a ColumnProperty already exists keyed to the name "
+                "%r for column %r" % (syn, key, key, syn)
+            )
+
+        if key in self._props and \
+                not isinstance(prop, properties.ColumnProperty) and \
+                not isinstance(self._props[key], properties.ColumnProperty):
+            util.warn("Property %s on %s being replaced with new "
+                      "property %s; the old property will be discarded" % (
+                          self._props[key],
+                          self,
+                          prop,
+                      ))
+            oldprop = self._props[key]
+            self._path_registry.pop(oldprop, None)
+
+        self._props[key] = prop
+
+        if not self.non_primary:
+            prop.instrument_class(self)
+
+        for mapper in self._inheriting_mappers:
+            mapper._adapt_inherited_property(key, prop, init)
+
+        if init:
+            prop.init()
+            prop.post_instrument_class(self)
+
+        if self.configured:
+            self._expire_memoizations()
+
+    def _property_from_column(self, key, prop):
+        """generate/update a :class:`.ColumnProperty` given a
+        :class:`.Column` object. """
+
+        # we were passed a Column or a list of Columns;
+        # generate a properties.ColumnProperty
+        columns = util.to_list(prop)
+        column = columns[0]
+        if not expression._is_column(column):
+            raise sa_exc.ArgumentError(
+                "%s=%r is not an instance of MapperProperty or Column"
+                % (key, prop))
+
+        prop = self._props.get(key, None)
+
+        if isinstance(prop, properties.ColumnProperty):
+            if (
+                not self._inherits_equated_pairs or
+                (prop.columns[0], column) not in self._inherits_equated_pairs
+            ) and \
+                    not prop.columns[0].shares_lineage(column) and \
+                    prop.columns[0] is not self.version_id_col and \
+                    column is not self.version_id_col:
+                warn_only = prop.parent is not self
+                msg = ("Implicitly combining column %s with column "
+                       "%s under attribute '%s'.  Please configure one "
+                       "or more attributes for these same-named columns "
+                       "explicitly." % (prop.columns[-1], column, key))
+                if warn_only:
+                    util.warn(msg)
+                else:
+                    raise sa_exc.InvalidRequestError(msg)
+
+            # existing properties.ColumnProperty from an inheriting
+            # mapper. make a copy and append our column to it
+            prop = prop.copy()
+            prop.columns.insert(0, column)
+            self._log("inserting column to existing list "
+                      "in properties.ColumnProperty %s" % (key))
+            return prop
+        elif prop is None or isinstance(prop,
+                                        properties.ConcreteInheritedProperty):
+            mapped_column = []
+            for c in columns:
+                mc = self.mapped_table.corresponding_column(c)
+                if mc is None:
+                    mc = self.local_table.corresponding_column(c)
+                    if mc is not None:
+                        # if the column is in the local table but not the
+                        # mapped table, this corresponds to adding a
+                        # column after the fact to the local table.
+                        # [ticket:1523]
+                        self.mapped_table._reset_exported()
+                    mc = self.mapped_table.corresponding_column(c)
+                    if mc is None:
+                        raise sa_exc.ArgumentError(
+                            "When configuring property '%s' on %s, "
+                            "column '%s' is not represented in the mapper's "
+                            "table.  Use the `column_property()` function to "
+                            "force this column to be mapped as a read-only "
+                            "attribute." % (key, self, c))
+                mapped_column.append(mc)
+            return properties.ColumnProperty(*mapped_column)
+        else:
+            raise sa_exc.ArgumentError(
+                "WARNING: when configuring property '%s' on %s, "
+                "column '%s' conflicts with property '%r'. "
+                "To resolve this, map the column to the class under a "
+                "different name in the 'properties' dictionary.  Or, "
+                "to remove all awareness of the column entirely "
+                "(including its availability as a foreign key), "
+                "use the 'include_properties' or 'exclude_properties' "
+                "mapper arguments to control specifically which table "
+                "columns get mapped." %
+                (key, self, column.key, prop))
+
+    def _post_configure_properties(self):
+        """Call the ``init()`` method on all ``MapperProperties``
+        attached to this mapper.
+
+        This is a deferred configuration step which is intended
+        to execute once all mappers have been constructed.
+
+        """
+
+        self._log("_post_configure_properties() started")
+        l = [(key, prop) for key, prop in self._props.items()]
+        for key, prop in l:
+            self._log("initialize prop %s", key)
+
+            if prop.parent is self and not prop._configure_started:
+                prop.init()
+
+            if prop._configure_finished:
+                prop.post_instrument_class(self)
+
+        self._log("_post_configure_properties() complete")
+        self.configured = True
+
+    def add_properties(self, dict_of_properties):
+        """Add the given dictionary of properties to this mapper,
+        using `add_property`.
+
+        """
+        for key, value in dict_of_properties.items():
+            self.add_property(key, value)
+
+    def add_property(self, key, prop):
+        """Add an individual MapperProperty to this mapper.
+
+        If the mapper has not been configured yet, just adds the
+        property to the initial properties dictionary sent to the
+        constructor.  If this Mapper has already been configured, then
+        the given MapperProperty is configured immediately.
+
+        """
+        self._init_properties[key] = prop
+        self._configure_property(key, prop, init=self.configured)
+
+    def _expire_memoizations(self):
+        for mapper in self.iterate_to_root():
+            _memoized_configured_property.expire_instance(mapper)
+
+    @property
+    def _log_desc(self):
+        return "(" + self.class_.__name__ + \
+            "|" + \
+            (self.local_table is not None and
+             self.local_table.description or
+             str(self.local_table)) + \
+            (self.non_primary and
+             "|non-primary" or "") + ")"
+
+    def _log(self, msg, *args):
+        self.logger.info(
+            "%s " + msg, *((self._log_desc,) + args)
+        )
+
+    def _log_debug(self, msg, *args):
+        self.logger.debug(
+            "%s " + msg, *((self._log_desc,) + args)
+        )
+
+    def __repr__(self):
+        return '<Mapper at 0x%x; %s>' % (
+            id(self), self.class_.__name__)
+
+    def __str__(self):
+        return "Mapper|%s|%s%s" % (
+            self.class_.__name__,
+            self.local_table is not None and
+            self.local_table.description or None,
+            self.non_primary and "|non-primary" or ""
+        )
+
+    def _is_orphan(self, state):
+        orphan_possible = False
+        for mapper in self.iterate_to_root():
+            for (key, cls) in mapper._delete_orphans:
+                orphan_possible = True
+
+                has_parent = attributes.manager_of_class(cls).has_parent(
+                    state, key, optimistic=state.has_identity)
+
+                if self.legacy_is_orphan and has_parent:
+                    return False
+                elif not self.legacy_is_orphan and not has_parent:
+                    return True
+
+        if self.legacy_is_orphan:
+            return orphan_possible
+        else:
+            return False
+
+    def has_property(self, key):
+        return key in self._props
+
+    def get_property(self, key, _configure_mappers=True):
+        """return a MapperProperty associated with the given key.
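+
+        e.g., given a hypothetical mapped class ``User`` with a mapped
+        attribute ``name``::
+
+            from sqlalchemy import inspect
+
+            name_property = inspect(User).get_property('name')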
+ """ + + if _configure_mappers and Mapper._new_mappers: + configure_mappers() + + try: + return self._props[key] + except KeyError: + raise sa_exc.InvalidRequestError( + "Mapper '%s' has no property '%s'" % (self, key)) + + def get_property_by_column(self, column): + """Given a :class:`.Column` object, return the + :class:`.MapperProperty` which maps this column.""" + + return self._columntoproperty[column] + + @property + def iterate_properties(self): + """return an iterator of all MapperProperty objects.""" + if Mapper._new_mappers: + configure_mappers() + return iter(self._props.values()) + + def _mappers_from_spec(self, spec, selectable): + """given a with_polymorphic() argument, return the set of mappers it + represents. + + Trims the list of mappers to just those represented within the given + selectable, if present. This helps some more legacy-ish mappings. + + """ + if spec == '*': + mappers = list(self.self_and_descendants) + elif spec: + mappers = set() + for m in util.to_list(spec): + m = _class_to_mapper(m) + if not m.isa(self): + raise sa_exc.InvalidRequestError( + "%r does not inherit from %r" % + (m, self)) + + if selectable is None: + mappers.update(m.iterate_to_root()) + else: + mappers.add(m) + mappers = [m for m in self.self_and_descendants if m in mappers] + else: + mappers = [] + + if selectable is not None: + tables = set(sql_util.find_tables(selectable, + include_aliases=True)) + mappers = [m for m in mappers if m.local_table in tables] + return mappers + + def _selectable_from_mappers(self, mappers, innerjoin): + """given a list of mappers (assumed to be within this mapper's + inheritance hierarchy), construct an outerjoin amongst those mapper's + mapped tables. + + """ + from_obj = self.mapped_table + for m in mappers: + if m is self: + continue + if m.concrete: + raise sa_exc.InvalidRequestError( + "'with_polymorphic()' requires 'selectable' argument " + "when concrete-inheriting mappers are used.") + elif not m.single: + if innerjoin: + from_obj = from_obj.join(m.local_table, + m.inherit_condition) + else: + from_obj = from_obj.outerjoin(m.local_table, + m.inherit_condition) + + return from_obj + + @_memoized_configured_property + def _single_table_criterion(self): + if self.single and \ + self.inherits and \ + self.polymorphic_on is not None: + return self.polymorphic_on.in_( + m.polymorphic_identity + for m in self.self_and_descendants) + else: + return None + + @_memoized_configured_property + def _with_polymorphic_mappers(self): + if Mapper._new_mappers: + configure_mappers() + if not self.with_polymorphic: + return [] + return self._mappers_from_spec(*self.with_polymorphic) + + @_memoized_configured_property + def _with_polymorphic_selectable(self): + if not self.with_polymorphic: + return self.mapped_table + + spec, selectable = self.with_polymorphic + if selectable is not None: + return selectable + else: + return self._selectable_from_mappers( + self._mappers_from_spec(spec, selectable), + False) + + with_polymorphic_mappers = _with_polymorphic_mappers + """The list of :class:`.Mapper` objects included in the + default "polymorphic" query. 
+ + """ + + @_memoized_configured_property + def _insert_cols_as_none(self): + return dict( + ( + table, + frozenset( + col.key for col in columns + if not col.primary_key and + not col.server_default and not col.default) + ) + for table, columns in self._cols_by_table.items() + ) + + @_memoized_configured_property + def _propkey_to_col(self): + return dict( + ( + table, + dict( + (self._columntoproperty[col].key, col) + for col in columns + ) + ) + for table, columns in self._cols_by_table.items() + ) + + @_memoized_configured_property + def _pk_keys_by_table(self): + return dict( + ( + table, + frozenset([col.key for col in pks]) + ) + for table, pks in self._pks_by_table.items() + ) + + @_memoized_configured_property + def _server_default_cols(self): + return dict( + ( + table, + frozenset([ + col.key for col in columns + if col.server_default is not None]) + ) + for table, columns in self._cols_by_table.items() + ) + + @_memoized_configured_property + def _server_onupdate_default_cols(self): + return dict( + ( + table, + frozenset([ + col.key for col in columns + if col.server_onupdate is not None]) + ) + for table, columns in self._cols_by_table.items() + ) + + @property + def selectable(self): + """The :func:`.select` construct this :class:`.Mapper` selects from + by default. + + Normally, this is equivalent to :attr:`.mapped_table`, unless + the ``with_polymorphic`` feature is in use, in which case the + full "polymorphic" selectable is returned. + + """ + return self._with_polymorphic_selectable + + def _with_polymorphic_args(self, spec=None, selectable=False, + innerjoin=False): + if self.with_polymorphic: + if not spec: + spec = self.with_polymorphic[0] + if selectable is False: + selectable = self.with_polymorphic[1] + elif selectable is False: + selectable = None + mappers = self._mappers_from_spec(spec, selectable) + if selectable is not None: + return mappers, selectable + else: + return mappers, self._selectable_from_mappers(mappers, + innerjoin) + + @_memoized_configured_property + def _polymorphic_properties(self): + return list(self._iterate_polymorphic_properties( + self._with_polymorphic_mappers)) + + def _iterate_polymorphic_properties(self, mappers=None): + """Return an iterator of MapperProperty objects which will render into + a SELECT.""" + if mappers is None: + mappers = self._with_polymorphic_mappers + + if not mappers: + for c in self.iterate_properties: + yield c + else: + # in the polymorphic case, filter out discriminator columns + # from other mappers, as these are sometimes dependent on that + # mapper's polymorphic selectable (which we don't want rendered) + for c in util.unique_list( + chain(*[ + list(mapper.iterate_properties) for mapper in + [self] + mappers + ]) + ): + if getattr(c, '_is_polymorphic_discriminator', False) and \ + (self.polymorphic_on is None or + c.columns[0] is not self.polymorphic_on): + continue + yield c + + @util.memoized_property + def attrs(self): + """A namespace of all :class:`.MapperProperty` objects + associated this mapper. + + This is an object that provides each property based on + its key name. For instance, the mapper for a + ``User`` class which has ``User.name`` attribute would + provide ``mapper.attrs.name``, which would be the + :class:`.ColumnProperty` representing the ``name`` + column. The namespace object can also be iterated, + which would yield each :class:`.MapperProperty`. 
+
+        :class:`.Mapper` has several pre-filtered views
+        of this attribute which limit the types of properties
+        returned, including :attr:`.synonyms`, :attr:`.column_attrs`,
+        :attr:`.relationships`, and :attr:`.composites`.
+
+        .. warning::
+
+            the :attr:`.Mapper.relationships` accessor namespace is an
+            instance of :class:`.OrderedProperties`.  This is
+            a dictionary-like object which includes a small number of
+            named methods such as :meth:`.OrderedProperties.items`
+            and :meth:`.OrderedProperties.values`.  When
+            accessing attributes dynamically, favor using the dict-access
+            scheme, e.g. ``mapper.attrs[somename]`` over
+            ``getattr(mapper.attrs, somename)`` to avoid name collisions.
+
+        .. seealso::
+
+            :attr:`.Mapper.all_orm_descriptors`
+
+        """
+        if Mapper._new_mappers:
+            configure_mappers()
+        return util.ImmutableProperties(self._props)
+
+    @util.memoized_property
+    def all_orm_descriptors(self):
+        """A namespace of all :class:`.InspectionAttr` attributes associated
+        with the mapped class.
+
+        These attributes are in all cases Python :term:`descriptors`
+        associated with the mapped class or its superclasses.
+
+        This namespace includes attributes that are mapped to the class
+        as well as attributes declared by extension modules.
+        It includes any Python descriptor type that inherits from
+        :class:`.InspectionAttr`.  This includes
+        :class:`.QueryableAttribute`, as well as extension types such as
+        :class:`.hybrid_property`, :class:`.hybrid_method` and
+        :class:`.AssociationProxy`.
+
+        To distinguish between mapped attributes and extension attributes,
+        the attribute :attr:`.InspectionAttr.extension_type` will refer
+        to a constant that distinguishes between different extension types.
+
+        When dealing with a :class:`.QueryableAttribute`, the
+        :attr:`.QueryableAttribute.property` attribute refers to the
+        :class:`.MapperProperty` property, which is what you get when
+        referring to the collection of mapped properties via
+        :attr:`.Mapper.attrs`.
+
+        .. warning::
+
+            the :attr:`.Mapper.relationships` accessor namespace is an
+            instance of :class:`.OrderedProperties`.  This is
+            a dictionary-like object which includes a small number of
+            named methods such as :meth:`.OrderedProperties.items`
+            and :meth:`.OrderedProperties.values`.  When
+            accessing attributes dynamically, favor using the dict-access
+            scheme, e.g. ``mapper.attrs[somename]`` over
+            ``getattr(mapper.attrs, somename)`` to avoid name collisions.
+
+        .. versionadded:: 0.8.0
+
+        .. seealso::
+
+            :attr:`.Mapper.attrs`
+
+        """
+        return util.ImmutableProperties(
+            dict(self.class_manager._all_sqla_attributes()))
+
+    @_memoized_configured_property
+    def synonyms(self):
+        """Return a namespace of all :class:`.SynonymProperty`
+        properties maintained by this :class:`.Mapper`.
+
+        .. seealso::
+
+            :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+            objects.
+
+        """
+        return self._filter_properties(properties.SynonymProperty)
+
+    @_memoized_configured_property
+    def column_attrs(self):
+        """Return a namespace of all :class:`.ColumnProperty`
+        properties maintained by this :class:`.Mapper`.
+
+        .. seealso::
+
+            :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+            objects.
+
+        """
+        return self._filter_properties(properties.ColumnProperty)
+
+    @_memoized_configured_property
+    def relationships(self):
+        """Return a namespace of all :class:`.RelationshipProperty`
+        properties maintained by this :class:`.Mapper`.
+
+        .. warning::
+
+            the :attr:`.Mapper.relationships` accessor namespace is an
+            instance of :class:`.OrderedProperties`.  This is
+            a dictionary-like object which includes a small number of
+            named methods such as :meth:`.OrderedProperties.items`
+            and :meth:`.OrderedProperties.values`.  When
+            accessing attributes dynamically, favor using the dict-access
+            scheme, e.g. ``mapper.attrs[somename]`` over
+            ``getattr(mapper.attrs, somename)`` to avoid name collisions.
+
+        .. seealso::
+
+            :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+            objects.
+
+        """
+        return self._filter_properties(properties.RelationshipProperty)
+
+    @_memoized_configured_property
+    def composites(self):
+        """Return a namespace of all :class:`.CompositeProperty`
+        properties maintained by this :class:`.Mapper`.
+
+        .. seealso::
+
+            :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+            objects.
+
+        """
+        return self._filter_properties(properties.CompositeProperty)
+
+    def _filter_properties(self, type_):
+        if Mapper._new_mappers:
+            configure_mappers()
+        return util.ImmutableProperties(util.OrderedDict(
+            (k, v) for k, v in self._props.items()
+            if isinstance(v, type_)
+        ))
+
+    @_memoized_configured_property
+    def _get_clause(self):
+        """create a "get clause" based on the primary key.  this is used
+        by query.get() and many-to-one lazyloads to load this item
+        by primary key.
+
+        """
+        params = [(primary_key, sql.bindparam(None, type_=primary_key.type))
+                  for primary_key in self.primary_key]
+        return sql.and_(*[k == v for (k, v) in params]), \
+            util.column_dict(params)
+
+    @_memoized_configured_property
+    def _equivalent_columns(self):
+        """Create a map of all *equivalent* columns, based on
+        the determination of column pairs that are equated to
+        one another based on inherit condition.  This is designed
+        to work with the queries that util.polymorphic_union
+        comes up with, which often don't include the columns from
+        the base table directly (including the subclass table columns
+        only).
+
+        The resulting structure is a dictionary of columns mapped
+        to lists of equivalent columns, i.e.
+
+        {
+            tablea.col1:
+                set([tableb.col1, tablec.col1]),
+            tablea.col2:
+                set([tabled.col2])
+        }
+
+        """
+        result = util.column_dict()
+
+        def visit_binary(binary):
+            if binary.operator == operators.eq:
+                if binary.left in result:
+                    result[binary.left].add(binary.right)
+                else:
+                    result[binary.left] = util.column_set((binary.right,))
+                if binary.right in result:
+                    result[binary.right].add(binary.left)
+                else:
+                    result[binary.right] = util.column_set((binary.left,))
+        for mapper in self.base_mapper.self_and_descendants:
+            if mapper.inherit_condition is not None:
+                visitors.traverse(
+                    mapper.inherit_condition, {},
+                    {'binary': visit_binary})
+
+        return result
+
+    def _is_userland_descriptor(self, obj):
+        if isinstance(obj, (_MappedAttribute,
+                            instrumentation.ClassManager,
+                            expression.ColumnElement)):
+            return False
+        else:
+            return True
+
+    def _should_exclude(self, name, assigned_name, local, column):
+        """determine whether a particular property should be implicitly
+        present on the class.
+
+        This occurs when properties are propagated from an inherited class, or
+        are applied from the columns present in the mapped table.
+ + """ + + # check for class-bound attributes and/or descriptors, + # either local or from an inherited class + if local: + if self.class_.__dict__.get(assigned_name, None) is not None \ + and self._is_userland_descriptor( + self.class_.__dict__[assigned_name]): + return True + else: + if getattr(self.class_, assigned_name, None) is not None \ + and self._is_userland_descriptor( + getattr(self.class_, assigned_name)): + return True + + if self.include_properties is not None and \ + name not in self.include_properties and \ + (column is None or column not in self.include_properties): + self._log("not including property %s" % (name)) + return True + + if self.exclude_properties is not None and \ + ( + name in self.exclude_properties or + (column is not None and column in self.exclude_properties) + ): + self._log("excluding property %s" % (name)) + return True + + return False + + def common_parent(self, other): + """Return true if the given mapper shares a + common inherited parent as this mapper.""" + + return self.base_mapper is other.base_mapper + + def _canload(self, state, allow_subtypes): + s = self.primary_mapper() + if self.polymorphic_on is not None or allow_subtypes: + return _state_mapper(state).isa(s) + else: + return _state_mapper(state) is s + + def isa(self, other): + """Return True if the this mapper inherits from the given mapper.""" + + m = self + while m and m is not other: + m = m.inherits + return bool(m) + + def iterate_to_root(self): + m = self + while m: + yield m + m = m.inherits + + @_memoized_configured_property + def self_and_descendants(self): + """The collection including this mapper and all descendant mappers. + + This includes not just the immediately inheriting mappers but + all their inheriting mappers as well. + + """ + descendants = [] + stack = deque([self]) + while stack: + item = stack.popleft() + descendants.append(item) + stack.extend(item._inheriting_mappers) + return util.WeakSequence(descendants) + + def polymorphic_iterator(self): + """Iterate through the collection including this mapper and + all descendant mappers. + + This includes not just the immediately inheriting mappers but + all their inheriting mappers as well. + + To iterate through an entire hierarchy, use + ``mapper.base_mapper.polymorphic_iterator()``. + + """ + return iter(self.self_and_descendants) + + def primary_mapper(self): + """Return the primary mapper corresponding to this mapper's class key + (class).""" + + return self.class_manager.mapper + + @property + def primary_base_mapper(self): + return self.class_manager.mapper.base_mapper + + def _result_has_identity_key(self, result, adapter=None): + pk_cols = self.primary_key + if adapter: + pk_cols = [adapter.columns[c] for c in pk_cols] + for col in pk_cols: + if not result._has_key(col): + return False + else: + return True + + def identity_key_from_row(self, row, adapter=None): + """Return an identity-map key for use in storing/retrieving an + item from the identity map. + + :param row: A :class:`.RowProxy` instance. The columns which are + mapped by this :class:`.Mapper` should be locatable in the row, + preferably via the :class:`.Column` object directly (as is the case + when a :func:`.select` construct is executed), or via string names of + the form ``_``. 
+ + """ + pk_cols = self.primary_key + if adapter: + pk_cols = [adapter.columns[c] for c in pk_cols] + + return self._identity_class, \ + tuple(row[column] for column in pk_cols) + + def identity_key_from_primary_key(self, primary_key): + """Return an identity-map key for use in storing/retrieving an + item from an identity map. + + :param primary_key: A list of values indicating the identifier. + + """ + return self._identity_class, tuple(primary_key) + + def identity_key_from_instance(self, instance): + """Return the identity key for the given instance, based on + its primary key attributes. + + If the instance's state is expired, calling this method + will result in a database check to see if the object has been deleted. + If the row no longer exists, + :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised. + + This value is typically also found on the instance state under the + attribute name `key`. + + """ + return self.identity_key_from_primary_key( + self.primary_key_from_instance(instance)) + + def _identity_key_from_state(self, state): + dict_ = state.dict + manager = state.manager + return self._identity_class, tuple([ + manager[self._columntoproperty[col].key]. + impl.get(state, dict_, attributes.PASSIVE_RETURN_NEVER_SET) + for col in self.primary_key + ]) + + def primary_key_from_instance(self, instance): + """Return the list of primary key values for the given + instance. + + If the instance's state is expired, calling this method + will result in a database check to see if the object has been deleted. + If the row no longer exists, + :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised. + + """ + state = attributes.instance_state(instance) + return self._primary_key_from_state(state, attributes.PASSIVE_OFF) + + def _primary_key_from_state( + self, state, passive=attributes.PASSIVE_RETURN_NEVER_SET): + dict_ = state.dict + manager = state.manager + return [ + manager[prop.key]. 
+ impl.get(state, dict_, passive) + for prop in self._identity_key_props + ] + + @_memoized_configured_property + def _identity_key_props(self): + return [self._columntoproperty[col] for col in self.primary_key] + + @_memoized_configured_property + def _all_pk_props(self): + collection = set() + for table in self.tables: + collection.update(self._pks_by_table[table]) + return collection + + @_memoized_configured_property + def _should_undefer_in_wildcard(self): + cols = set(self.primary_key) + if self.polymorphic_on is not None: + cols.add(self.polymorphic_on) + return cols + + @_memoized_configured_property + def _primary_key_propkeys(self): + return set([prop.key for prop in self._all_pk_props]) + + def _get_state_attr_by_column( + self, state, dict_, column, + passive=attributes.PASSIVE_RETURN_NEVER_SET): + prop = self._columntoproperty[column] + return state.manager[prop.key].impl.get(state, dict_, passive=passive) + + def _set_committed_state_attr_by_column(self, state, dict_, column, value): + prop = self._columntoproperty[column] + state.manager[prop.key].impl.set_committed_value(state, dict_, value) + + def _set_state_attr_by_column(self, state, dict_, column, value): + prop = self._columntoproperty[column] + state.manager[prop.key].impl.set(state, dict_, value, None) + + def _get_committed_attr_by_column(self, obj, column): + state = attributes.instance_state(obj) + dict_ = attributes.instance_dict(obj) + return self._get_committed_state_attr_by_column( + state, dict_, column, passive=attributes.PASSIVE_OFF) + + def _get_committed_state_attr_by_column( + self, state, dict_, column, + passive=attributes.PASSIVE_RETURN_NEVER_SET): + + prop = self._columntoproperty[column] + return state.manager[prop.key].impl.\ + get_committed_value(state, dict_, passive=passive) + + def _optimized_get_statement(self, state, attribute_names): + """assemble a WHERE clause which retrieves a given state by primary + key, using a minimized set of tables. + + Applies to a joined-table inheritance mapper where the + requested attribute names are only present on joined tables, + not the base table. The WHERE clause attempts to include + only those tables to minimize joins. 
+ + """ + props = self._props + + tables = set(chain( + *[sql_util.find_tables(c, check_columns=True) + for key in attribute_names + for c in props[key].columns] + )) + + if self.base_mapper.local_table in tables: + return None + + class ColumnsNotAvailable(Exception): + pass + + def visit_binary(binary): + leftcol = binary.left + rightcol = binary.right + if leftcol is None or rightcol is None: + return + + if leftcol.table not in tables: + leftval = self._get_committed_state_attr_by_column( + state, state.dict, + leftcol, + passive=attributes.PASSIVE_NO_INITIALIZE) + if leftval in orm_util._none_set: + raise ColumnsNotAvailable() + binary.left = sql.bindparam(None, leftval, + type_=binary.right.type) + elif rightcol.table not in tables: + rightval = self._get_committed_state_attr_by_column( + state, state.dict, + rightcol, + passive=attributes.PASSIVE_NO_INITIALIZE) + if rightval in orm_util._none_set: + raise ColumnsNotAvailable() + binary.right = sql.bindparam(None, rightval, + type_=binary.right.type) + + allconds = [] + + try: + start = False + for mapper in reversed(list(self.iterate_to_root())): + if mapper.local_table in tables: + start = True + elif not isinstance(mapper.local_table, + expression.TableClause): + return None + if start and not mapper.single: + allconds.append(visitors.cloned_traverse( + mapper.inherit_condition, + {}, + {'binary': visit_binary} + ) + ) + except ColumnsNotAvailable: + return None + + cond = sql.and_(*allconds) + + cols = [] + for key in attribute_names: + cols.extend(props[key].columns) + return sql.select(cols, cond, use_labels=True) + + def cascade_iterator(self, type_, state, halt_on=None): + """Iterate each element and its mapper in an object graph, + for all relationships that meet the given cascade rule. + + :param type_: + The name of the cascade rule (i.e. ``"save-update"``, ``"delete"``, + etc.). + + .. note:: the ``"all"`` cascade is not accepted here. For a generic + object traversal function, see :ref:`faq_walk_objects`. + + :param state: + The lead InstanceState. child items will be processed per + the relationships defined for this object's mapper. + + :return: the method yields individual object instances. + + .. seealso:: + + :ref:`unitofwork_cascades` + + :ref:`faq_walk_objects` - illustrates a generic function to + traverse all objects without relying on cascades. 
+ + """ + visited_states = set() + prp, mpp = object(), object() + + visitables = deque([(deque(self._props.values()), prp, + state, state.dict)]) + + while visitables: + iterator, item_type, parent_state, parent_dict = visitables[-1] + if not iterator: + visitables.pop() + continue + + if item_type is prp: + prop = iterator.popleft() + if type_ not in prop.cascade: + continue + queue = deque(prop.cascade_iterator( + type_, parent_state, parent_dict, + visited_states, halt_on)) + if queue: + visitables.append((queue, mpp, None, None)) + elif item_type is mpp: + instance, instance_mapper, corresponding_state, \ + corresponding_dict = iterator.popleft() + yield instance, instance_mapper, \ + corresponding_state, corresponding_dict + visitables.append((deque(instance_mapper._props.values()), + prp, corresponding_state, + corresponding_dict)) + + @_memoized_configured_property + def _compiled_cache(self): + return util.LRUCache(self._compiled_cache_size) + + @_memoized_configured_property + def _sorted_tables(self): + table_to_mapper = {} + + for mapper in self.base_mapper.self_and_descendants: + for t in mapper.tables: + table_to_mapper.setdefault(t, mapper) + + extra_dependencies = [] + for table, mapper in table_to_mapper.items(): + super_ = mapper.inherits + if super_: + extra_dependencies.extend([ + (super_table, table) + for super_table in super_.tables + ]) + + def skip(fk): + # attempt to skip dependencies that are not + # significant to the inheritance chain + # for two tables that are related by inheritance. + # while that dependency may be important, it's technically + # not what we mean to sort on here. + parent = table_to_mapper.get(fk.parent.table) + dep = table_to_mapper.get(fk.column.table) + if parent is not None and \ + dep is not None and \ + dep is not parent and \ + dep.inherit_condition is not None: + cols = set(sql_util._find_columns(dep.inherit_condition)) + if parent.inherit_condition is not None: + cols = cols.union(sql_util._find_columns( + parent.inherit_condition)) + return fk.parent not in cols and fk.column not in cols + else: + return fk.parent not in cols + return False + + sorted_ = sql_util.sort_tables(table_to_mapper, + skip_fn=skip, + extra_dependencies=extra_dependencies) + + ret = util.OrderedDict() + for t in sorted_: + ret[t] = table_to_mapper[t] + return ret + + def _memo(self, key, callable_): + if key in self._memoized_values: + return self._memoized_values[key] + else: + self._memoized_values[key] = value = callable_() + return value + + @util.memoized_property + def _table_to_equated(self): + """memoized map of tables to collections of columns to be + synchronized upwards to the base mapper.""" + + result = util.defaultdict(list) + + for table in self._sorted_tables: + cols = set(table.c) + for m in self.iterate_to_root(): + if m._inherits_equated_pairs and \ + cols.intersection( + util.reduce(set.union, + [l.proxy_set for l, r in + m._inherits_equated_pairs]) + ): + result[table].append((m, m._inherits_equated_pairs)) + + return result + + +def configure_mappers(): + """Initialize the inter-mapper relationships of all mappers that + have been constructed thus far. + + This function can be called any number of times, but in + most cases is invoked automatically, the first time mappings are used, + as well as whenever mappings are used and additional not-yet-configured + mappers have been constructed. 
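+
+    For example, to trigger configuration eagerly rather than waiting
+    for mappings to be first used (a minimal sketch; model definitions
+    are elided)::
+
+        from sqlalchemy.orm import configure_mappers
+
+        # ... declare mapped classes ...
+        configure_mappers()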
+
+ Points at which this occurs include when a mapped class is instantiated
+ into an instance, as well as when the :meth:`.Session.query` method
+ is used.
+
+ The :func:`.configure_mappers` function provides several event hooks
+ that can be used to augment its functionality. These methods include:
+
+ * :meth:`.MapperEvents.before_configured` - called once before
+ :func:`.configure_mappers` does any work; this can be used to establish
+ additional options, properties, or related mappings before the operation
+ proceeds.
+
+ * :meth:`.MapperEvents.mapper_configured` - called as each individual
+ :class:`.Mapper` is configured within the process; will include all
+ mapper state except for backrefs set up by other mappers that are still
+ to be configured.
+
+ * :meth:`.MapperEvents.after_configured` - called once after
+ :func:`.configure_mappers` is complete; at this stage, all
+ :class:`.Mapper` objects that are known to SQLAlchemy will be fully
+ configured. Note that the calling application may still have other
+ mappings that haven't been produced yet, such as if they are in modules
+ as yet unimported.
+
+ """
+
+ if not Mapper._new_mappers:
+ return
+
+ _CONFIGURE_MUTEX.acquire()
+ try:
+ global _already_compiling
+ if _already_compiling:
+ return
+ _already_compiling = True
+ try:
+
+ # double-check inside mutex
+ if not Mapper._new_mappers:
+ return
+
+ Mapper.dispatch._for_class(Mapper).before_configured()
+ # initialize properties on all mappers
+ # note that _mapper_registry is unordered, which
+ # may randomly conceal/reveal issues related to
+ # the order of mapper compilation
+
+ for mapper in list(_mapper_registry):
+ if getattr(mapper, '_configure_failed', False):
+ e = sa_exc.InvalidRequestError(
+ "One or more mappers failed to initialize - "
+ "can't proceed with initialization of other "
+ "mappers. Original exception was: %s"
+ % mapper._configure_failed)
+ e._configure_failed = mapper._configure_failed
+ raise e
+ if not mapper.configured:
+ try:
+ mapper._post_configure_properties()
+ mapper._expire_memoizations()
+ mapper.dispatch.mapper_configured(
+ mapper, mapper.class_)
+ except Exception:
+ exc = sys.exc_info()[1]
+ if not hasattr(exc, '_configure_failed'):
+ mapper._configure_failed = exc
+ raise
+
+ Mapper._new_mappers = False
+ finally:
+ _already_compiling = False
+ finally:
+ _CONFIGURE_MUTEX.release()
+ Mapper.dispatch._for_class(Mapper).after_configured()
+
+
+def reconstructor(fn):
+ """Decorate a method as the 'reconstructor' hook.
+
+ Designates a method as the "reconstructor", an ``__init__``-like
+ method that will be called by the ORM after the instance has been
+ loaded from the database or otherwise reconstituted.
+
+ The reconstructor will be invoked with no arguments. Scalar
+ (non-collection) database-mapped attributes of the instance will
+ be available for use within the function. Eagerly-loaded
+ collections are generally not yet available and will usually only
+ contain the first element. ORM state changes made to objects at
+ this stage will not be recorded for the next flush() operation, so
+ the activity within a reconstructor should be conservative.
+
+ """
+ fn.__sa_reconstructor__ = True
+ return fn
+
+
+def validates(*names, **kw):
+ """Decorate a method as a 'validator' for one or more named properties.
+
+ Designates a method as a validator, a method which receives the
+ name of the attribute as well as a value to be assigned, or in the
+ case of a collection, the value to be added to the collection.
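+
+    For example, a minimal sketch against a hypothetical ``User``
+    mapping (the class and column names are illustrative only)::
+
+        from sqlalchemy.orm import validates
+
+        class User(Base):
+            # ... mapped columns elided ...
+
+            @validates('email')
+            def validate_email(self, key, value):
+                # reject addresses without an '@'; the (possibly
+                # modified) value must be returned
+                assert '@' in value
+                return value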
+ The function can then raise validation exceptions to halt the + process from continuing (where Python's built-in ``ValueError`` + and ``AssertionError`` exceptions are reasonable choices), or can + modify or replace the value before proceeding. The function should + otherwise return the given value. + + Note that a validator for a collection **cannot** issue a load of that + collection within the validation routine - this usage raises + an assertion to avoid recursion overflows. This is a reentrant + condition which is not supported. + + :param \*names: list of attribute names to be validated. + :param include_removes: if True, "remove" events will be + sent as well - the validation function must accept an additional + argument "is_remove" which will be a boolean. + + .. versionadded:: 0.7.7 + :param include_backrefs: defaults to ``True``; if ``False``, the + validation function will not emit if the originator is an attribute + event related via a backref. This can be used for bi-directional + :func:`.validates` usage where only one validator should emit per + attribute operation. + + .. versionadded:: 0.9.0 + + .. seealso:: + + :ref:`simple_validators` - usage examples for :func:`.validates` + + """ + include_removes = kw.pop('include_removes', False) + include_backrefs = kw.pop('include_backrefs', True) + + def wrap(fn): + fn.__sa_validators__ = names + fn.__sa_validation_opts__ = { + "include_removes": include_removes, + "include_backrefs": include_backrefs + } + return fn + return wrap + + +def _event_on_load(state, ctx): + instrumenting_mapper = state.manager.info[_INSTRUMENTOR] + if instrumenting_mapper._reconstructor: + instrumenting_mapper._reconstructor(state.obj()) + + +def _event_on_first_init(manager, cls): + """Initial mapper compilation trigger. + + instrumentation calls this one when InstanceState + is first generated, and is needed for legacy mutable + attributes to work. + """ + + instrumenting_mapper = manager.info.get(_INSTRUMENTOR) + if instrumenting_mapper: + if Mapper._new_mappers: + configure_mappers() + + +def _event_on_init(state, args, kwargs): + """Run init_instance hooks. + + This also includes mapper compilation, normally not needed + here but helps with some piecemeal configuration + scenarios (such as in the ORM tutorial). + + """ + + instrumenting_mapper = state.manager.info.get(_INSTRUMENTOR) + if instrumenting_mapper: + if Mapper._new_mappers: + configure_mappers() + if instrumenting_mapper._set_polymorphic_identity: + instrumenting_mapper._set_polymorphic_identity(state) + + +class _ColumnMapping(dict): + """Error reporting helper for mapper._columntoproperty.""" + + __slots__ = 'mapper', + + def __init__(self, mapper): + self.mapper = mapper + + def __missing__(self, column): + prop = self.mapper._props.get(column) + if prop: + raise orm_exc.UnmappedColumnError( + "Column '%s.%s' is not available, due to " + "conflicting property '%s':%r" % ( + column.table.name, column.name, column.key, prop)) + raise orm_exc.UnmappedColumnError( + "No column %s is configured on mapper %s..." 
%
+ (column, self.mapper))
diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/path_registry.py b/lib/python3.4/site-packages/sqlalchemy/orm/path_registry.py
new file mode 100644
index 0000000..cf18465
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/orm/path_registry.py
@@ -0,0 +1,291 @@
+# orm/path_registry.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""Path tracking utilities, representing mapper graph traversals.
+
+"""
+
+from .. import inspection
+from .. import util
+from .. import exc
+from itertools import chain
+from .base import class_mapper
+import logging
+
+log = logging.getLogger(__name__)
+
+
+def _unreduce_path(path):
+ return PathRegistry.deserialize(path)
+
+
+_WILDCARD_TOKEN = "*"
+_DEFAULT_TOKEN = "_sa_default"
+
+
+class PathRegistry(object):
+ """Represent query load paths and registry functions.
+
+ Basically represents structures like:
+
+ (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)
+
+ These structures are generated by things like
+ query options (joinedload(), subqueryload(), etc.) and are
+ used to compose keys stored in the query._attributes dictionary
+ for various options.
+
+ They are then re-composed at query compile/result row time as
+ the query is formed and as rows are fetched, where they again
+ serve to compose keys to look up options in the context.attributes
+ dictionary, which is copied from query._attributes.
+
+ The path structure has a limited amount of caching, where each
+ "root" ultimately pulls from a fixed registry associated with
+ the first mapper, that also contains elements for each of its
+ property keys. However paths longer than two elements, which
+ are the exception rather than the rule, are generated on an
+ as-needed basis.
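+
+    As a rough illustration only (internal API; ``User`` and its
+    ``orders`` relationship are hypothetical)::
+
+        from sqlalchemy import inspect
+
+        path = PathRegistry.root[inspect(User)]      # EntityRegistry
+        path = path[inspect(User).attrs.orders]      # PropRegistry
+        path.path    # (User mapper, "orders" property)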
+ + """ + + is_token = False + is_root = False + + def __eq__(self, other): + return other is not None and \ + self.path == other.path + + def set(self, attributes, key, value): + log.debug("set '%s' on path '%s' to '%s'", key, self, value) + attributes[(key, self.path)] = value + + def setdefault(self, attributes, key, value): + log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value) + attributes.setdefault((key, self.path), value) + + def get(self, attributes, key, value=None): + key = (key, self.path) + if key in attributes: + return attributes[key] + else: + return value + + def __len__(self): + return len(self.path) + + @property + def length(self): + return len(self.path) + + def pairs(self): + path = self.path + for i in range(0, len(path), 2): + yield path[i], path[i + 1] + + def contains_mapper(self, mapper): + for path_mapper in [ + self.path[i] for i in range(0, len(self.path), 2) + ]: + if path_mapper.is_mapper and \ + path_mapper.isa(mapper): + return True + else: + return False + + def contains(self, attributes, key): + return (key, self.path) in attributes + + def __reduce__(self): + return _unreduce_path, (self.serialize(), ) + + def serialize(self): + path = self.path + return list(zip( + [m.class_ for m in [path[i] for i in range(0, len(path), 2)]], + [path[i].key for i in range(1, len(path), 2)] + [None] + )) + + @classmethod + def deserialize(cls, path): + if path is None: + return None + + p = tuple(chain(*[(class_mapper(mcls), + class_mapper(mcls).attrs[key] + if key is not None else None) + for mcls, key in path])) + if p and p[-1] is None: + p = p[0:-1] + return cls.coerce(p) + + @classmethod + def per_mapper(cls, mapper): + return EntityRegistry( + cls.root, mapper + ) + + @classmethod + def coerce(cls, raw): + return util.reduce(lambda prev, next: prev[next], raw, cls.root) + + def token(self, token): + if token.endswith(':' + _WILDCARD_TOKEN): + return TokenRegistry(self, token) + elif token.endswith(":" + _DEFAULT_TOKEN): + return TokenRegistry(self.root, token) + else: + raise exc.ArgumentError("invalid token: %s" % token) + + def __add__(self, other): + return util.reduce( + lambda prev, next: prev[next], + other.path, self) + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self.path, ) + + +class RootRegistry(PathRegistry): + """Root registry, defers to mappers so that + paths are maintained per-root-mapper. + + """ + path = () + has_entity = False + is_aliased_class = False + is_root = True + + def __getitem__(self, entity): + return entity._path_registry + +PathRegistry.root = RootRegistry() + + +class TokenRegistry(PathRegistry): + def __init__(self, parent, token): + self.token = token + self.parent = parent + self.path = parent.path + (token,) + + has_entity = False + + is_token = True + + def generate_for_superclasses(self): + if not self.parent.is_aliased_class and not self.parent.is_root: + for ent in self.parent.mapper.iterate_to_root(): + yield TokenRegistry(self.parent.parent[ent], self.token) + else: + yield self + + def __getitem__(self, entity): + raise NotImplementedError() + + +class PropRegistry(PathRegistry): + def __init__(self, parent, prop): + # restate this path in terms of the + # given MapperProperty's parent. 
+ insp = inspection.inspect(parent[-1]) + if not insp.is_aliased_class or insp._use_mapper_path: + parent = parent.parent[prop.parent] + elif insp.is_aliased_class and insp.with_polymorphic_mappers: + if prop.parent is not insp.mapper and \ + prop.parent in insp.with_polymorphic_mappers: + subclass_entity = parent[-1]._entity_for_mapper(prop.parent) + parent = parent.parent[subclass_entity] + + self.prop = prop + self.parent = parent + self.path = parent.path + (prop,) + + def __str__(self): + return " -> ".join( + str(elem) for elem in self.path + ) + + @util.memoized_property + def has_entity(self): + return hasattr(self.prop, "mapper") + + @util.memoized_property + def entity(self): + return self.prop.mapper + + @util.memoized_property + def _wildcard_path_loader_key(self): + """Given a path (mapper A, prop X), replace the prop with the wildcard, + e.g. (mapper A, 'relationship:.*') or (mapper A, 'column:.*'), then + return within the ("loader", path) structure. + + """ + return ("loader", + self.parent.token( + "%s:%s" % ( + self.prop.strategy_wildcard_key, _WILDCARD_TOKEN) + ).path + ) + + @util.memoized_property + def _default_path_loader_key(self): + return ("loader", + self.parent.token( + "%s:%s" % (self.prop.strategy_wildcard_key, + _DEFAULT_TOKEN) + ).path + ) + + @util.memoized_property + def _loader_key(self): + return ("loader", self.path) + + @property + def mapper(self): + return self.entity + + @property + def entity_path(self): + return self[self.entity] + + def __getitem__(self, entity): + if isinstance(entity, (int, slice)): + return self.path[entity] + else: + return EntityRegistry( + self, entity + ) + + +class EntityRegistry(PathRegistry, dict): + is_aliased_class = False + has_entity = True + + def __init__(self, parent, entity): + self.key = entity + self.parent = parent + self.is_aliased_class = entity.is_aliased_class + self.entity = entity + self.path = parent.path + (entity,) + self.entity_path = self + + @property + def mapper(self): + return inspection.inspect(self.entity).mapper + + def __bool__(self): + return True + __nonzero__ = __bool__ + + def __getitem__(self, entity): + if isinstance(entity, (int, slice)): + return self.path[entity] + else: + return dict.__getitem__(self, entity) + + def __missing__(self, key): + self[key] = item = PropRegistry(self, key) + return item diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/persistence.py b/lib/python3.4/site-packages/sqlalchemy/orm/persistence.py new file mode 100644 index 0000000..5d69f51 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/persistence.py @@ -0,0 +1,1408 @@ +# orm/persistence.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""private module containing functions used to emit INSERT, UPDATE +and DELETE statements on behalf of a :class:`.Mapper` and its descending +mappers. + +The functions here are called only by the unit of work functions +in unitofwork.py. + +""" + +import operator +from itertools import groupby, chain +from .. import sql, util, exc as sa_exc +from . import attributes, sync, exc as orm_exc, evaluator +from .base import state_str, _attr_as_key, _entity_descriptor +from ..sql import expression +from ..sql.base import _from_objects +from . 
import loading + + +def _bulk_insert( + mapper, mappings, session_transaction, isstates, return_defaults): + base_mapper = mapper.base_mapper + + cached_connections = _cached_connection_dict(base_mapper) + + if session_transaction.session.connection_callable: + raise NotImplementedError( + "connection_callable / per-instance sharding " + "not supported in bulk_insert()") + + if isstates: + if return_defaults: + states = [(state, state.dict) for state in mappings] + mappings = [dict_ for (state, dict_) in states] + else: + mappings = [state.dict for state in mappings] + else: + mappings = list(mappings) + + connection = session_transaction.connection(base_mapper) + for table, super_mapper in base_mapper._sorted_tables.items(): + if not mapper.isa(super_mapper): + continue + + records = ( + (None, state_dict, params, mapper, + connection, value_params, has_all_pks, has_all_defaults) + for + state, state_dict, params, mp, + conn, value_params, has_all_pks, + has_all_defaults in _collect_insert_commands(table, ( + (None, mapping, mapper, connection) + for mapping in mappings), + bulk=True, return_defaults=return_defaults + ) + ) + _emit_insert_statements(base_mapper, None, + cached_connections, + super_mapper, table, records, + bookkeeping=return_defaults) + + if return_defaults and isstates: + identity_cls = mapper._identity_class + identity_props = [p.key for p in mapper._identity_key_props] + for state, dict_ in states: + state.key = ( + identity_cls, + tuple([dict_[key] for key in identity_props]) + ) + + +def _bulk_update(mapper, mappings, session_transaction, + isstates, update_changed_only): + base_mapper = mapper.base_mapper + + cached_connections = _cached_connection_dict(base_mapper) + + def _changed_dict(mapper, state): + return dict( + (k, v) + for k, v in state.dict.items() if k in state.committed_state or k + in mapper._primary_key_propkeys + ) + + if isstates: + if update_changed_only: + mappings = [_changed_dict(mapper, state) for state in mappings] + else: + mappings = [state.dict for state in mappings] + else: + mappings = list(mappings) + + if session_transaction.session.connection_callable: + raise NotImplementedError( + "connection_callable / per-instance sharding " + "not supported in bulk_update()") + + connection = session_transaction.connection(base_mapper) + + for table, super_mapper in base_mapper._sorted_tables.items(): + if not mapper.isa(super_mapper): + continue + + records = _collect_update_commands(None, table, ( + (None, mapping, mapper, connection, + (mapping[mapper._version_id_prop.key] + if mapper._version_id_prop else None)) + for mapping in mappings + ), bulk=True) + + _emit_update_statements(base_mapper, None, + cached_connections, + super_mapper, table, records, + bookkeeping=False) + + +def save_obj( + base_mapper, states, uowtransaction, single=False): + """Issue ``INSERT`` and/or ``UPDATE`` statements for a list + of objects. + + This is called within the context of a UOWTransaction during a + flush operation, given a list of states to be flushed. The + base mapper in an inheritance hierarchy handles the inserts/ + updates for all descendant mappers. 
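+
+    This function is internal to the unit of work; it is reached
+    indirectly through an ordinary flush, e.g. (a minimal sketch,
+    ``some_object`` being hypothetical)::
+
+        session.add(some_object)
+        session.flush()    # unit of work routes INSERT/UPDATE through here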
+ + """ + + # if batch=false, call _save_obj separately for each object + if not single and not base_mapper.batch: + for state in _sort_states(states): + save_obj(base_mapper, [state], uowtransaction, single=True) + return + + states_to_update = [] + states_to_insert = [] + cached_connections = _cached_connection_dict(base_mapper) + + for (state, dict_, mapper, connection, + has_identity, + row_switch, update_version_id) in _organize_states_for_save( + base_mapper, states, uowtransaction + ): + if has_identity or row_switch: + states_to_update.append( + (state, dict_, mapper, connection, update_version_id) + ) + else: + states_to_insert.append( + (state, dict_, mapper, connection) + ) + + for table, mapper in base_mapper._sorted_tables.items(): + if table not in mapper._pks_by_table: + continue + insert = _collect_insert_commands(table, states_to_insert) + + update = _collect_update_commands( + uowtransaction, table, states_to_update) + + _emit_update_statements(base_mapper, uowtransaction, + cached_connections, + mapper, table, update) + + _emit_insert_statements(base_mapper, uowtransaction, + cached_connections, + mapper, table, insert) + + _finalize_insert_update_commands( + base_mapper, uowtransaction, + chain( + ( + (state, state_dict, mapper, connection, False) + for state, state_dict, mapper, connection in states_to_insert + ), + ( + (state, state_dict, mapper, connection, True) + for state, state_dict, mapper, connection, + update_version_id in states_to_update + ) + ) + ) + + +def post_update(base_mapper, states, uowtransaction, post_update_cols): + """Issue UPDATE statements on behalf of a relationship() which + specifies post_update. + + """ + cached_connections = _cached_connection_dict(base_mapper) + + states_to_update = list(_organize_states_for_post_update( + base_mapper, + states, uowtransaction)) + + for table, mapper in base_mapper._sorted_tables.items(): + if table not in mapper._pks_by_table: + continue + + update = ( + (state, state_dict, sub_mapper, connection) + for + state, state_dict, sub_mapper, connection in states_to_update + if table in sub_mapper._pks_by_table + ) + + update = _collect_post_update_commands(base_mapper, uowtransaction, + table, update, + post_update_cols) + + _emit_post_update_statements(base_mapper, uowtransaction, + cached_connections, + mapper, table, update) + + +def delete_obj(base_mapper, states, uowtransaction): + """Issue ``DELETE`` statements for a list of objects. + + This is called within the context of a UOWTransaction during a + flush operation. + + """ + + cached_connections = _cached_connection_dict(base_mapper) + + states_to_delete = list(_organize_states_for_delete( + base_mapper, + states, + uowtransaction)) + + table_to_mapper = base_mapper._sorted_tables + + for table in reversed(list(table_to_mapper.keys())): + mapper = table_to_mapper[table] + if table not in mapper._pks_by_table: + continue + + delete = _collect_delete_commands(base_mapper, uowtransaction, + table, states_to_delete) + + _emit_delete_statements(base_mapper, uowtransaction, + cached_connections, mapper, table, delete) + + for state, state_dict, mapper, connection, \ + update_version_id in states_to_delete: + mapper.dispatch.after_delete(mapper, connection, state) + + +def _organize_states_for_save(base_mapper, states, uowtransaction): + """Make an initial pass across a set of states for INSERT or + UPDATE. 
+ + This includes splitting out into distinct lists for + each, calling before_insert/before_update, obtaining + key information for each state including its dictionary, + mapper, the connection to use for the execution per state, + and the identity flag. + + """ + + for state, dict_, mapper, connection in _connections_for_states( + base_mapper, uowtransaction, + states): + + has_identity = bool(state.key) + + instance_key = state.key or mapper._identity_key_from_state(state) + + row_switch = update_version_id = None + + # call before_XXX extensions + if not has_identity: + mapper.dispatch.before_insert(mapper, connection, state) + else: + mapper.dispatch.before_update(mapper, connection, state) + + if mapper._validate_polymorphic_identity: + mapper._validate_polymorphic_identity(mapper, state, dict_) + + # detect if we have a "pending" instance (i.e. has + # no instance_key attached to it), and another instance + # with the same identity key already exists as persistent. + # convert to an UPDATE if so. + if not has_identity and \ + instance_key in uowtransaction.session.identity_map: + instance = \ + uowtransaction.session.identity_map[instance_key] + existing = attributes.instance_state(instance) + if not uowtransaction.is_deleted(existing): + raise orm_exc.FlushError( + "New instance %s with identity key %s conflicts " + "with persistent instance %s" % + (state_str(state), instance_key, + state_str(existing))) + + base_mapper._log_debug( + "detected row switch for identity %s. " + "will update %s, remove %s from " + "transaction", instance_key, + state_str(state), state_str(existing)) + + # remove the "delete" flag from the existing element + uowtransaction.remove_state_actions(existing) + row_switch = existing + + if (has_identity or row_switch) and mapper.version_id_col is not None: + update_version_id = mapper._get_committed_state_attr_by_column( + row_switch if row_switch else state, + row_switch.dict if row_switch else dict_, + mapper.version_id_col) + + yield (state, dict_, mapper, connection, + has_identity, row_switch, update_version_id) + + +def _organize_states_for_post_update(base_mapper, states, + uowtransaction): + """Make an initial pass across a set of states for UPDATE + corresponding to post_update. + + This includes obtaining key information for each state + including its dictionary, mapper, the connection to use for + the execution per state. + + """ + return _connections_for_states(base_mapper, uowtransaction, states) + + +def _organize_states_for_delete(base_mapper, states, uowtransaction): + """Make an initial pass across a set of states for DELETE. + + This includes calling out before_delete and obtaining + key information for each state including its dictionary, + mapper, the connection to use for the execution per state. + + """ + for state, dict_, mapper, connection in _connections_for_states( + base_mapper, uowtransaction, + states): + + mapper.dispatch.before_delete(mapper, connection, state) + + if mapper.version_id_col is not None: + update_version_id = \ + mapper._get_committed_state_attr_by_column( + state, dict_, + mapper.version_id_col) + else: + update_version_id = None + + yield ( + state, dict_, mapper, connection, update_version_id) + + +def _collect_insert_commands( + table, states_to_insert, + bulk=False, return_defaults=False): + """Identify sets of values to use in INSERT statements for a + list of states. 
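+
+    Each element yielded is a tuple of the form::
+
+        (state, state_dict, params, mapper,
+         connection, value_params, has_all_pks, has_all_defaults)
+
+    which is consumed by _emit_insert_statements().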
+ + """ + for state, state_dict, mapper, connection in states_to_insert: + if table not in mapper._pks_by_table: + continue + + params = {} + value_params = {} + + propkey_to_col = mapper._propkey_to_col[table] + + for propkey in set(propkey_to_col).intersection(state_dict): + value = state_dict[propkey] + col = propkey_to_col[propkey] + if value is None: + continue + elif not bulk and isinstance(value, sql.ClauseElement): + value_params[col.key] = value + else: + params[col.key] = value + + if not bulk: + for colkey in mapper._insert_cols_as_none[table].\ + difference(params).difference(value_params): + params[colkey] = None + + if not bulk or return_defaults: + has_all_pks = mapper._pk_keys_by_table[table].issubset(params) + + if mapper.base_mapper.eager_defaults: + has_all_defaults = mapper._server_default_cols[table].\ + issubset(params) + else: + has_all_defaults = True + else: + has_all_defaults = has_all_pks = True + + if mapper.version_id_generator is not False \ + and mapper.version_id_col is not None and \ + mapper.version_id_col in mapper._cols_by_table[table]: + params[mapper.version_id_col.key] = \ + mapper.version_id_generator(None) + + yield ( + state, state_dict, params, mapper, + connection, value_params, has_all_pks, + has_all_defaults) + + +def _collect_update_commands( + uowtransaction, table, states_to_update, + bulk=False): + """Identify sets of values to use in UPDATE statements for a + list of states. + + This function works intricately with the history system + to determine exactly what values should be updated + as well as how the row should be matched within an UPDATE + statement. Includes some tricky scenarios where the primary + key of an object might have been changed. + + """ + + for state, state_dict, mapper, connection, \ + update_version_id in states_to_update: + + if table not in mapper._pks_by_table: + continue + + pks = mapper._pks_by_table[table] + + value_params = {} + + propkey_to_col = mapper._propkey_to_col[table] + + if bulk: + params = dict( + (propkey_to_col[propkey].key, state_dict[propkey]) + for propkey in + set(propkey_to_col).intersection(state_dict).difference( + mapper._pk_keys_by_table[table]) + ) + has_all_defaults = True + else: + params = {} + for propkey in set(propkey_to_col).intersection( + state.committed_state): + value = state_dict[propkey] + col = propkey_to_col[propkey] + + if isinstance(value, sql.ClauseElement): + value_params[col] = value + # guard against values that generate non-__nonzero__ + # objects for __eq__() + elif state.manager[propkey].impl.is_equal( + value, state.committed_state[propkey]) is not True: + params[col.key] = value + + if mapper.base_mapper.eager_defaults: + has_all_defaults = mapper._server_onupdate_default_cols[table].\ + issubset(params) + else: + has_all_defaults = True + + if update_version_id is not None and \ + mapper.version_id_col in mapper._cols_by_table[table]: + + if not bulk and not (params or value_params): + # HACK: check for history in other tables, in case the + # history is only in a different table than the one + # where the version_id_col is. This logic was lost + # from 0.9 -> 1.0.0 and restored in 1.0.6. 
+ for prop in mapper._columntoproperty.values(): + history = ( + state.manager[prop.key].impl.get_history( + state, state_dict, + attributes.PASSIVE_NO_INITIALIZE)) + if history.added: + break + else: + # no net change, break + continue + + col = mapper.version_id_col + params[col._label] = update_version_id + + if (bulk or col.key not in params) and \ + mapper.version_id_generator is not False: + val = mapper.version_id_generator(update_version_id) + params[col.key] = val + + elif not (params or value_params): + continue + + if bulk: + pk_params = dict( + (propkey_to_col[propkey]._label, state_dict.get(propkey)) + for propkey in + set(propkey_to_col). + intersection(mapper._pk_keys_by_table[table]) + ) + else: + pk_params = {} + for col in pks: + propkey = mapper._columntoproperty[col].key + + history = state.manager[propkey].impl.get_history( + state, state_dict, attributes.PASSIVE_OFF) + + if history.added: + if not history.deleted or \ + ("pk_cascaded", state, col) in \ + uowtransaction.attributes: + pk_params[col._label] = history.added[0] + params.pop(col.key, None) + else: + # else, use the old value to locate the row + pk_params[col._label] = history.deleted[0] + params[col.key] = history.added[0] + else: + pk_params[col._label] = history.unchanged[0] + if pk_params[col._label] is None: + raise orm_exc.FlushError( + "Can't update table %s using NULL for primary " + "key value on column %s" % (table, col)) + + if params or value_params: + params.update(pk_params) + yield ( + state, state_dict, params, mapper, + connection, value_params, has_all_defaults) + + +def _collect_post_update_commands(base_mapper, uowtransaction, table, + states_to_update, post_update_cols): + """Identify sets of values to use in UPDATE statements for a + list of states within a post_update operation. 
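+
+    Yields ``(params, connection)`` pairs, which are grouped and
+    executed by _emit_post_update_statements().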
+ + """ + + for state, state_dict, mapper, connection in states_to_update: + + # assert table in mapper._pks_by_table + + pks = mapper._pks_by_table[table] + params = {} + hasdata = False + + for col in mapper._cols_by_table[table]: + if col in pks: + params[col._label] = \ + mapper._get_state_attr_by_column( + state, + state_dict, col, passive=attributes.PASSIVE_OFF) + + elif col in post_update_cols: + prop = mapper._columntoproperty[col] + history = state.manager[prop.key].impl.get_history( + state, state_dict, + attributes.PASSIVE_NO_INITIALIZE) + if history.added: + value = history.added[0] + params[col.key] = value + hasdata = True + if hasdata: + yield params, connection + + +def _collect_delete_commands(base_mapper, uowtransaction, table, + states_to_delete): + """Identify values to use in DELETE statements for a list of + states to be deleted.""" + + for state, state_dict, mapper, connection, \ + update_version_id in states_to_delete: + + if table not in mapper._pks_by_table: + continue + + params = {} + for col in mapper._pks_by_table[table]: + params[col.key] = \ + value = \ + mapper._get_committed_state_attr_by_column( + state, state_dict, col) + if value is None: + raise orm_exc.FlushError( + "Can't delete from table %s " + "using NULL for primary " + "key value on column %s" % (table, col)) + + if update_version_id is not None and \ + mapper.version_id_col in mapper._cols_by_table[table]: + params[mapper.version_id_col.key] = update_version_id + yield params, connection + + +def _emit_update_statements(base_mapper, uowtransaction, + cached_connections, mapper, table, update, + bookkeeping=True): + """Emit UPDATE statements corresponding to value lists collected + by _collect_update_commands().""" + + needs_version_id = mapper.version_id_col is not None and \ + mapper.version_id_col in mapper._cols_by_table[table] + + def update_stmt(): + clause = sql.and_() + + for col in mapper._pks_by_table[table]: + clause.clauses.append(col == sql.bindparam(col._label, + type_=col.type)) + + if needs_version_id: + clause.clauses.append( + mapper.version_id_col == sql.bindparam( + mapper.version_id_col._label, + type_=mapper.version_id_col.type)) + + stmt = table.update(clause) + return stmt + + cached_stmt = base_mapper._memo(('update', table), update_stmt) + + for (connection, paramkeys, hasvalue, has_all_defaults), \ + records in groupby( + update, + lambda rec: ( + rec[4], # connection + set(rec[2]), # set of parameter keys + bool(rec[5]), # whether or not we have "value" parameters + rec[6] # has_all_defaults + ) + ): + rows = 0 + records = list(records) + + statement = cached_stmt + + # TODO: would be super-nice to not have to determine this boolean + # inside the loop here, in the 99.9999% of the time there's only + # one connection in use + assert_singlerow = connection.dialect.supports_sane_rowcount + assert_multirow = assert_singlerow and \ + connection.dialect.supports_sane_multi_rowcount + allow_multirow = has_all_defaults and not needs_version_id + + if bookkeeping and not has_all_defaults and \ + mapper.base_mapper.eager_defaults: + statement = statement.return_defaults() + elif mapper.version_id_col is not None: + statement = statement.return_defaults(mapper.version_id_col) + + if hasvalue: + for state, state_dict, params, mapper, \ + connection, value_params, has_all_defaults in records: + c = connection.execute( + statement.values(value_params), + params) + if bookkeeping: + _postfetch( + mapper, + uowtransaction, + table, + state, + state_dict, + c, + 
c.context.compiled_parameters[0], + value_params) + rows += c.rowcount + check_rowcount = True + else: + if not allow_multirow: + check_rowcount = assert_singlerow + for state, state_dict, params, mapper, \ + connection, value_params, has_all_defaults in records: + c = cached_connections[connection].\ + execute(statement, params) + + # TODO: why with bookkeeping=False? + _postfetch( + mapper, + uowtransaction, + table, + state, + state_dict, + c, + c.context.compiled_parameters[0], + value_params) + rows += c.rowcount + else: + multiparams = [rec[2] for rec in records] + + check_rowcount = assert_multirow or ( + assert_singlerow and + len(multiparams) == 1 + ) + + c = cached_connections[connection].\ + execute(statement, multiparams) + + rows += c.rowcount + + # TODO: why with bookkeeping=False? + for state, state_dict, params, mapper, \ + connection, value_params, has_all_defaults in records: + _postfetch( + mapper, + uowtransaction, + table, + state, + state_dict, + c, + c.context.compiled_parameters[0], + value_params) + + if check_rowcount: + if rows != len(records): + raise orm_exc.StaleDataError( + "UPDATE statement on table '%s' expected to " + "update %d row(s); %d were matched." % + (table.description, len(records), rows)) + + elif needs_version_id: + util.warn("Dialect %s does not support updated rowcount " + "- versioning cannot be verified." % + c.dialect.dialect_description) + + +def _emit_insert_statements(base_mapper, uowtransaction, + cached_connections, mapper, table, insert, + bookkeeping=True): + """Emit INSERT statements corresponding to value lists collected + by _collect_insert_commands().""" + + cached_stmt = base_mapper._memo(('insert', table), table.insert) + + for (connection, pkeys, hasvalue, has_all_pks, has_all_defaults), \ + records in groupby( + insert, + lambda rec: ( + rec[4], # connection + set(rec[2]), # parameter keys + bool(rec[5]), # whether we have "value" parameters + rec[6], + rec[7])): + + statement = cached_stmt + + if not bookkeeping or \ + ( + has_all_defaults + or not base_mapper.eager_defaults + or not connection.dialect.implicit_returning + ) and has_all_pks and not hasvalue: + + records = list(records) + multiparams = [rec[2] for rec in records] + + c = cached_connections[connection].\ + execute(statement, multiparams) + + if bookkeeping: + for (state, state_dict, params, mapper_rec, + conn, value_params, has_all_pks, has_all_defaults), \ + last_inserted_params in \ + zip(records, c.context.compiled_parameters): + _postfetch( + mapper_rec, + uowtransaction, + table, + state, + state_dict, + c, + last_inserted_params, + value_params) + + else: + if not has_all_defaults and base_mapper.eager_defaults: + statement = statement.return_defaults() + elif mapper.version_id_col is not None: + statement = statement.return_defaults(mapper.version_id_col) + + for state, state_dict, params, mapper_rec, \ + connection, value_params, \ + has_all_pks, has_all_defaults in records: + + if value_params: + result = connection.execute( + statement.values(value_params), + params) + else: + result = cached_connections[connection].\ + execute(statement, params) + + primary_key = result.context.inserted_primary_key + + if primary_key is not None: + # set primary key attributes + for pk, col in zip(primary_key, + mapper._pks_by_table[table]): + prop = mapper_rec._columntoproperty[col] + if state_dict.get(prop.key) is None: + state_dict[prop.key] = pk + _postfetch( + mapper_rec, + uowtransaction, + table, + state, + state_dict, + result, + 
result.context.compiled_parameters[0], + value_params) + + +def _emit_post_update_statements(base_mapper, uowtransaction, + cached_connections, mapper, table, update): + """Emit UPDATE statements corresponding to value lists collected + by _collect_post_update_commands().""" + + def update_stmt(): + clause = sql.and_() + + for col in mapper._pks_by_table[table]: + clause.clauses.append(col == sql.bindparam(col._label, + type_=col.type)) + + return table.update(clause) + + statement = base_mapper._memo(('post_update', table), update_stmt) + + # execute each UPDATE in the order according to the original + # list of states to guarantee row access order, but + # also group them into common (connection, cols) sets + # to support executemany(). + for key, grouper in groupby( + update, lambda rec: ( + rec[1], # connection + set(rec[0]) # parameter keys + ) + ): + connection = key[0] + multiparams = [params for params, conn in grouper] + cached_connections[connection].\ + execute(statement, multiparams) + + +def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, + mapper, table, delete): + """Emit DELETE statements corresponding to value lists collected + by _collect_delete_commands().""" + + need_version_id = mapper.version_id_col is not None and \ + mapper.version_id_col in mapper._cols_by_table[table] + + def delete_stmt(): + clause = sql.and_() + for col in mapper._pks_by_table[table]: + clause.clauses.append( + col == sql.bindparam(col.key, type_=col.type)) + + if need_version_id: + clause.clauses.append( + mapper.version_id_col == + sql.bindparam( + mapper.version_id_col.key, + type_=mapper.version_id_col.type + ) + ) + + return table.delete(clause) + + statement = base_mapper._memo(('delete', table), delete_stmt) + for connection, recs in groupby( + delete, + lambda rec: rec[1] # connection + ): + del_objects = [params for params, connection in recs] + + connection = cached_connections[connection] + + expected = len(del_objects) + rows_matched = -1 + only_warn = False + if connection.dialect.supports_sane_multi_rowcount: + c = connection.execute(statement, del_objects) + + if not need_version_id: + only_warn = True + + rows_matched = c.rowcount + + elif need_version_id: + if connection.dialect.supports_sane_rowcount: + rows_matched = 0 + # execute deletes individually so that versioned + # rows can be verified + for params in del_objects: + c = connection.execute(statement, params) + rows_matched += c.rowcount + else: + util.warn( + "Dialect %s does not support deleted rowcount " + "- versioning cannot be verified." % + connection.dialect.dialect_description, + stacklevel=12) + connection.execute(statement, del_objects) + else: + connection.execute(statement, del_objects) + + if base_mapper.confirm_deleted_rows and \ + rows_matched > -1 and expected != rows_matched: + if only_warn: + util.warn( + "DELETE statement on table '%s' expected to " + "delete %d row(s); %d were matched. Please set " + "confirm_deleted_rows=False within the mapper " + "configuration to prevent this warning." % + (table.description, expected, rows_matched) + ) + else: + raise orm_exc.StaleDataError( + "DELETE statement on table '%s' expected to " + "delete %d row(s); %d were matched. Please set " + "confirm_deleted_rows=False within the mapper " + "configuration to prevent this warning." 
% + (table.description, expected, rows_matched) + ) + + +def _finalize_insert_update_commands(base_mapper, uowtransaction, states): + """finalize state on states that have been inserted or updated, + including calling after_insert/after_update events. + + """ + for state, state_dict, mapper, connection, has_identity in states: + + if mapper._readonly_props: + readonly = state.unmodified_intersection( + [p.key for p in mapper._readonly_props + if p.expire_on_flush or p.key not in state.dict] + ) + if readonly: + state._expire_attributes(state.dict, readonly) + + # if eager_defaults option is enabled, load + # all expired cols. Else if we have a version_id_col, make sure + # it isn't expired. + toload_now = [] + + if base_mapper.eager_defaults: + toload_now.extend(state._unloaded_non_object) + elif mapper.version_id_col is not None and \ + mapper.version_id_generator is False: + if mapper._version_id_prop.key in state.unloaded: + toload_now.extend([mapper._version_id_prop.key]) + + if toload_now: + state.key = base_mapper._identity_key_from_state(state) + loading.load_on_ident( + uowtransaction.session.query(base_mapper), + state.key, refresh_state=state, + only_load_props=toload_now) + + # call after_XXX extensions + if not has_identity: + mapper.dispatch.after_insert(mapper, connection, state) + else: + mapper.dispatch.after_update(mapper, connection, state) + + +def _postfetch(mapper, uowtransaction, table, + state, dict_, result, params, value_params, bulk=False): + """Expire attributes in need of newly persisted database state, + after an INSERT or UPDATE statement has proceeded for that + state.""" + + # TODO: bulk is never non-False, need to clean this up + + prefetch_cols = result.context.compiled.prefetch + postfetch_cols = result.context.compiled.postfetch + returning_cols = result.context.compiled.returning + + if mapper.version_id_col is not None and \ + mapper.version_id_col in mapper._cols_by_table[table]: + prefetch_cols = list(prefetch_cols) + [mapper.version_id_col] + + refresh_flush = bool(mapper.class_manager.dispatch.refresh_flush) + if refresh_flush: + load_evt_attrs = [] + + if returning_cols: + row = result.context.returned_defaults + if row is not None: + for col in returning_cols: + if col.primary_key: + continue + dict_[mapper._columntoproperty[col].key] = row[col] + if refresh_flush: + load_evt_attrs.append(mapper._columntoproperty[col].key) + + for c in prefetch_cols: + if c.key in params and c in mapper._columntoproperty: + dict_[mapper._columntoproperty[c].key] = params[c.key] + if refresh_flush: + load_evt_attrs.append(mapper._columntoproperty[c].key) + + if refresh_flush and load_evt_attrs: + mapper.class_manager.dispatch.refresh_flush( + state, uowtransaction, load_evt_attrs) + + if postfetch_cols and state: + state._expire_attributes(state.dict, + [mapper._columntoproperty[c].key + for c in postfetch_cols if c in + mapper._columntoproperty] + ) + + # synchronize newly inserted ids from one table to the next + # TODO: this still goes a little too often. would be nice to + # have definitive list of "columns that changed" here + for m, equated_pairs in mapper._table_to_equated[table]: + if state is None: + sync.bulk_populate_inherit_keys(dict_, m, equated_pairs) + else: + sync.populate(state, m, state, m, + equated_pairs, + uowtransaction, + mapper.passive_updates) + + +def _connections_for_states(base_mapper, uowtransaction, states): + """Return an iterator of (state, state.dict, mapper, connection). 
+ + The states are sorted according to _sort_states, then paired + with the connection they should be using for the given + unit of work transaction. + + """ + # if session has a connection callable, + # organize individual states with the connection + # to use for update + if uowtransaction.session.connection_callable: + connection_callable = \ + uowtransaction.session.connection_callable + else: + connection = uowtransaction.transaction.connection(base_mapper) + connection_callable = None + + for state in _sort_states(states): + if connection_callable: + connection = connection_callable(base_mapper, state.obj()) + + mapper = state.manager.mapper + + yield state, state.dict, mapper, connection + + +def _cached_connection_dict(base_mapper): + # dictionary of connection->connection_with_cache_options. + return util.PopulateDict( + lambda conn: conn.execution_options( + compiled_cache=base_mapper._compiled_cache + )) + + +def _sort_states(states): + pending = set(states) + persistent = set(s for s in pending if s.key is not None) + pending.difference_update(persistent) + return sorted(pending, key=operator.attrgetter("insert_order")) + \ + sorted(persistent, key=lambda q: q.key[1]) + + +class BulkUD(object): + """Handle bulk update and deletes via a :class:`.Query`.""" + + def __init__(self, query): + self.query = query.enable_eagerloads(False) + self.mapper = self.query._bind_mapper() + self._validate_query_state() + + def _validate_query_state(self): + for attr, methname, notset, op in ( + ('_limit', 'limit()', None, operator.is_), + ('_offset', 'offset()', None, operator.is_), + ('_order_by', 'order_by()', False, operator.is_), + ('_group_by', 'group_by()', False, operator.is_), + ('_distinct', 'distinct()', False, operator.is_), + ( + '_from_obj', + 'join(), outerjoin(), select_from(), or from_self()', + (), operator.eq) + ): + if not op(getattr(self.query, attr), notset): + raise sa_exc.InvalidRequestError( + "Can't call Query.update() or Query.delete() " + "when %s has been called" % + (methname, ) + ) + + @property + def session(self): + return self.query.session + + @classmethod + def _factory(cls, lookup, synchronize_session, *arg): + try: + klass = lookup[synchronize_session] + except KeyError: + raise sa_exc.ArgumentError( + "Valid strategies for session synchronization " + "are %s" % (", ".join(sorted(repr(x) + for x in lookup)))) + else: + return klass(*arg) + + def exec_(self): + self._do_pre() + self._do_pre_synchronize() + self._do_exec() + self._do_post_synchronize() + self._do_post() + + @util.dependencies("sqlalchemy.orm.query") + def _do_pre(self, querylib): + query = self.query + self.context = querylib.QueryContext(query) + + if isinstance(query._entities[0], querylib._ColumnEntity): + # check for special case of query(table) + tables = set() + for ent in query._entities: + if not isinstance(ent, querylib._ColumnEntity): + tables.clear() + break + else: + tables.update(_from_objects(ent.column)) + + if len(tables) != 1: + raise sa_exc.InvalidRequestError( + "This operation requires only one Table or " + "entity be specified as the target." + ) + else: + self.primary_table = tables.pop() + + else: + self.primary_table = query._only_entity_zero( + "This operation requires only one Table or " + "entity be specified as the target." 
+ ).mapper.local_table + + session = query.session + + if query._autoflush: + session._autoflush() + + def _do_pre_synchronize(self): + pass + + def _do_post_synchronize(self): + pass + + +class BulkEvaluate(BulkUD): + """BulkUD which does the 'evaluate' method of session state resolution.""" + + def _additional_evaluators(self, evaluator_compiler): + pass + + def _do_pre_synchronize(self): + query = self.query + target_cls = query._mapper_zero().class_ + + try: + evaluator_compiler = evaluator.EvaluatorCompiler(target_cls) + if query.whereclause is not None: + eval_condition = evaluator_compiler.process( + query.whereclause) + else: + def eval_condition(obj): + return True + + self._additional_evaluators(evaluator_compiler) + + except evaluator.UnevaluatableError: + raise sa_exc.InvalidRequestError( + "Could not evaluate current criteria in Python. " + "Specify 'fetch' or False for the " + "synchronize_session parameter.") + + # TODO: detect when the where clause is a trivial primary key match + self.matched_objects = [ + obj for (cls, pk), obj in + query.session.identity_map.items() + if issubclass(cls, target_cls) and + eval_condition(obj)] + + +class BulkFetch(BulkUD): + """BulkUD which does the 'fetch' method of session state resolution.""" + + def _do_pre_synchronize(self): + query = self.query + session = query.session + context = query._compile_context() + select_stmt = context.statement.with_only_columns( + self.primary_table.primary_key) + self.matched_rows = session.execute( + select_stmt, + mapper=self.mapper, + params=query._params).fetchall() + + +class BulkUpdate(BulkUD): + """BulkUD which handles UPDATEs.""" + + def __init__(self, query, values, update_kwargs): + super(BulkUpdate, self).__init__(query) + self.values = values + self.update_kwargs = update_kwargs + + @classmethod + def factory(cls, query, synchronize_session, values, update_kwargs): + return BulkUD._factory({ + "evaluate": BulkUpdateEvaluate, + "fetch": BulkUpdateFetch, + False: BulkUpdate + }, synchronize_session, query, values, update_kwargs) + + def _resolve_string_to_expr(self, key): + if self.mapper and isinstance(key, util.string_types): + attr = _entity_descriptor(self.mapper, key) + return attr.__clause_element__() + else: + return key + + def _resolve_key_to_attrname(self, key): + if self.mapper and isinstance(key, util.string_types): + attr = _entity_descriptor(self.mapper, key) + return attr.property.key + elif isinstance(key, attributes.InstrumentedAttribute): + return key.key + elif hasattr(key, '__clause_element__'): + key = key.__clause_element__() + + if self.mapper and isinstance(key, expression.ColumnElement): + try: + attr = self.mapper._columntoproperty[key] + except orm_exc.UnmappedColumnError: + return None + else: + return attr.key + else: + raise sa_exc.InvalidRequestError( + "Invalid expression type: %r" % key) + + def _do_exec(self): + + values = [ + (self._resolve_string_to_expr(k), v) + for k, v in ( + self.values.items() if hasattr(self.values, 'items') + else self.values) + ] + if not self.update_kwargs.get('preserve_parameter_order', False): + values = dict(values) + + update_stmt = sql.update(self.primary_table, + self.context.whereclause, values, + **self.update_kwargs) + + self.result = self.query.session.execute( + update_stmt, params=self.query._params, + mapper=self.mapper) + self.rowcount = self.result.rowcount + + def _do_post(self): + session = self.query.session + session.dispatch.after_bulk_update(self) + + +class BulkDelete(BulkUD): + """BulkUD which handles 
DELETEs.""" + + def __init__(self, query): + super(BulkDelete, self).__init__(query) + + @classmethod + def factory(cls, query, synchronize_session): + return BulkUD._factory({ + "evaluate": BulkDeleteEvaluate, + "fetch": BulkDeleteFetch, + False: BulkDelete + }, synchronize_session, query) + + def _do_exec(self): + delete_stmt = sql.delete(self.primary_table, + self.context.whereclause) + + self.result = self.query.session.execute( + delete_stmt, + params=self.query._params, + mapper=self.mapper) + self.rowcount = self.result.rowcount + + def _do_post(self): + session = self.query.session + session.dispatch.after_bulk_delete(self) + + +class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate): + """BulkUD which handles UPDATEs using the "evaluate" + method of session resolution.""" + + def _additional_evaluators(self, evaluator_compiler): + self.value_evaluators = {} + values = (self.values.items() if hasattr(self.values, 'items') + else self.values) + for key, value in values: + key = self._resolve_key_to_attrname(key) + if key is not None: + self.value_evaluators[key] = evaluator_compiler.process( + expression._literal_as_binds(value)) + + def _do_post_synchronize(self): + session = self.query.session + states = set() + evaluated_keys = list(self.value_evaluators.keys()) + for obj in self.matched_objects: + state, dict_ = attributes.instance_state(obj),\ + attributes.instance_dict(obj) + + # only evaluate unmodified attributes + to_evaluate = state.unmodified.intersection( + evaluated_keys) + for key in to_evaluate: + dict_[key] = self.value_evaluators[key](obj) + + state._commit(dict_, list(to_evaluate)) + + # expire attributes with pending changes + # (there was no autoflush, so they are overwritten) + state._expire_attributes(dict_, + set(evaluated_keys). 
+ difference(to_evaluate))
+ states.add(state)
+ session._register_altered(states)
+
+
+class BulkDeleteEvaluate(BulkEvaluate, BulkDelete):
+ """BulkUD which handles DELETEs using the "evaluate"
+ method of session resolution."""
+
+ def _do_post_synchronize(self):
+ self.query.session._remove_newly_deleted(
+ [attributes.instance_state(obj)
+ for obj in self.matched_objects])
+
+
+class BulkUpdateFetch(BulkFetch, BulkUpdate):
+ """BulkUD which handles UPDATEs using the "fetch"
+ method of session resolution."""
+
+ def _do_post_synchronize(self):
+ session = self.query.session
+ target_mapper = self.query._mapper_zero()
+
+ states = set([
+ attributes.instance_state(session.identity_map[identity_key])
+ for identity_key in [
+ target_mapper.identity_key_from_primary_key(
+ list(primary_key))
+ for primary_key in self.matched_rows
+ ]
+ if identity_key in session.identity_map
+ ])
+ attrib = [_attr_as_key(k) for k in self.values]
+ for state in states:
+ session._expire_state(state, attrib)
+ session._register_altered(states)
+
+
+class BulkDeleteFetch(BulkFetch, BulkDelete):
+ """BulkUD which handles DELETEs using the "fetch"
+ method of session resolution."""
+
+ def _do_post_synchronize(self):
+ session = self.query.session
+ target_mapper = self.query._mapper_zero()
+ for primary_key in self.matched_rows:
+ # TODO: inline this and call remove_newly_deleted
+ # once
+ identity_key = target_mapper.identity_key_from_primary_key(
+ list(primary_key))
+ if identity_key in session.identity_map:
+ session._remove_newly_deleted(
+ [attributes.instance_state(
+ session.identity_map[identity_key]
+ )]
+ )
diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/properties.py b/lib/python3.4/site-packages/sqlalchemy/orm/properties.py
new file mode 100644
index 0000000..f8a3532
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/orm/properties.py
@@ -0,0 +1,276 @@
+# orm/properties.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""MapperProperty implementations.
+
+This is a private module which defines the behavior of individual ORM-
+mapped attributes.
+
+"""
+from __future__ import absolute_import
+
+from .. import util, log
+from ..sql import expression
+from . import attributes
+from .util import _orm_full_deannotate
+
+from .interfaces import PropComparator, StrategizedProperty
+
+__all__ = ['ColumnProperty', 'CompositeProperty', 'SynonymProperty',
+ 'ComparableProperty', 'RelationshipProperty']
+
+
+@log.class_logger
+class ColumnProperty(StrategizedProperty):
+ """Describes an object attribute that corresponds to a table column.
+
+ Public constructor is the :func:`.orm.column_property` function.
+
+ """
+
+ strategy_wildcard_key = 'column'
+
+ __slots__ = (
+ '_orig_columns', 'columns', 'group', 'deferred',
+ 'instrument', 'comparator_factory', 'descriptor', 'extension',
+ 'active_history', 'expire_on_flush', 'info', 'doc',
+ 'strategy_class', '_creation_order', '_is_polymorphic_discriminator',
+ '_mapped_by_synonym', '_deferred_column_loader')
+
+ def __init__(self, *columns, **kwargs):
+ """Provide a column-level property for use with a Mapper.
+
+ Column-based properties can normally be applied to the mapper's
+ ``properties`` dictionary using the :class:`.Column` element directly.
+ Use this function when the given column is not directly present within + the mapper's selectable; examples include SQL expressions, functions, + and scalar SELECT queries. + + Columns that aren't present in the mapper's selectable won't be + persisted by the mapper and are effectively "read-only" attributes. + + :param \*cols: + list of Column objects to be mapped. + + :param active_history=False: + When ``True``, indicates that the "previous" value for a + scalar attribute should be loaded when replaced, if not + already loaded. Normally, history tracking logic for + simple non-primary-key scalar values only needs to be + aware of the "new" value in order to perform a flush. This + flag is available for applications that make use of + :func:`.attributes.get_history` or :meth:`.Session.is_modified` + which also need to know + the "previous" value of the attribute. + + .. versionadded:: 0.6.6 + + :param comparator_factory: a class which extends + :class:`.ColumnProperty.Comparator` which provides custom SQL + clause generation for comparison operations. + + :param group: + a group name for this property when marked as deferred. + + :param deferred: + when True, the column property is "deferred", meaning that + it does not load immediately, and is instead loaded when the + attribute is first accessed on an instance. See also + :func:`~sqlalchemy.orm.deferred`. + + :param doc: + optional string that will be applied as the doc on the + class-bound descriptor. + + :param expire_on_flush=True: + Disable expiry on flush. A column_property() which refers + to a SQL expression (and not a single table-bound column) + is considered to be a "read only" property; populating it + has no effect on the state of data, and it can only return + database state. For this reason a column_property()'s value + is expired whenever the parent object is involved in a + flush, that is, has any kind of "dirty" state within a flush. + Setting this parameter to ``False`` will have the effect of + leaving any existing value present after the flush proceeds. + Note however that the :class:`.Session` with default expiration + settings still expires + all attributes after a :meth:`.Session.commit` call. + + .. versionadded:: 0.7.3 + + :param info: Optional data dictionary which will be populated into the + :attr:`.MapperProperty.info` attribute of this object. + + .. versionadded:: 0.8 + + :param extension: + an + :class:`.AttributeExtension` + instance, or list of extensions, which will be prepended + to the list of attribute listeners for the resulting + descriptor placed on the class. + **Deprecated.** Please see :class:`.AttributeEvents`.
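For illustration, a minimal sketch of the constructor documented above, assuming a hypothetical ``User``/``Address`` declarative mapping (these class and column names are examples, not part of this module)::

    from sqlalchemy import (Column, ForeignKey, Integer, String,
                            func, select)
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import column_property

    Base = declarative_base()

    class Address(Base):
        __tablename__ = 'addresses'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('users.id'))
        email = Column(String)

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String)

        # a read-only attribute derived from a SQL expression;
        # deferred=True postpones loading until first access
        name_upper = column_property(func.upper(name), deferred=True)

        # a correlated scalar SELECT mapped as a column-level property
        address_count = column_property(
            select([func.count(Address.id)]).
            where(Address.user_id == id).
            correlate_except(Address))

Both attributes are effectively "read-only" as described above; the scalar SELECT value is re-expired on flush unless ``expire_on_flush=False`` is passed.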
+ + """ + super(ColumnProperty, self).__init__() + self._orig_columns = [expression._labeled(c) for c in columns] + self.columns = [expression._labeled(_orm_full_deannotate(c)) + for c in columns] + self.group = kwargs.pop('group', None) + self.deferred = kwargs.pop('deferred', False) + self.instrument = kwargs.pop('_instrument', True) + self.comparator_factory = kwargs.pop('comparator_factory', + self.__class__.Comparator) + self.descriptor = kwargs.pop('descriptor', None) + self.extension = kwargs.pop('extension', None) + self.active_history = kwargs.pop('active_history', False) + self.expire_on_flush = kwargs.pop('expire_on_flush', True) + + if 'info' in kwargs: + self.info = kwargs.pop('info') + + if 'doc' in kwargs: + self.doc = kwargs.pop('doc') + else: + for col in reversed(self.columns): + doc = getattr(col, 'doc', None) + if doc is not None: + self.doc = doc + break + else: + self.doc = None + + if kwargs: + raise TypeError( + "%s received unexpected keyword argument(s): %s" % ( + self.__class__.__name__, + ', '.join(sorted(kwargs.keys())))) + + util.set_creation_order(self) + + self.strategy_class = self._strategy_lookup( + ("deferred", self.deferred), + ("instrument", self.instrument) + ) + + @util.dependencies("sqlalchemy.orm.state", "sqlalchemy.orm.strategies") + def _memoized_attr__deferred_column_loader(self, state, strategies): + return state.InstanceState._instance_level_callable_processor( + self.parent.class_manager, + strategies.LoadDeferredColumns(self.key), self.key) + + @property + def expression(self): + """Return the primary column or expression for this ColumnProperty. + + """ + return self.columns[0] + + def instrument_class(self, mapper): + if not self.instrument: + return + + attributes.register_descriptor( + mapper.class_, + self.key, + comparator=self.comparator_factory(self, mapper), + parententity=mapper, + doc=self.doc + ) + + def do_init(self): + super(ColumnProperty, self).do_init() + if len(self.columns) > 1 and \ + set(self.parent.primary_key).issuperset(self.columns): + util.warn( + ("On mapper %s, primary key column '%s' is being combined " + "with distinct primary key column '%s' in attribute '%s'. " + "Use explicit properties to give each column its own mapped " + "attribute name.") % (self.parent, self.columns[1], + self.columns[0], self.key)) + + def copy(self): + return ColumnProperty( + deferred=self.deferred, + group=self.group, + active_history=self.active_history, + *self.columns) + + def _getcommitted(self, state, dict_, column, + passive=attributes.PASSIVE_OFF): + return state.get_impl(self.key).\ + get_committed_value(state, dict_, passive=passive) + + def merge(self, session, source_state, source_dict, dest_state, + dest_dict, load, _recursive): + if not self.instrument: + return + elif self.key in source_dict: + value = source_dict[self.key] + + if not load: + dest_dict[self.key] = value + else: + impl = dest_state.get_impl(self.key) + impl.set(dest_state, dest_dict, value, None) + elif dest_state.has_identity and self.key not in dest_dict: + dest_state._expire_attributes(dest_dict, [self.key]) + + class Comparator(util.MemoizedSlots, PropComparator): + """Produce boolean, comparison, and other operators for + :class:`.ColumnProperty` attributes. + + See the documentation for :class:`.PropComparator` for a brief + overview. 
+ + See also: + + :class:`.PropComparator` + + :class:`.ColumnOperators` + + :ref:`types_operators` + + :attr:`.TypeEngine.comparator_factory` + + """ + + __slots__ = '__clause_element__', 'info' + + def _memoized_method___clause_element__(self): + if self.adapter: + return self.adapter(self.prop.columns[0]) + else: + # no adapter, so we aren't aliased + # assert self._parententity is self._parentmapper + return self.prop.columns[0]._annotate({ + "parententity": self._parententity, + "parentmapper": self._parententity}) + + def _memoized_attr_info(self): + ce = self.__clause_element__() + try: + return ce.info + except AttributeError: + return self.prop.info + + def _fallback_getattr(self, key): + """proxy attribute access down to the mapped column. + + this allows user-defined comparison methods to be accessed. + """ + return getattr(self.__clause_element__(), key) + + def operate(self, op, *other, **kwargs): + return op(self.__clause_element__(), *other, **kwargs) + + def reverse_operate(self, op, other, **kwargs): + col = self.__clause_element__() + return op(col._bind_param(op, other), col, **kwargs) + + def __str__(self): + return str(self.parent.class_.__name__) + "." + self.key diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/query.py b/lib/python3.4/site-packages/sqlalchemy/orm/query.py new file mode 100644 index 0000000..335832d --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/query.py @@ -0,0 +1,4019 @@ +# orm/query.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""The Query class and support. + +Defines the :class:`.Query` class, the central +construct used by the ORM to construct database queries. + +The :class:`.Query` class should not be confused with the +:class:`.Select` class, which defines database +SELECT operations at the SQL (non-ORM) level. ``Query`` differs from +``Select`` in that it returns ORM-mapped objects and interacts with an +ORM session, whereas the ``Select`` construct interacts directly with the +database to return iterable result sets. + +""" + +from itertools import chain + +from . import ( + attributes, interfaces, object_mapper, persistence, + exc as orm_exc, loading +) +from .base import _entity_descriptor, _is_aliased_class, \ + _is_mapped_class, _orm_columns, _generative, InspectionAttr +from .path_registry import PathRegistry +from .util import ( + AliasedClass, ORMAdapter, join as orm_join, with_parent, aliased +) +from .. import sql, util, log, exc as sa_exc, inspect, inspection +from ..sql.expression import _interpret_as_from +from ..sql import ( + util as sql_util, + expression, visitors +) +from ..sql.base import ColumnCollection +from . import properties + +__all__ = ['Query', 'QueryContext', 'aliased'] + + +_path_registry = PathRegistry.root + + +@inspection._self_inspects +@log.class_logger +class Query(object): + """ORM-level SQL construction object. + + :class:`.Query` is the source of all SELECT statements generated by the + ORM, both those formulated by end-user query operations as well as by + high level internal operations such as related collection loading. It + features a generative interface whereby successive calls return a new + :class:`.Query` object, a copy of the former with additional + criteria and options associated with it. 
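As a quick illustration of the generative interface just described, a runnable sketch assuming a hypothetical ``User`` mapping against an in-memory SQLite database::

    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)

    q1 = session.query(User)
    q2 = q1.filter(User.name == 'ed')  # returns a new Query; q1 unchanged
    q3 = q2.order_by(User.id)          # again a copy, with added criteria

    assert q1 is not q2 and q2 is not q3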
+ + :class:`.Query` objects are normally initially generated using the + :meth:`~.Session.query` method of :class:`.Session`. For a full + walkthrough of :class:`.Query` usage, see the + :ref:`ormtutorial_toplevel`. + + """ + + _enable_eagerloads = True + _enable_assertions = True + _with_labels = False + _criterion = None + _yield_per = None + _order_by = False + _group_by = False + _having = None + _distinct = False + _prefixes = None + _suffixes = None + _offset = None + _limit = None + _for_update_arg = None + _statement = None + _correlate = frozenset() + _populate_existing = False + _invoke_all_eagers = True + _version_check = False + _autoflush = True + _only_load_props = None + _refresh_state = None + _from_obj = () + _join_entities = () + _select_from_entity = None + _mapper_adapter_map = {} + _filter_aliases = None + _from_obj_alias = None + _joinpath = _joinpoint = util.immutabledict() + _execution_options = util.immutabledict() + _params = util.immutabledict() + _attributes = util.immutabledict() + _with_options = () + _with_hints = () + _enable_single_crit = True + _orm_only_adapt = True + _orm_only_from_obj_alias = True + _current_path = _path_registry + + def __init__(self, entities, session=None): + self.session = session + self._polymorphic_adapters = {} + self._set_entities(entities) + + def _set_entities(self, entities, entity_wrapper=None): + if entity_wrapper is None: + entity_wrapper = _QueryEntity + self._entities = [] + self._primary_entity = None + for ent in util.to_list(entities): + entity_wrapper(self, ent) + + self._set_entity_selectables(self._entities) + + def _set_entity_selectables(self, entities): + self._mapper_adapter_map = d = self._mapper_adapter_map.copy() + + for ent in entities: + for entity in ent.entities: + if entity not in d: + ext_info = inspect(entity) + if not ext_info.is_aliased_class and \ + ext_info.mapper.with_polymorphic: + if ext_info.mapper.mapped_table not in \ + self._polymorphic_adapters: + self._mapper_loads_polymorphically_with( + ext_info.mapper, + sql_util.ColumnAdapter( + ext_info.selectable, + ext_info.mapper._equivalent_columns + ) + ) + aliased_adapter = None + elif ext_info.is_aliased_class: + aliased_adapter = ext_info._adapter + else: + aliased_adapter = None + + d[entity] = ( + ext_info, + aliased_adapter + ) + ent.setup_entity(*d[entity]) + + def _mapper_loads_polymorphically_with(self, mapper, adapter): + for m2 in mapper._with_polymorphic_mappers or [mapper]: + self._polymorphic_adapters[m2] = adapter + for m in m2.iterate_to_root(): + self._polymorphic_adapters[m.local_table] = adapter + + def _set_select_from(self, obj, set_base_alias): + fa = [] + select_from_alias = None + + for from_obj in obj: + info = inspect(from_obj) + if hasattr(info, 'mapper') and \ + (info.is_mapper or info.is_aliased_class): + self._select_from_entity = from_obj + if set_base_alias: + raise sa_exc.ArgumentError( + "A selectable (FromClause) instance is " + "expected when the base alias is being set.") + fa.append(info.selectable) + elif not info.is_selectable: + raise sa_exc.ArgumentError( + "argument is not a mapped class, mapper, " + "aliased(), or FromClause instance.") + else: + if isinstance(from_obj, expression.SelectBase): + from_obj = from_obj.alias() + if set_base_alias: + select_from_alias = from_obj + fa.append(from_obj) + + self._from_obj = tuple(fa) + + if set_base_alias and \ + len(self._from_obj) == 1 and \ + isinstance(select_from_alias, expression.Alias): + equivs = self.__all_equivs() + self._from_obj_alias = 
sql_util.ColumnAdapter( + self._from_obj[0], equivs) + + def _reset_polymorphic_adapter(self, mapper): + for m2 in mapper._with_polymorphic_mappers: + self._polymorphic_adapters.pop(m2, None) + for m in m2.iterate_to_root(): + self._polymorphic_adapters.pop(m.local_table, None) + + def _adapt_polymorphic_element(self, element): + if "parententity" in element._annotations: + search = element._annotations['parententity'] + alias = self._polymorphic_adapters.get(search, None) + if alias: + return alias.adapt_clause(element) + + if isinstance(element, expression.FromClause): + search = element + elif hasattr(element, 'table'): + search = element.table + else: + return None + + alias = self._polymorphic_adapters.get(search, None) + if alias: + return alias.adapt_clause(element) + + def _adapt_col_list(self, cols): + return [ + self._adapt_clause( + expression._literal_as_label_reference(o), + True, True) + for o in cols + ] + + @_generative() + def _adapt_all_clauses(self): + self._orm_only_adapt = False + + def _adapt_clause(self, clause, as_filter, orm_only): + """Adapt incoming clauses to transformations which + have been applied within this query.""" + + adapters = [] + # do we adapt all expression elements or only those + # tagged as 'ORM' constructs ? + if not self._orm_only_adapt: + orm_only = False + + if as_filter and self._filter_aliases: + for fa in self._filter_aliases._visitor_iterator: + adapters.append( + ( + orm_only, fa.replace + ) + ) + + if self._from_obj_alias: + # for the "from obj" alias, apply extra rule to the + # 'ORM only' check, if this query were generated from a + # subquery of itself, i.e. _from_selectable(), apply adaption + # to all SQL constructs. + adapters.append( + ( + orm_only if self._orm_only_from_obj_alias else False, + self._from_obj_alias.replace + ) + ) + + if self._polymorphic_adapters: + adapters.append( + ( + orm_only, self._adapt_polymorphic_element + ) + ) + + if not adapters: + return clause + + def replace(elem): + for _orm_only, adapter in adapters: + # if 'orm only', look for ORM annotations + # in the element before adapting. + if not _orm_only or \ + '_orm_adapt' in elem._annotations or \ + "parententity" in elem._annotations: + + e = adapter(elem) + if e is not None: + return e + + return visitors.replacement_traverse( + clause, + {}, + replace + ) + + def _entity_zero(self): + return self._entities[0] + + def _mapper_zero(self): + # TODO: self._select_from_entity is not a mapper + # so this method is misnamed + return self._select_from_entity \ + if self._select_from_entity is not None \ + else self._entity_zero().entity_zero + + @property + def _mapper_entities(self): + for ent in self._entities: + if isinstance(ent, _MapperEntity): + yield ent + + def _joinpoint_zero(self): + return self._joinpoint.get( + '_joinpoint_entity', + self._mapper_zero() + ) + + def _bind_mapper(self): + ezero = self._mapper_zero() + if ezero is not None: + insp = inspect(ezero) + if not insp.is_clause_element: + return insp.mapper + + return None + + def _only_mapper_zero(self, rationale=None): + if len(self._entities) > 1: + raise sa_exc.InvalidRequestError( + rationale or + "This operation requires a Query " + "against a single mapper." + ) + return self._mapper_zero() + + def _only_full_mapper_zero(self, methname): + if self._entities != [self._primary_entity]: + raise sa_exc.InvalidRequestError( + "%s() can only be used against " + "a single mapped class." 
% methname) + return self._primary_entity.entity_zero + + def _only_entity_zero(self, rationale=None): + if len(self._entities) > 1: + raise sa_exc.InvalidRequestError( + rationale or + "This operation requires a Query " + "against a single mapper." + ) + return self._entity_zero() + + def __all_equivs(self): + equivs = {} + for ent in self._mapper_entities: + equivs.update(ent.mapper._equivalent_columns) + return equivs + + def _get_condition(self): + return self._no_criterion_condition( + "get", order_by=False, distinct=False) + + def _get_existing_condition(self): + self._no_criterion_assertion("get", order_by=False, distinct=False) + + def _no_criterion_assertion(self, meth, order_by=True, distinct=True): + if not self._enable_assertions: + return + if self._criterion is not None or \ + self._statement is not None or self._from_obj or \ + self._limit is not None or self._offset is not None or \ + self._group_by or (order_by and self._order_by) or \ + (distinct and self._distinct): + raise sa_exc.InvalidRequestError( + "Query.%s() being called on a " + "Query with existing criterion. " % meth) + + def _no_criterion_condition(self, meth, order_by=True, distinct=True): + self._no_criterion_assertion(meth, order_by, distinct) + + self._from_obj = () + self._statement = self._criterion = None + self._order_by = self._group_by = self._distinct = False + + def _no_clauseelement_condition(self, meth): + if not self._enable_assertions: + return + if self._order_by: + raise sa_exc.InvalidRequestError( + "Query.%s() being called on a " + "Query with existing criterion. " % meth) + self._no_criterion_condition(meth) + + def _no_statement_condition(self, meth): + if not self._enable_assertions: + return + if self._statement is not None: + raise sa_exc.InvalidRequestError( + ("Query.%s() being called on a Query with an existing full " + "statement - can't apply criterion.") % meth) + + def _no_limit_offset(self, meth): + if not self._enable_assertions: + return + if self._limit is not None or self._offset is not None: + raise sa_exc.InvalidRequestError( + "Query.%s() being called on a Query which already has LIMIT " + "or OFFSET applied. To modify the row-limited results of a " + " Query, call from_self() first. " + "Otherwise, call %s() before limit() or offset() " + "are applied." + % (meth, meth) + ) + + def _get_options(self, populate_existing=None, + version_check=None, + only_load_props=None, + refresh_state=None): + if populate_existing: + self._populate_existing = populate_existing + if version_check: + self._version_check = version_check + if refresh_state: + self._refresh_state = refresh_state + if only_load_props: + self._only_load_props = set(only_load_props) + return self + + def _clone(self): + cls = self.__class__ + q = cls.__new__(cls) + q.__dict__ = self.__dict__.copy() + return q + + @property + def statement(self): + """The full SELECT statement represented by this Query. + + The statement by default will not have disambiguating labels + applied to the construct unless with_labels(True) is called + first. + + """ + + stmt = self._compile_context(labels=self._with_labels).\ + statement + if self._params: + stmt = stmt.params(self._params) + + # TODO: there's no tests covering effects of + # the annotation not being there + return stmt._annotate({'no_replacement_traverse': True}) + + def subquery(self, name=None, with_labels=False, reduce_columns=False): + """return the full SELECT statement represented by + this :class:`.Query`, embedded within an :class:`.Alias`. 
+ + Eager JOIN generation within the query is disabled. + + :param name: string name to be assigned as the alias; + this is passed through to :meth:`.FromClause.alias`. + If ``None``, a name will be deterministically generated + at compile time. + + :param with_labels: if True, :meth:`.with_labels` will be called + on the :class:`.Query` first to apply table-qualified labels + to all columns. + + :param reduce_columns: if True, :meth:`.Select.reduce_columns` will + be called on the resulting :func:`.select` construct, + to remove same-named columns where one also refers to the other + via foreign key or WHERE clause equivalence. + + .. versionchanged:: 0.8 the ``with_labels`` and ``reduce_columns`` + keyword arguments were added. + + """ + q = self.enable_eagerloads(False) + if with_labels: + q = q.with_labels() + q = q.statement + if reduce_columns: + q = q.reduce_columns() + return q.alias(name=name) + + def cte(self, name=None, recursive=False): + """Return the full SELECT statement represented by this + :class:`.Query` represented as a common table expression (CTE). + + .. versionadded:: 0.7.6 + + Parameters and usage are the same as those of the + :meth:`.SelectBase.cte` method; see that method for + further details. + + Here is the `Postgresql WITH + RECURSIVE example + <http://www.postgresql.org/docs/current/static/queries-with.html>`_. + Note that, in this example, the ``included_parts`` cte and the + ``incl_alias`` alias of it are Core selectables, which + means the columns are accessed via the ``.c.`` attribute. The + ``parts_alias`` object is an :func:`.orm.aliased` instance of the + ``Part`` entity, so column-mapped attributes are available + directly:: + + from sqlalchemy.orm import aliased + + class Part(Base): + __tablename__ = 'part' + part = Column(String, primary_key=True) + sub_part = Column(String, primary_key=True) + quantity = Column(Integer) + + included_parts = session.query( + Part.sub_part, + Part.part, + Part.quantity).\\ + filter(Part.part=="our part").\\ + cte(name="included_parts", recursive=True) + + incl_alias = aliased(included_parts, name="pr") + parts_alias = aliased(Part, name="p") + included_parts = included_parts.union_all( + session.query( + parts_alias.sub_part, + parts_alias.part, + parts_alias.quantity).\\ + filter(parts_alias.part==incl_alias.c.sub_part) + ) + + q = session.query( + included_parts.c.sub_part, + func.sum(included_parts.c.quantity). + label('total_quantity') + ).\\ + group_by(included_parts.c.sub_part) + + .. seealso:: + + :meth:`.SelectBase.cte` + + """ + return self.enable_eagerloads(False).\ + statement.cte(name=name, recursive=recursive) + + def label(self, name): + """Return the full SELECT statement represented by this + :class:`.Query`, converted + to a scalar subquery with a label of the given name. + + Analogous to :meth:`sqlalchemy.sql.expression.SelectBase.label`. + + .. versionadded:: 0.6.5 + + """ + + return self.enable_eagerloads(False).statement.label(name) + + def as_scalar(self): + """Return the full SELECT statement represented by this + :class:`.Query`, converted to a scalar subquery. + + Analogous to :meth:`sqlalchemy.sql.expression.SelectBase.as_scalar`. + + .. versionadded:: 0.6.5 + + """ + + return self.enable_eagerloads(False).statement.as_scalar() + + @property + def selectable(self): + """Return the :class:`.Select` object emitted by this :class:`.Query`.
+ + Used for :func:`.inspect` compatibility, this is equivalent to:: + + query.enable_eagerloads(False).with_labels().statement + + """ + return self.__clause_element__() + + def __clause_element__(self): + return self.enable_eagerloads(False).with_labels().statement + + @_generative() + def enable_eagerloads(self, value): + """Control whether or not eager joins and subqueries are + rendered. + + When set to False, the returned Query will not render + eager joins regardless of :func:`~sqlalchemy.orm.joinedload`, + :func:`~sqlalchemy.orm.subqueryload` options + or mapper-level ``lazy='joined'``/``lazy='subquery'`` + configurations. + + This is used primarily when nesting the Query's + statement into a subquery or other + selectable, or when using :meth:`.Query.yield_per`. + + """ + self._enable_eagerloads = value + + def _no_yield_per(self, message): + raise sa_exc.InvalidRequestError( + "The yield_per Query option is currently not " + "compatible with %s eager loading. Please " + "specify lazyload('*') or query.enable_eagerloads(False) in " + "order to " + "proceed with query.yield_per()." % message) + + @_generative() + def with_labels(self): + """Apply column labels to the return value of Query.statement. + + Indicates that this Query's `statement` accessor should return + a SELECT statement that applies labels to all columns in the + form <tablename>_<columnname>; this is commonly used to + disambiguate columns from multiple tables which have the same + name. + + When the `Query` actually issues SQL to load rows, it always + uses column labeling. + + .. note:: The :meth:`.Query.with_labels` method *only* applies to + the output of :attr:`.Query.statement`, and *not* to any of + the result-row invoking systems of :class:`.Query` itself, e.g. + :meth:`.Query.first`, :meth:`.Query.all`, etc. To execute + a query using :meth:`.Query.with_labels`, invoke the + :attr:`.Query.statement` using :meth:`.Session.execute`:: + + result = session.execute(query.with_labels().statement) + + + """ + self._with_labels = True + + @_generative() + def enable_assertions(self, value): + """Control whether assertions are generated. + + When set to False, the returned Query will + not assert its state before certain operations, + including that LIMIT/OFFSET has not been applied + when filter() is called, no criterion exists + when get() is called, and no "from_statement()" + exists when filter()/order_by()/group_by() etc. + is called. This more permissive mode is used by + custom Query subclasses to specify criterion or + other modifiers outside of the usual usage patterns. + + Care should be taken to ensure that the usage + pattern is even possible. A statement applied + by from_statement() will override any criterion + set by filter() or order_by(), for example. + + """ + self._enable_assertions = value + + @property + def whereclause(self): + """A readonly attribute which returns the current WHERE criterion for + this Query. + + This returned value is a SQL expression construct, or ``None`` if no + criterion has been established. + + """ + return self._criterion + + @_generative() + def _with_current_path(self, path): + """indicate that this query applies to objects loaded + within a certain path. + + Used by deferred loaders (see strategies.py) which transfer + query options from an originating query to a newly generated + query intended for the deferred load.
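Tying the preceding accessors together, a brief sketch, reusing the hypothetical ``User`` mapping and ``session`` from the earlier sketch::

    q = session.query(User).filter(User.name == 'ed')

    # the accumulated WHERE criterion: a SQL expression, or None
    print(q.whereclause)  # users.name = :name_1

    # per the note above, with_labels() affects only .statement;
    # execute the labeled statement through the Session directly
    stmt = q.with_labels().statement
    rows = session.execute(stmt).fetchall()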
+ + """ + self._current_path = path + + @_generative(_no_clauseelement_condition) + def with_polymorphic(self, + cls_or_mappers, + selectable=None, + polymorphic_on=None): + """Load columns for inheriting classes. + + :meth:`.Query.with_polymorphic` applies transformations + to the "main" mapped class represented by this :class:`.Query`. + The "main" mapped class here means the :class:`.Query` + object's first argument is a full class, i.e. + ``session.query(SomeClass)``. These transformations allow additional + tables to be present in the FROM clause so that columns for a + joined-inheritance subclass are available in the query, both for the + purposes of load-time efficiency as well as the ability to use + these columns at query time. + + See the documentation section :ref:`with_polymorphic` for + details on how this method is used. + + .. versionchanged:: 0.8 + A new and more flexible function + :func:`.orm.with_polymorphic` supersedes + :meth:`.Query.with_polymorphic`, as it can apply the equivalent + functionality to any set of columns or classes in the + :class:`.Query`, not just the "zero mapper". See that + function for a description of arguments. + + """ + + if not self._primary_entity: + raise sa_exc.InvalidRequestError( + "No primary mapper set up for this Query.") + entity = self._entities[0]._clone() + self._entities = [entity] + self._entities[1:] + entity.set_with_polymorphic(self, + cls_or_mappers, + selectable=selectable, + polymorphic_on=polymorphic_on) + + @_generative() + def yield_per(self, count): + """Yield only ``count`` rows at a time. + + The purpose of this method is when fetching very large result sets + (> 10K rows), to batch results in sub-collections and yield them + out partially, so that the Python interpreter doesn't need to declare + very large areas of memory which is both time consuming and leads + to excessive memory use. The performance from fetching hundreds of + thousands of rows can often double when a suitable yield-per setting + (e.g. approximately 1000) is used, even with DBAPIs that buffer + rows (which are most). + + The :meth:`.Query.yield_per` method **is not compatible with most + eager loading schemes, including subqueryload and joinedload with + collections**. For this reason, it may be helpful to disable + eager loads, either unconditionally with + :meth:`.Query.enable_eagerloads`:: + + q = sess.query(Object).yield_per(100).enable_eagerloads(False) + + Or more selectively using :func:`.lazyload`; such as with + an asterisk to specify the default loader scheme:: + + q = sess.query(Object).yield_per(100).\\ + options(lazyload('*'), joinedload(Object.some_related)) + + .. warning:: + + Use this method with caution; if the same instance is + present in more than one batch of rows, end-user changes + to attributes will be overwritten. + + In particular, it's usually impossible to use this setting + with eagerly loaded collections (i.e. any lazy='joined' or + 'subquery') since those collections will be cleared for a + new load when encountered in a subsequent result batch. + In the case of 'subquery' loading, the full result for all + rows is fetched which generally defeats the purpose of + :meth:`~sqlalchemy.orm.query.Query.yield_per`. 
+ + Also note that while + :meth:`~sqlalchemy.orm.query.Query.yield_per` will set the + ``stream_results`` execution option to True, currently + this is only understood by the + :mod:`~sqlalchemy.dialects.postgresql.psycopg2` dialect, + which will stream results using server side cursors + instead of pre-buffering all rows for this query. Other + DBAPIs **pre-buffer all rows** before making them + available. The memory use of raw database rows is much less + than that of an ORM-mapped object, but should still be taken into + consideration when benchmarking. + + .. seealso:: + + :meth:`.Query.enable_eagerloads` + + """ + self._yield_per = count + self._execution_options = self._execution_options.union( + {"stream_results": True, + "max_row_buffer": count}) + + def get(self, ident): + """Return an instance based on the given primary key identifier, + or ``None`` if not found. + + E.g.:: + + my_user = session.query(User).get(5) + + some_object = session.query(VersionedFoo).get((5, 10)) + + :meth:`~.Query.get` is special in that it provides direct + access to the identity map of the owning :class:`.Session`. + If the given primary key identifier is present + in the local identity map, the object is returned + directly from this collection and no SQL is emitted, + unless the object has been marked fully expired. + If not present, + a SELECT is performed in order to locate the object. + + :meth:`~.Query.get` also will perform a check if + the object is present in the identity map and + marked as expired - a SELECT + is emitted to refresh the object as well as to + ensure that the row is still present. + If not, :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised. + + :meth:`~.Query.get` is only used to return a single + mapped instance, not multiple instances or + individual column constructs, and strictly + on a single primary key value. The originating + :class:`.Query` must be constructed in this way, + i.e. against a single mapped entity, + with no additional filtering criterion. Loading + options via :meth:`~.Query.options` may be applied + however, and will be used if the object is not + yet locally present. + + A lazy-loading, many-to-one attribute configured + by :func:`.relationship`, using a simple + foreign-key-to-primary-key criterion, will also use an + operation equivalent to :meth:`~.Query.get` in order to retrieve + the target value from the local identity map + before querying the database. See :doc:`/orm/loading_relationships` + for further details on relationship loading. + + :param ident: A scalar or tuple value representing + the primary key. For a composite primary key, + the order of identifiers corresponds in most cases + to that of the mapped :class:`.Table` object's + primary key columns. For a :func:`.mapper` that + was given the ``primary_key`` argument during + construction, the order of identifiers corresponds + to the elements present in this collection. + + :return: The object instance, or ``None``.
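A short sketch of the identity-map behavior described above, continuing the hypothetical ``User`` mapping from the earlier sketch (``VersionedFoo`` is likewise a hypothetical class with a two-column primary key)::

    u1 = session.query(User).get(5)  # may emit a SELECT
    u2 = session.query(User).get(5)  # no SQL; served from the identity map
    assert u1 is u2                  # same object (or both None)

    # composite primary keys are passed as a tuple,
    # in primary-key column order
    obj = session.query(VersionedFoo).get((5, 10))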
+ + """ + return self._get_impl(ident, loading.load_on_ident) + + def _get_impl(self, ident, fallback_fn): + # convert composite types to individual args + if hasattr(ident, '__composite_values__'): + ident = ident.__composite_values__() + + ident = util.to_list(ident) + + mapper = self._only_full_mapper_zero("get") + + if len(ident) != len(mapper.primary_key): + raise sa_exc.InvalidRequestError( + "Incorrect number of values in identifier to formulate " + "primary key for query.get(); primary key columns are %s" % + ','.join("'%s'" % c for c in mapper.primary_key)) + + key = mapper.identity_key_from_primary_key(ident) + + if not self._populate_existing and \ + not mapper.always_refresh and \ + self._for_update_arg is None: + + instance = loading.get_from_identity( + self.session, key, attributes.PASSIVE_OFF) + if instance is not None: + self._get_existing_condition() + # reject calls for id in identity map but class + # mismatch. + if not issubclass(instance.__class__, mapper.class_): + return None + return instance + + return fallback_fn(self, key) + + @_generative() + def correlate(self, *args): + """Return a :class:`.Query` construct which will correlate the given + FROM clauses to that of an enclosing :class:`.Query` or + :func:`~.expression.select`. + + The method here accepts mapped classes, :func:`.aliased` constructs, + and :func:`.mapper` constructs as arguments, which are resolved into + expression constructs, in addition to appropriate expression + constructs. + + The correlation arguments are ultimately passed to + :meth:`.Select.correlate` after coercion to expression constructs. + + The correlation arguments take effect in such cases + as when :meth:`.Query.from_self` is used, or when + a subquery as returned by :meth:`.Query.subquery` is + embedded in another :func:`~.expression.select` construct. + + """ + + self._correlate = self._correlate.union( + _interpret_as_from(s) + if s is not None else None + for s in args) + + @_generative() + def autoflush(self, setting): + """Return a Query with a specific 'autoflush' setting. + + Note that a Session with autoflush=False will + not autoflush, even if this flag is set to True at the + Query level. Therefore this flag is usually used only + to disable autoflush for a specific Query. + + """ + self._autoflush = setting + + @_generative() + def populate_existing(self): + """Return a :class:`.Query` that will expire and refresh all instances + as they are loaded, or reused from the current :class:`.Session`. + + :meth:`.populate_existing` does not improve behavior when + the ORM is used normally - the :class:`.Session` object's usual + behavior of maintaining a transaction and expiring all attributes + after rollback or commit handles object state automatically. + This method is not intended for general use. + + """ + self._populate_existing = True + + @_generative() + def _with_invoke_all_eagers(self, value): + """Set the 'invoke all eagers' flag which causes joined- and + subquery loaders to traverse into already-loaded related objects + and collections. + + Default is that of :attr:`.Query._invoke_all_eagers`. + + """ + self._invoke_all_eagers = value + + def with_parent(self, instance, property=None): + """Add filtering criterion that relates the given instance + to a child object or collection, using its attribute state + as well as an established :func:`.relationship()` + configuration. + + The method uses the :func:`.with_parent` function to generate + the clause, the result of which is passed to :meth:`.Query.filter`. 
+ + Parameters are the same as :func:`.with_parent`, with the exception + that the given property can be None, in which case a search is + performed against this :class:`.Query` object's target mapper. + + """ + + if property is None: + mapper_zero = inspect(self._mapper_zero()).mapper + + mapper = object_mapper(instance) + + for prop in mapper.iterate_properties: + if isinstance(prop, properties.RelationshipProperty) and \ + prop.mapper is mapper_zero: + property = prop + break + else: + raise sa_exc.InvalidRequestError( + "Could not locate a property which relates instances " + "of class '%s' to instances of class '%s'" % + ( + self._mapper_zero().class_.__name__, + instance.__class__.__name__) + ) + + return self.filter(with_parent(instance, property)) + + @_generative() + def add_entity(self, entity, alias=None): + """add a mapped entity to the list of result columns + to be returned.""" + + if alias is not None: + entity = aliased(entity, alias) + + self._entities = list(self._entities) + m = _MapperEntity(self, entity) + self._set_entity_selectables([m]) + + @_generative() + def with_session(self, session): + """Return a :class:`.Query` that will use the given :class:`.Session`. + + """ + + self.session = session + + def from_self(self, *entities): + """return a Query that selects from this Query's + SELECT statement. + + :meth:`.Query.from_self` essentially turns the SELECT statement + into a SELECT of itself. Given a query such as:: + + q = session.query(User).filter(User.name.like('e%')) + + Given the :meth:`.Query.from_self` version:: + + q = session.query(User).filter(User.name.like('e%')).from_self() + + This query renders as: + + .. sourcecode:: sql + + SELECT anon_1.user_id AS anon_1_user_id, + anon_1.user_name AS anon_1_user_name + FROM (SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE "user".name LIKE :name_1) AS anon_1 + + There are lots of cases where :meth:`.Query.from_self` may be useful. + A simple one is where above, we may want to apply a row LIMIT to + the set of user objects we query against, and then apply additional + joins against that row-limited set:: + + q = session.query(User).filter(User.name.like('e%')).\\ + limit(5).from_self().\\ + join(User.addresses).filter(Address.email.like('q%')) + + The above query joins to the ``Address`` entity but only against the + first five results of the ``User`` query: + + .. sourcecode:: sql + + SELECT anon_1.user_id AS anon_1_user_id, + anon_1.user_name AS anon_1_user_name + FROM (SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE "user".name LIKE :name_1 + LIMIT :param_1) AS anon_1 + JOIN address ON anon_1.user_id = address.user_id + WHERE address.email LIKE :email_1 + + **Automatic Aliasing** + + Another key behavior of :meth:`.Query.from_self` is that it applies + **automatic aliasing** to the entities inside the subquery, when + they are referenced on the outside. Above, if we continue to + refer to the ``User`` entity without any additional aliasing applied + to it, those references will be in terms of the subquery:: + + q = session.query(User).filter(User.name.like('e%')).\\ + limit(5).from_self().\\ + join(User.addresses).filter(Address.email.like('q%')).\\ + order_by(User.name) + + The ORDER BY against ``User.name`` is aliased to be in terms of the + inner subquery: + + ..
sourcecode:: sql + + SELECT anon_1.user_id AS anon_1_user_id, + anon_1.user_name AS anon_1_user_name + FROM (SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE "user".name LIKE :name_1 + LIMIT :param_1) AS anon_1 + JOIN address ON anon_1.user_id = address.user_id + WHERE address.email LIKE :email_1 ORDER BY anon_1.user_name + + The automatic aliasing feature only works in a **limited** way, + for simple filters and orderings. More ambitious constructions + such as referring to the entity in joins should prefer to use + explicit subquery objects, typically making use of the + :meth:`.Query.subquery` method to produce an explicit subquery object. + Always test the structure of queries by viewing the SQL to ensure + a particular structure does what's expected! + + **Changing the Entities** + + :meth:`.Query.from_self` also includes the ability to modify what + columns are being queried. In our example, we want ``User.id`` + to be queried by the inner query, so that we can join to the + ``Address`` entity on the outside, but we only wanted the outer + query to return the ``Address.email`` column:: + + q = session.query(User).filter(User.name.like('e%')).\\ + limit(5).from_self(Address.email).\\ + join(User.addresses).filter(Address.email.like('q%')) + + yielding: + + .. sourcecode:: sql + + SELECT address.email AS address_email + FROM (SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE "user".name LIKE :name_1 + LIMIT :param_1) AS anon_1 + JOIN address ON anon_1.user_id = address.user_id + WHERE address.email LIKE :email_1 + + **Looking out for Inner / Outer Columns** + + Keep in mind that when referring to columns that originate from + inside the subquery, we need to ensure they are present in the + columns clause of the subquery itself; this is an ordinary aspect of + SQL. For example, if we wanted to load from a joined entity inside + the subquery using :func:`.contains_eager`, we need to add those + columns. Below illustrates a join of ``Address`` to ``User``, + then a subquery, and then we'd like :func:`.contains_eager` to access + the ``User`` columns:: + + q = session.query(Address).join(Address.user).\\ + filter(User.name.like('e%')) + + q = q.add_entity(User).from_self().\\ + options(contains_eager(Address.user)) + + We use :meth:`.Query.add_entity` above **before** we call + :meth:`.Query.from_self` so that the ``User`` columns are present + in the inner subquery, so that they are available to the + :func:`.contains_eager` modifier we are using on the outside, + producing: + + .. sourcecode:: sql + + SELECT anon_1.address_id AS anon_1_address_id, + anon_1.address_email AS anon_1_address_email, + anon_1.address_user_id AS anon_1_address_user_id, + anon_1.user_id AS anon_1_user_id, + anon_1.user_name AS anon_1_user_name + FROM ( + SELECT address.id AS address_id, + address.email AS address_email, + address.user_id AS address_user_id, + "user".id AS user_id, + "user".name AS user_name + FROM address JOIN "user" ON "user".id = address.user_id + WHERE "user".name LIKE :name_1) AS anon_1 + + If we didn't call ``add_entity(User)``, but still asked + :func:`.contains_eager` to load the ``User`` entity, it would be + forced to add the table on the outside without the correct + join criteria - note the ``anon_1, "user"`` phrase at + the end: + + ..
sourcecode:: sql + + -- incorrect query + SELECT anon_1.address_id AS anon_1_address_id, + anon_1.address_email AS anon_1_address_email, + anon_1.address_user_id AS anon_1_address_user_id, + "user".id AS user_id, + "user".name AS user_name + FROM ( + SELECT address.id AS address_id, + address.email AS address_email, + address.user_id AS address_user_id + FROM address JOIN "user" ON "user".id = address.user_id + WHERE "user".name LIKE :name_1) AS anon_1, "user" + + :param \*entities: optional list of entities which will replace + those being selected. + + """ + fromclause = self.with_labels().enable_eagerloads(False).\ + statement.correlate(None) + q = self._from_selectable(fromclause) + q._enable_single_crit = False + q._select_from_entity = self._mapper_zero() + if entities: + q._set_entities(entities) + return q + + @_generative() + def _set_enable_single_crit(self, val): + self._enable_single_crit = val + + @_generative() + def _from_selectable(self, fromclause): + for attr in ( + '_statement', '_criterion', + '_order_by', '_group_by', + '_limit', '_offset', + '_joinpath', '_joinpoint', + '_distinct', '_having', + '_prefixes', '_suffixes' + ): + self.__dict__.pop(attr, None) + self._set_select_from([fromclause], True) + + # this enables clause adaptation for non-ORM + # expressions. + self._orm_only_from_obj_alias = False + + old_entities = self._entities + self._entities = [] + for e in old_entities: + e.adapt_to_selectable(self, self._from_obj[0]) + + def values(self, *columns): + """Return an iterator yielding result tuples corresponding + to the given list of columns""" + + if not columns: + return iter(()) + q = self._clone() + q._set_entities(columns, entity_wrapper=_ColumnEntity) + if not q._yield_per: + q._yield_per = 10 + return iter(q) + _values = values + + def value(self, column): + """Return a scalar result corresponding to the given + column expression.""" + try: + return next(self.values(column))[0] + except StopIteration: + return None + + @_generative() + def with_entities(self, *entities): + """Return a new :class:`.Query` replacing the SELECT list with the + given entities. + + e.g.:: + + # Users, filtered on some arbitrary criterion + # and then ordered by related email address + q = session.query(User).\\ + join(User.address).\\ + filter(User.name.like('%ed%')).\\ + order_by(Address.email) + + # given *only* User.id==5, Address.email, and 'q', what + # would the *next* User in the result be ? + subq = q.with_entities(Address.email).\\ + order_by(None).\\ + filter(User.id==5).\\ + subquery() + q = q.join((subq, subq.c.email < Address.email)).\\ + limit(1) + + .. versionadded:: 0.6.5 + + """ + self._set_entities(entities) + + @_generative() + def add_columns(self, *column): + """Add one or more column expressions to the list + of result columns to be returned.""" + + self._entities = list(self._entities) + l = len(self._entities) + for c in column: + _ColumnEntity(self, c) + # _ColumnEntity may add many entities if the + # given arg is a FROM clause + self._set_entity_selectables(self._entities[l:]) + + @util.pending_deprecation("0.7", + ":meth:`.add_column` is superseded " + "by :meth:`.add_columns`", + False) + def add_column(self, column): + """Add a column expression to the list of result columns to be + returned. + + Pending deprecation: :meth:`.add_column` will be superseded by + :meth:`.add_columns`. + + """ + return self.add_columns(column) + + def options(self, *args): + """Return a new Query object, applying the given list of + mapper options. 
+ + Most supplied options regard changing how column- and + relationship-mapped attributes are loaded. See the sections + :ref:`deferred` and :doc:`/orm/loading_relationships` for reference + documentation. + + """ + return self._options(False, *args) + + def _conditional_options(self, *args): + return self._options(True, *args) + + @_generative() + def _options(self, conditional, *args): + # most MapperOptions write to the '_attributes' dictionary, + # so copy that as well + self._attributes = self._attributes.copy() + opts = tuple(util.flatten_iterator(args)) + self._with_options = self._with_options + opts + if conditional: + for opt in opts: + opt.process_query_conditionally(self) + else: + for opt in opts: + opt.process_query(self) + + def with_transformation(self, fn): + """Return a new :class:`.Query` object transformed by + the given function. + + E.g.:: + + def filter_something(criterion): + def transform(q): + return q.filter(criterion) + return transform + + q = q.with_transformation(filter_something(x==5)) + + This allows ad-hoc recipes to be created for :class:`.Query` + objects. See the example at :ref:`hybrid_transformers`. + + .. versionadded:: 0.7.4 + + """ + return fn(self) + + @_generative() + def with_hint(self, selectable, text, dialect_name='*'): + """Add an indexing or other executional context + hint for the given entity or selectable to + this :class:`.Query`. + + Functionality is passed straight through to + :meth:`~sqlalchemy.sql.expression.Select.with_hint`, + with the addition that ``selectable`` can be a + :class:`.Table`, :class:`.Alias`, or ORM entity / mapped class, + etc. + + .. seealso:: + + :meth:`.Query.with_statement_hint` + + """ + if selectable is not None: + selectable = inspect(selectable).selectable + + self._with_hints += ((selectable, text, dialect_name),) + + def with_statement_hint(self, text, dialect_name='*'): + """add a statement hint to this :class:`.Query`. + + This method is similar to :meth:`.Select.with_hint` except that + it does not require an individual table, and instead applies to the + statement as a whole. + + This feature calls down into :meth:`.Select.with_statement_hint`. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :meth:`.Query.with_hint` + + """ + return self.with_hint(None, text, dialect_name) + + @_generative() + def execution_options(self, **kwargs): + """ Set non-SQL options which take effect during execution. + + The options are the same as those accepted by + :meth:`.Connection.execution_options`. + + Note that the ``stream_results`` execution option is enabled + automatically if the :meth:`~sqlalchemy.orm.query.Query.yield_per()` + method is used. + + """ + self._execution_options = self._execution_options.union(kwargs) + + @_generative() + def with_lockmode(self, mode): + """Return a new :class:`.Query` object with the specified "locking mode", + which essentially refers to the ``FOR UPDATE`` clause. + + .. deprecated:: 0.9.0 superseded by :meth:`.Query.with_for_update`. + + :param mode: a string representing the desired locking mode. + Valid values are: + + * ``None`` - translates to no lockmode + + * ``'update'`` - translates to ``FOR UPDATE`` + (standard SQL, supported by most dialects) + + * ``'update_nowait'`` - translates to ``FOR UPDATE NOWAIT`` + (supported by Oracle, PostgreSQL 8.1 upwards) + + * ``'read'`` - translates to ``LOCK IN SHARE MODE`` (for MySQL), + and ``FOR SHARE`` (for PostgreSQL) + + ..
seealso:: + + :meth:`.Query.with_for_update` - improved API for + specifying the ``FOR UPDATE`` clause. + + """ + self._for_update_arg = LockmodeArg.parse_legacy_query(mode) + + @_generative() + def with_for_update(self, read=False, nowait=False, of=None): + """return a new :class:`.Query` with the specified options for the + ``FOR UPDATE`` clause. + + The behavior of this method is identical to that of + :meth:`.SelectBase.with_for_update`. When called with no arguments, + the resulting ``SELECT`` statement will have a ``FOR UPDATE`` clause + appended. When additional arguments are specified, backend-specific + options such as ``FOR UPDATE NOWAIT`` or ``LOCK IN SHARE MODE`` + can take effect. + + E.g.:: + + q = sess.query(User).with_for_update(nowait=True, of=User) + + The above query on a Postgresql backend will render like:: + + SELECT users.id AS users_id FROM users FOR UPDATE OF users NOWAIT + + .. versionadded:: 0.9.0 :meth:`.Query.with_for_update` supersedes + the :meth:`.Query.with_lockmode` method. + + .. seealso:: + + :meth:`.GenerativeSelect.with_for_update` - Core level method with + full argument and behavioral description. + + """ + self._for_update_arg = LockmodeArg(read=read, nowait=nowait, of=of) + + @_generative() + def params(self, *args, **kwargs): + """add values for bind parameters which may have been + specified in filter(). + + parameters may be specified using \**kwargs, or optionally a single + dictionary as the first positional argument. The reason for both is + that \**kwargs is convenient, however some parameter dictionaries + contain unicode keys in which case \**kwargs cannot be used. + + """ + if len(args) == 1: + kwargs.update(args[0]) + elif len(args) > 0: + raise sa_exc.ArgumentError( + "params() takes zero or one positional argument, " + "which is a dictionary.") + self._params = self._params.copy() + self._params.update(kwargs) + + @_generative(_no_statement_condition, _no_limit_offset) + def filter(self, *criterion): + """apply the given filtering criterion to a copy + of this :class:`.Query`, using SQL expressions. + + e.g.:: + + session.query(MyClass).filter(MyClass.name == 'some name') + + Multiple criteria may be specified as comma separated; the effect + is that they will be joined together using the :func:`.and_` + function:: + + session.query(MyClass).\\ + filter(MyClass.name == 'some name', MyClass.id > 5) + + The criterion is any SQL expression object applicable to the + WHERE clause of a select. String expressions are coerced + into SQL expression constructs via the :func:`.text` construct. + + .. seealso:: + + :meth:`.Query.filter_by` - filter on keyword expressions. + + """ + for criterion in list(criterion): + criterion = expression._expression_literal_as_text(criterion) + + criterion = self._adapt_clause(criterion, True, True) + + if self._criterion is not None: + self._criterion = self._criterion & criterion + else: + self._criterion = criterion + + def filter_by(self, **kwargs): + """apply the given filtering criterion to a copy + of this :class:`.Query`, using keyword expressions. + + e.g.:: + + session.query(MyClass).filter_by(name = 'some name') + + Multiple criteria may be specified as comma separated; the effect + is that they will be joined together using the :func:`.and_` + function:: + + session.query(MyClass).\\ + filter_by(name = 'some name', id = 5) + + The keyword expressions are extracted from the primary + entity of the query, or the last entity that was the + target of a call to :meth:`.Query.join`. + + .. 
seealso:: + + :meth:`.Query.filter` - filter on SQL expressions. + + """ + + clauses = [_entity_descriptor(self._joinpoint_zero(), key) == value + for key, value in kwargs.items()] + return self.filter(sql.and_(*clauses)) + + @_generative(_no_statement_condition, _no_limit_offset) + def order_by(self, *criterion): + """apply one or more ORDER BY criterion to the query and return + the newly resulting ``Query`` + + All existing ORDER BY settings can be suppressed by + passing ``None`` - this will suppress any ORDER BY configured + on mappers as well. + + Alternatively, an existing ORDER BY setting on the Query + object can be entirely cancelled by passing ``False`` + as the value - use this before calling methods where + an ORDER BY is invalid. + + """ + + if len(criterion) == 1: + if criterion[0] is False: + if '_order_by' in self.__dict__: + del self._order_by + return + if criterion[0] is None: + self._order_by = None + return + + criterion = self._adapt_col_list(criterion) + + if self._order_by is False or self._order_by is None: + self._order_by = criterion + else: + self._order_by = self._order_by + criterion + + @_generative(_no_statement_condition, _no_limit_offset) + def group_by(self, *criterion): + """apply one or more GROUP BY criterion to the query and return + the newly resulting :class:`.Query`""" + + criterion = list(chain(*[_orm_columns(c) for c in criterion])) + criterion = self._adapt_col_list(criterion) + + if self._group_by is False: + self._group_by = criterion + else: + self._group_by = self._group_by + criterion + + @_generative(_no_statement_condition, _no_limit_offset) + def having(self, criterion): + """apply a HAVING criterion to the query and return the + newly resulting :class:`.Query`. + + :meth:`~.Query.having` is used in conjunction with + :meth:`~.Query.group_by`. + + HAVING criterion makes it possible to use filters on aggregate + functions like COUNT, SUM, AVG, MAX, and MIN, eg.:: + + q = session.query(User.id).\\ + join(User.addresses).\\ + group_by(User.id).\\ + having(func.count(Address.id) > 2) + + """ + + criterion = expression._expression_literal_as_text(criterion) + + if criterion is not None and \ + not isinstance(criterion, sql.ClauseElement): + raise sa_exc.ArgumentError( + "having() argument must be of type " + "sqlalchemy.sql.ClauseElement or string") + + criterion = self._adapt_clause(criterion, True, True) + + if self._having is not None: + self._having = self._having & criterion + else: + self._having = criterion + + def union(self, *q): + """Produce a UNION of this Query against one or more queries. + + e.g.:: + + q1 = sess.query(SomeClass).filter(SomeClass.foo=='bar') + q2 = sess.query(SomeClass).filter(SomeClass.bar=='foo') + + q3 = q1.union(q2) + + The method accepts multiple Query objects so as to control + the level of nesting. A series of ``union()`` calls such as:: + + x.union(y).union(z).all() + + will nest on each ``union()``, and produces:: + + SELECT * FROM (SELECT * FROM (SELECT * FROM X UNION + SELECT * FROM y) UNION SELECT * FROM Z) + + Whereas:: + + x.union(y, z).all() + + produces:: + + SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y UNION + SELECT * FROM Z) + + Note that many database backends do not allow ORDER BY to + be rendered on a query called within UNION, EXCEPT, etc. + To disable all ORDER BY clauses including those configured + on mappers, issue ``query.order_by(None)`` - the resulting + :class:`.Query` object will not render ORDER BY within + its SELECT statement. 
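A brief sketch of the combination just described, including the ORDER BY caveat, again assuming the hypothetical ``User`` mapping and ``session`` from the earlier sketch::

    q1 = session.query(User).filter(User.name == 'ed')
    q2 = session.query(User).filter(User.name == 'fred')

    # strip any ORDER BY, including mapper-configured ordering,
    # before each query is embedded as a UNION member
    q3 = q1.order_by(None).union(q2.order_by(None))
    users = q3.all()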
+ + """ + + return self._from_selectable( + expression.union(*([self] + list(q)))) + + def union_all(self, *q): + """Produce a UNION ALL of this Query against one or more queries. + + Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See + that method for usage examples. + + """ + return self._from_selectable( + expression.union_all(*([self] + list(q))) + ) + + def intersect(self, *q): + """Produce an INTERSECT of this Query against one or more queries. + + Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See + that method for usage examples. + + """ + return self._from_selectable( + expression.intersect(*([self] + list(q))) + ) + + def intersect_all(self, *q): + """Produce an INTERSECT ALL of this Query against one or more queries. + + Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See + that method for usage examples. + + """ + return self._from_selectable( + expression.intersect_all(*([self] + list(q))) + ) + + def except_(self, *q): + """Produce an EXCEPT of this Query against one or more queries. + + Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See + that method for usage examples. + + """ + return self._from_selectable( + expression.except_(*([self] + list(q))) + ) + + def except_all(self, *q): + """Produce an EXCEPT ALL of this Query against one or more queries. + + Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See + that method for usage examples. + + """ + return self._from_selectable( + expression.except_all(*([self] + list(q))) + ) + + def join(self, *props, **kwargs): + """Create a SQL JOIN against this :class:`.Query` object's criterion + and apply generatively, returning the newly resulting :class:`.Query`. + + **Simple Relationship Joins** + + Consider a mapping between two classes ``User`` and ``Address``, + with a relationship ``User.addresses`` representing a collection + of ``Address`` objects associated with each ``User``. The most + common usage of :meth:`~.Query.join` is to create a JOIN along this + relationship, using the ``User.addresses`` attribute as an indicator + for how this should occur:: + + q = session.query(User).join(User.addresses) + + Where above, the call to :meth:`~.Query.join` along ``User.addresses`` + will result in SQL equivalent to:: + + SELECT user.* FROM user JOIN address ON user.id = address.user_id + + In the above example we refer to ``User.addresses`` as passed to + :meth:`~.Query.join` as the *on clause*, that is, it indicates + how the "ON" portion of the JOIN should be constructed. For a + single-entity query such as the one above (i.e. we start by selecting + only from ``User`` and nothing else), the relationship can also be + specified by its string name:: + + q = session.query(User).join("addresses") + + :meth:`~.Query.join` can also accommodate multiple + "on clause" arguments to produce a chain of joins, such as below + where a join across four related entities is constructed:: + + q = session.query(User).join("orders", "items", "keywords") + + The above would be shorthand for three separate calls to + :meth:`~.Query.join`, each using an explicit attribute to indicate + the source entity:: + + q = session.query(User).\\ + join(User.orders).\\ + join(Order.items).\\ + join(Item.keywords) + + **Joins to a Target Entity or Selectable** + + A second form of :meth:`~.Query.join` allows any mapped entity + or core selectable construct as a target. 
In this usage, + :meth:`~.Query.join` will attempt + to create a JOIN along the natural foreign key relationship between + two entities:: + + q = session.query(User).join(Address) + + The above calling form of :meth:`~.Query.join` will raise an error if + either there are no foreign keys between the two entities, or if + there are multiple foreign key linkages between them. In the + above calling form, :meth:`~.Query.join` is called upon to + create the "on clause" automatically for us. The target can + be any mapped entity or selectable, such as a :class:`.Table`:: + + q = session.query(User).join(addresses_table) + + **Joins to a Target with an ON Clause** + + The third calling form allows both the target entity as well + as the ON clause to be passed explicitly. Suppose for + example we wanted to join to ``Address`` twice, using + an alias the second time. We use :func:`~sqlalchemy.orm.aliased` + to create a distinct alias of ``Address``, and join + to it using the ``target, onclause`` form, so that the + alias can be specified explicitly as the target along with + the relationship to instruct how the ON clause should proceed:: + + a_alias = aliased(Address) + + q = session.query(User).\\ + join(User.addresses).\\ + join(a_alias, User.addresses).\\ + filter(Address.email_address=='ed@foo.com').\\ + filter(a_alias.email_address=='ed@bar.com') + + Where above, the generated SQL would be similar to:: + + SELECT user.* FROM user + JOIN address ON user.id = address.user_id + JOIN address AS address_1 ON user.id=address_1.user_id + WHERE address.email_address = :email_address_1 + AND address_1.email_address = :email_address_2 + + The two-argument calling form of :meth:`~.Query.join` + also allows us to construct arbitrary joins with SQL-oriented + "on clause" expressions, not relying upon configured relationships + at all. Any SQL expression can be passed as the ON clause + when using the two-argument form, which should refer to the target + entity in some way as well as an applicable source entity:: + + q = session.query(User).join(Address, User.id==Address.user_id) + + .. versionchanged:: 0.7 + In SQLAlchemy 0.6 and earlier, the two argument form of + :meth:`~.Query.join` requires the usage of a tuple: + ``query(User).join((Address, User.id==Address.user_id))``\ . + This calling form is accepted in 0.7 and further, though + is not necessary unless multiple join conditions are passed to + a single :meth:`~.Query.join` call, which itself is also not + generally necessary as it is now equivalent to multiple + calls (this wasn't always the case). + + **Advanced Join Targeting and Adaption** + + There is a lot of flexibility in what the "target" can be when using + :meth:`~.Query.join`. As noted previously, it also accepts + :class:`.Table` constructs and other selectables such as + :func:`.alias` and :func:`.select` constructs, with either the one + or two-argument forms:: + + addresses_q = select([Address.user_id]).\\ + where(Address.email_address.endswith("@bar.com")).\\ + alias() + + q = session.query(User).\\ + join(addresses_q, addresses_q.c.user_id==User.id) + + :meth:`~.Query.join` also features the ability to *adapt* a + :meth:`~sqlalchemy.orm.relationship` -driven ON clause to the target + selectable. 
Below we construct a JOIN from ``User`` to a subquery
+        against ``Address``, allowing the relationship denoted by
+        ``User.addresses`` to *adapt* itself to the altered target::
+
+            address_subq = session.query(Address).\\
+                filter(Address.email_address == 'ed@foo.com').\\
+                subquery()
+
+            q = session.query(User).join(address_subq, User.addresses)
+
+        Producing SQL similar to::
+
+            SELECT user.* FROM user
+            JOIN (
+                SELECT address.id AS id,
+                    address.user_id AS user_id,
+                    address.email_address AS email_address
+                FROM address
+                WHERE address.email_address = :email_address_1
+            ) AS anon_1 ON user.id = anon_1.user_id
+
+        The above form allows one to fall back onto an explicit ON
+        clause at any time::
+
+            q = session.query(User).\\
+                join(address_subq, User.id==address_subq.c.user_id)
+
+        **Controlling what to Join From**
+
+        While :meth:`~.Query.join` exclusively deals with the "right"
+        side of the JOIN, we can also control the "left" side, in those
+        cases where it's needed, using :meth:`~.Query.select_from`.
+        Below we construct a query against ``Address`` but can still
+        make usage of ``User.addresses`` as our ON clause by instructing
+        the :class:`.Query` to select first from the ``User``
+        entity::
+
+            q = session.query(Address).select_from(User).\\
+                join(User.addresses).\\
+                filter(User.name == 'ed')
+
+        Which will produce SQL similar to::
+
+            SELECT address.* FROM user
+            JOIN address ON user.id=address.user_id
+            WHERE user.name = :name_1
+
+        **Constructing Aliases Anonymously**
+
+        :meth:`~.Query.join` can construct anonymous aliases
+        using the ``aliased=True`` flag.  This feature is useful
+        when a query is being joined algorithmically, such as
+        when querying self-referentially to an arbitrary depth::
+
+            q = session.query(Node).\\
+                join("children", "children", aliased=True)
+
+        When ``aliased=True`` is used, the actual "alias" construct
+        is not explicitly available.  To work with it, methods such as
+        :meth:`.Query.filter` will adapt the incoming entity to
+        the last join point::
+
+            q = session.query(Node).\\
+                join("children", "children", aliased=True).\\
+                filter(Node.name == 'grandchild 1')
+
+        When using automatic aliasing, the ``from_joinpoint=True``
+        argument can allow a multi-node join to be broken into
+        multiple calls to :meth:`~.Query.join`, so that
+        each path along the way can be further filtered::
+
+            q = session.query(Node).\\
+                join("children", aliased=True).\\
+                filter(Node.name == 'child 1').\\
+                join("children", aliased=True, from_joinpoint=True).\\
+                filter(Node.name == 'grandchild 1')
+
+        The filtering aliases above can then be reset back to the
+        original ``Node`` entity using :meth:`~.Query.reset_joinpoint`::
+
+            q = session.query(Node).\\
+                join("children", "children", aliased=True).\\
+                filter(Node.name == 'grandchild 1').\\
+                reset_joinpoint().\\
+                filter(Node.name == 'parent 1')
+
+        For an example of ``aliased=True``, see the distribution
+        example :ref:`examples_xmlpersistence` which illustrates
+        an XPath-like query system using algorithmic joins.
+
+        :param \*props: A collection of one or more join conditions,
+         each consisting of a relationship-bound attribute or string
+         relationship name representing an "on clause", or a single
+         target entity, or a tuple in the form of ``(target, onclause)``.
+         A special two-argument calling form ``target, onclause``
+         is also accepted.
+        :param aliased=False: If True, indicate that the JOIN target should be
+         anonymously aliased.
Subsequent calls to :meth:`~.Query.filter` + and similar will adapt the incoming criterion to the target + alias, until :meth:`~.Query.reset_joinpoint` is called. + :param isouter=False: If True, the join used will be a left outer join, + just as if the :meth:`.Query.outerjoin` method were called. This + flag is here to maintain consistency with the same flag as accepted + by :meth:`.FromClause.join` and other Core constructs. + + + .. versionadded:: 1.0.0 + + :param from_joinpoint=False: When using ``aliased=True``, a setting + of True here will cause the join to be from the most recent + joined target, rather than starting back from the original + FROM clauses of the query. + + .. seealso:: + + :ref:`ormtutorial_joins` in the ORM tutorial. + + :ref:`inheritance_toplevel` for details on how + :meth:`~.Query.join` is used for inheritance relationships. + + :func:`.orm.join` - a standalone ORM-level join function, + used internally by :meth:`.Query.join`, which in previous + SQLAlchemy versions was the primary ORM-level joining interface. + + """ + aliased, from_joinpoint, isouter = kwargs.pop('aliased', False),\ + kwargs.pop('from_joinpoint', False),\ + kwargs.pop('isouter', False) + if kwargs: + raise TypeError("unknown arguments: %s" % + ', '.join(sorted(kwargs))) + return self._join(props, + outerjoin=isouter, create_aliases=aliased, + from_joinpoint=from_joinpoint) + + def outerjoin(self, *props, **kwargs): + """Create a left outer join against this ``Query`` object's criterion + and apply generatively, returning the newly resulting ``Query``. + + Usage is the same as the ``join()`` method. + + """ + aliased, from_joinpoint = kwargs.pop('aliased', False), \ + kwargs.pop('from_joinpoint', False) + if kwargs: + raise TypeError("unknown arguments: %s" % + ', '.join(sorted(kwargs))) + return self._join(props, + outerjoin=True, create_aliases=aliased, + from_joinpoint=from_joinpoint) + + def _update_joinpoint(self, jp): + self._joinpoint = jp + # copy backwards to the root of the _joinpath + # dict, so that no existing dict in the path is mutated + while 'prev' in jp: + f, prev = jp['prev'] + prev = prev.copy() + prev[f] = jp + jp['prev'] = (f, prev) + jp = prev + self._joinpath = jp + + @_generative(_no_statement_condition, _no_limit_offset) + def _join(self, keys, outerjoin, create_aliases, from_joinpoint): + """consumes arguments from join() or outerjoin(), places them into a + consistent format with which to form the actual JOIN constructs. + + """ + + if not from_joinpoint: + self._reset_joinpoint() + + if len(keys) == 2 and \ + isinstance(keys[0], (expression.FromClause, + type, AliasedClass)) and \ + isinstance(keys[1], (str, expression.ClauseElement, + interfaces.PropComparator)): + # detect 2-arg form of join and + # convert to a tuple. + keys = (keys,) + + keylist = util.to_list(keys) + for idx, arg1 in enumerate(keylist): + if isinstance(arg1, tuple): + # "tuple" form of join, multiple + # tuples are accepted as well. The simpler + # "2-arg" form is preferred. May deprecate + # the "tuple" usage. + arg1, arg2 = arg1 + else: + arg2 = None + + # determine onclause/right_entity. there + # is a little bit of legacy behavior still at work here + # which means they might be in either order. may possibly + # lock this down to (right_entity, onclause) in 0.6. 
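+            # i.e. both query.join(Address, User.addresses) and
+            # query.join(User.addresses, Address) are accepted; the
+            # isinstance() check below detects which argument is the
+            # onclause rather than relying on position.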
+ if isinstance( + arg1, (interfaces.PropComparator, util.string_types)): + right_entity, onclause = arg2, arg1 + else: + right_entity, onclause = arg1, arg2 + + left_entity = prop = None + + if isinstance(onclause, interfaces.PropComparator): + of_type = getattr(onclause, '_of_type', None) + else: + of_type = None + + if isinstance(onclause, util.string_types): + left_entity = self._joinpoint_zero() + + descriptor = _entity_descriptor(left_entity, onclause) + onclause = descriptor + + # check for q.join(Class.propname, from_joinpoint=True) + # and Class is that of the current joinpoint + elif from_joinpoint and \ + isinstance(onclause, interfaces.PropComparator): + left_entity = onclause._parententity + + info = inspect(self._joinpoint_zero()) + left_mapper, left_selectable, left_is_aliased = \ + getattr(info, 'mapper', None), \ + info.selectable, \ + getattr(info, 'is_aliased_class', None) + + if left_mapper is left_entity: + left_entity = self._joinpoint_zero() + descriptor = _entity_descriptor(left_entity, + onclause.key) + onclause = descriptor + + if isinstance(onclause, interfaces.PropComparator): + if right_entity is None: + if of_type: + right_entity = of_type + else: + right_entity = onclause.property.mapper + + left_entity = onclause._parententity + + prop = onclause.property + if not isinstance(onclause, attributes.QueryableAttribute): + onclause = prop + + if not create_aliases: + # check for this path already present. + # don't render in that case. + edge = (left_entity, right_entity, prop.key) + if edge in self._joinpoint: + # The child's prev reference might be stale -- + # it could point to a parent older than the + # current joinpoint. If this is the case, + # then we need to update it and then fix the + # tree's spine with _update_joinpoint. Copy + # and then mutate the child, which might be + # shared by a different query object. + jp = self._joinpoint[edge].copy() + jp['prev'] = (edge, self._joinpoint) + self._update_joinpoint(jp) + + if idx == len(keylist) - 1: + util.warn( + "Pathed join target %s has already " + "been joined to; skipping" % prop) + continue + + elif onclause is not None and right_entity is None: + # TODO: no coverage here + raise NotImplementedError("query.join(a==b) not supported.") + + self._join_left_to_right( + left_entity, + right_entity, onclause, + outerjoin, create_aliases, prop) + + def _join_left_to_right(self, left, right, + onclause, outerjoin, create_aliases, prop): + """append a JOIN to the query's from clause.""" + + self._polymorphic_adapters = self._polymorphic_adapters.copy() + + if left is None: + if self._from_obj: + left = self._from_obj[0] + elif self._entities: + left = self._entities[0].entity_zero_or_selectable + + if left is None: + raise sa_exc.InvalidRequestError( + "Don't know how to join from %s; please use " + "select_from() to establish the left " + "entity/selectable of this join" % self._entities[0]) + + if left is right and \ + not create_aliases: + raise sa_exc.InvalidRequestError( + "Can't construct a join from %s to %s, they " + "are the same entity" % + (left, right)) + + l_info = inspect(left) + r_info = inspect(right) + + overlap = False + if not create_aliases: + right_mapper = getattr(r_info, "mapper", None) + # if the target is a joined inheritance mapping, + # be more liberal about auto-aliasing. 
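+            # (such a target maps to a selectable spanning multiple
+            # tables, so it can overlap one of the existing FROM objects
+            # without being literally the same selectable; the overlap
+            # check below detects that case so the target gets aliased)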
+            if right_mapper and (
+                    right_mapper.with_polymorphic or
+                    isinstance(right_mapper.mapped_table, expression.Join)
+            ):
+                for from_obj in self._from_obj or [l_info.selectable]:
+                    if sql_util.selectables_overlap(
+                            l_info.selectable, from_obj) and \
+                            sql_util.selectables_overlap(
+                            from_obj, r_info.selectable):
+                        overlap = True
+                        break
+
+        if (overlap or not create_aliases) and \
+                l_info.selectable is r_info.selectable:
+            raise sa_exc.InvalidRequestError(
+                "Can't join table/selectable '%s' to itself" %
+                l_info.selectable)
+
+        right, onclause = self._prepare_right_side(
+            r_info, right, onclause,
+            create_aliases,
+            prop, overlap)
+
+        # if joining on a MapperProperty path,
+        # track the path to prevent redundant joins
+        if not create_aliases and prop:
+            self._update_joinpoint({
+                '_joinpoint_entity': right,
+                'prev': ((left, right, prop.key), self._joinpoint)
+            })
+        else:
+            self._joinpoint = {'_joinpoint_entity': right}
+
+        self._join_to_left(l_info, left, right, onclause, outerjoin)
+
+    def _prepare_right_side(self, r_info, right, onclause, create_aliases,
+                            prop, overlap):
+        info = r_info
+
+        right_mapper, right_selectable, right_is_aliased = \
+            getattr(info, 'mapper', None), \
+            info.selectable, \
+            getattr(info, 'is_aliased_class', False)
+
+        if right_mapper:
+            self._join_entities += (info, )
+
+        if right_mapper and prop and \
+                not right_mapper.common_parent(prop.mapper):
+            raise sa_exc.InvalidRequestError(
+                "Join target %s does not correspond to "
+                "the right side of join condition %s" % (right, onclause)
+            )
+
+        if not right_mapper and prop:
+            right_mapper = prop.mapper
+
+        need_adapter = False
+
+        if right_mapper and right is right_selectable:
+            if not right_selectable.is_derived_from(
+                    right_mapper.mapped_table):
+                raise sa_exc.InvalidRequestError(
+                    "Selectable '%s' is not derived from '%s'" %
+                    (right_selectable.description,
+                     right_mapper.mapped_table.description))
+
+            if isinstance(right_selectable, expression.SelectBase):
+                # TODO: this isn't even covered now!
+                right_selectable = right_selectable.alias()
+                need_adapter = True
+
+            right = aliased(right_mapper, right_selectable)
+
+        aliased_entity = right_mapper and \
+            not right_is_aliased and \
+            (
+                right_mapper.with_polymorphic and isinstance(
+                    right_mapper._with_polymorphic_selectable,
+                    expression.Alias)
+                or
+                overlap  # test for overlap:
+                # orm/inheritance/relationships.py
+                # SelfReferentialM2MTest
+            )
+
+        if not need_adapter and (create_aliases or aliased_entity):
+            right = aliased(right, flat=True)
+            need_adapter = True
+
+        # if an alias() of the right side was generated here,
+        # apply an adapter to all subsequent filter() calls
+        # until reset_joinpoint() is called.
+        if need_adapter:
+            self._filter_aliases = ORMAdapter(
+                right,
+                equivalents=right_mapper and
+                right_mapper._equivalent_columns or {},
+                chain_to=self._filter_aliases)
+
+        # if the onclause is a ClauseElement, adapt it with any
+        # adapters that are in place right now
+        if isinstance(onclause, expression.ClauseElement):
+            onclause = self._adapt_clause(onclause, True, True)
+
+        # if an alias() on the right side was generated,
+        # which is intended to wrap the right side in a subquery,
+        # ensure that columns retrieved from this target in the result
+        # set are also adapted.
+ if aliased_entity and not create_aliases: + self._mapper_loads_polymorphically_with( + right_mapper, + ORMAdapter( + right, + equivalents=right_mapper._equivalent_columns + ) + ) + + return right, onclause + + def _join_to_left(self, l_info, left, right, onclause, outerjoin): + info = l_info + left_mapper = getattr(info, 'mapper', None) + left_selectable = info.selectable + + if self._from_obj: + replace_clause_index, clause = sql_util.find_join_source( + self._from_obj, + left_selectable) + if clause is not None: + try: + clause = orm_join(clause, + right, + onclause, isouter=outerjoin) + except sa_exc.ArgumentError as ae: + raise sa_exc.InvalidRequestError( + "Could not find a FROM clause to join from. " + "Tried joining to %s, but got: %s" % (right, ae)) + + self._from_obj = \ + self._from_obj[:replace_clause_index] + \ + (clause, ) + \ + self._from_obj[replace_clause_index + 1:] + return + + if left_mapper: + for ent in self._entities: + if ent.corresponds_to(left): + clause = ent.selectable + break + else: + clause = left + else: + clause = left_selectable + + assert clause is not None + try: + clause = orm_join(clause, right, onclause, isouter=outerjoin) + except sa_exc.ArgumentError as ae: + raise sa_exc.InvalidRequestError( + "Could not find a FROM clause to join from. " + "Tried joining to %s, but got: %s" % (right, ae)) + self._from_obj = self._from_obj + (clause,) + + def _reset_joinpoint(self): + self._joinpoint = self._joinpath + self._filter_aliases = None + + @_generative(_no_statement_condition) + def reset_joinpoint(self): + """Return a new :class:`.Query`, where the "join point" has + been reset back to the base FROM entities of the query. + + This method is usually used in conjunction with the + ``aliased=True`` feature of the :meth:`~.Query.join` + method. See the example in :meth:`~.Query.join` for how + this is used. + + """ + self._reset_joinpoint() + + @_generative(_no_clauseelement_condition) + def select_from(self, *from_obj): + """Set the FROM clause of this :class:`.Query` explicitly. + + :meth:`.Query.select_from` is often used in conjunction with + :meth:`.Query.join` in order to control which entity is selected + from on the "left" side of the join. + + The entity or selectable object here effectively replaces the + "left edge" of any calls to :meth:`~.Query.join`, when no + joinpoint is otherwise established - usually, the default "join + point" is the leftmost entity in the :class:`~.Query` object's + list of entities to be selected. + + A typical example:: + + q = session.query(Address).select_from(User).\\ + join(User.addresses).\\ + filter(User.name == 'ed') + + Which produces SQL equivalent to:: + + SELECT address.* FROM user + JOIN address ON user.id=address.user_id + WHERE user.name = :name_1 + + :param \*from_obj: collection of one or more entities to apply + to the FROM clause. Entities can be mapped classes, + :class:`.AliasedClass` objects, :class:`.Mapper` objects + as well as core :class:`.FromClause` elements like subqueries. + + .. versionchanged:: 0.9 + This method no longer applies the given FROM object + to be the selectable from which matching entities + select from; the :meth:`.select_entity_from` method + now accomplishes this. See that method for a description + of this behavior. + + .. 
seealso:: + + :meth:`~.Query.join` + + :meth:`.Query.select_entity_from` + + """ + + self._set_select_from(from_obj, False) + + @_generative(_no_clauseelement_condition) + def select_entity_from(self, from_obj): + """Set the FROM clause of this :class:`.Query` to a + core selectable, applying it as a replacement FROM clause + for corresponding mapped entities. + + This method is similar to the :meth:`.Query.select_from` + method, in that it sets the FROM clause of the query. However, + where :meth:`.Query.select_from` only affects what is placed + in the FROM, this method also applies the given selectable + to replace the FROM which the selected entities would normally + select from. + + The given ``from_obj`` must be an instance of a :class:`.FromClause`, + e.g. a :func:`.select` or :class:`.Alias` construct. + + An example would be a :class:`.Query` that selects ``User`` entities, + but uses :meth:`.Query.select_entity_from` to have the entities + selected from a :func:`.select` construct instead of the + base ``user`` table:: + + select_stmt = select([User]).where(User.id == 7) + + q = session.query(User).\\ + select_entity_from(select_stmt).\\ + filter(User.name == 'ed') + + The query generated will select ``User`` entities directly + from the given :func:`.select` construct, and will be:: + + SELECT anon_1.id AS anon_1_id, anon_1.name AS anon_1_name + FROM (SELECT "user".id AS id, "user".name AS name + FROM "user" + WHERE "user".id = :id_1) AS anon_1 + WHERE anon_1.name = :name_1 + + Notice above that even the WHERE criterion was "adapted" such that + the ``anon_1`` subquery effectively replaces all references to the + ``user`` table, except for the one that it refers to internally. + + Compare this to :meth:`.Query.select_from`, which as of + version 0.9, does not affect existing entities. The + statement below:: + + q = session.query(User).\\ + select_from(select_stmt).\\ + filter(User.name == 'ed') + + Produces SQL where both the ``user`` table as well as the + ``select_stmt`` construct are present as separate elements + in the FROM clause. No "adaptation" of the ``user`` table + is applied:: + + SELECT "user".id AS user_id, "user".name AS user_name + FROM "user", (SELECT "user".id AS id, "user".name AS name + FROM "user" + WHERE "user".id = :id_1) AS anon_1 + WHERE "user".name = :name_1 + + :meth:`.Query.select_entity_from` maintains an older + behavior of :meth:`.Query.select_from`. In modern usage, + similar results can also be achieved using :func:`.aliased`:: + + select_stmt = select([User]).where(User.id == 7) + user_from_select = aliased(User, select_stmt.alias()) + + q = session.query(user_from_select) + + :param from_obj: a :class:`.FromClause` object that will replace + the FROM clause of this :class:`.Query`. + + .. seealso:: + + :meth:`.Query.select_from` + + .. versionadded:: 0.8 + :meth:`.Query.select_entity_from` was added to specify + the specific behavior of entity replacement, however + the :meth:`.Query.select_from` maintains this behavior + as well until 0.9. + + """ + + self._set_select_from([from_obj], True) + + def __getitem__(self, item): + if isinstance(item, slice): + start, stop, step = util.decode_slice(item) + + if isinstance(stop, int) and \ + isinstance(start, int) and \ + stop - start <= 0: + return [] + + # perhaps we should execute a count() here so that we + # can still use LIMIT/OFFSET ? 
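+            # negative indexes can't be rendered as LIMIT/OFFSET, so
+            # fall through to loading the full result and slicing it
+            # in Python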
+            elif (isinstance(start, int) and start < 0) \
+                    or (isinstance(stop, int) and stop < 0):
+                return list(self)[item]
+
+            res = self.slice(start, stop)
+            if step is not None:
+                return list(res)[None:None:item.step]
+            else:
+                return list(res)
+        else:
+            if item == -1:
+                return list(self)[-1]
+            else:
+                return list(self[item:item + 1])[0]
+
+    @_generative(_no_statement_condition)
+    def slice(self, start, stop):
+        """apply LIMIT/OFFSET to the ``Query`` based on a
+        range and return the newly resulting ``Query``."""
+
+        if start is not None and stop is not None:
+            self._offset = (self._offset or 0) + start
+            self._limit = stop - start
+        elif start is None and stop is not None:
+            self._limit = stop
+        elif start is not None and stop is None:
+            self._offset = (self._offset or 0) + start
+
+        if self._offset == 0:
+            self._offset = None
+
+    @_generative(_no_statement_condition)
+    def limit(self, limit):
+        """Apply a ``LIMIT`` to the query and return the newly resulting
+        ``Query``.
+
+        """
+        self._limit = limit
+
+    @_generative(_no_statement_condition)
+    def offset(self, offset):
+        """Apply an ``OFFSET`` to the query and return the newly resulting
+        ``Query``.
+
+        """
+        self._offset = offset
+
+    @_generative(_no_statement_condition)
+    def distinct(self, *criterion):
+        """Apply a ``DISTINCT`` to the query and return the newly resulting
+        ``Query``.
+
+        .. note::
+
+            The :meth:`.distinct` call includes logic that will automatically
+            add columns from the ORDER BY of the query to the columns
+            clause of the SELECT statement, to satisfy the common need
+            of the database backend that ORDER BY columns be part of the
+            SELECT list when DISTINCT is used.  These columns *are not*
+            added to the list of columns actually fetched by the
+            :class:`.Query`, however, so would not affect results.
+            The columns are passed through when using the
+            :attr:`.Query.statement` accessor, however.
+
+        :param \*expr: optional column expressions.  When present,
+         the Postgresql dialect will render a ``DISTINCT ON (<expr>)``
+         construct.
+
+        """
+        if not criterion:
+            self._distinct = True
+        else:
+            criterion = self._adapt_col_list(criterion)
+            if isinstance(self._distinct, list):
+                self._distinct += criterion
+            else:
+                self._distinct = criterion
+
+    @_generative()
+    def prefix_with(self, *prefixes):
+        """Apply the prefixes to the query and return the newly resulting
+        ``Query``.
+
+        :param \*prefixes: optional prefixes, typically strings,
+         not using any commas.  In particular it is useful for MySQL
+         keywords.
+
+        e.g.::
+
+            query = sess.query(User.name).\\
+                prefix_with('HIGH_PRIORITY').\\
+                prefix_with('SQL_SMALL_RESULT', 'ALL')
+
+        Would render::
+
+            SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL users.name AS users_name
+            FROM users
+
+        .. versionadded:: 0.7.7
+
+        .. seealso::
+
+            :meth:`.HasPrefixes.prefix_with`
+
+        """
+        if self._prefixes:
+            self._prefixes += prefixes
+        else:
+            self._prefixes = prefixes
+
+    @_generative()
+    def suffix_with(self, *suffixes):
+        """Apply the suffix to the query and return the newly resulting
+        ``Query``.
+
+        :param \*suffixes: optional suffixes, typically strings,
+         not using any commas.
+
+        .. versionadded:: 1.0.0
+
+        .. seealso::
+
+            :meth:`.Query.prefix_with`
+
+            :meth:`.HasSuffixes.suffix_with`
+
+        """
+        if self._suffixes:
+            self._suffixes += suffixes
+        else:
+            self._suffixes = suffixes
+
+    def all(self):
+        """Return the results represented by this ``Query`` as a list.
+
+        This results in an execution of the underlying query.
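+
+        E.g., a minimal sketch (illustrative; assumes a mapped ``User``
+        class)::
+
+            users = session.query(User).order_by(User.id).all()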
+ + """ + return list(self) + + @_generative(_no_clauseelement_condition) + def from_statement(self, statement): + """Execute the given SELECT statement and return results. + + This method bypasses all internal statement compilation, and the + statement is executed without modification. + + The statement is typically either a :func:`~.expression.text` + or :func:`~.expression.select` construct, and should return the set + of columns + appropriate to the entity class represented by this :class:`.Query`. + + .. seealso:: + + :ref:`orm_tutorial_literal_sql` - usage examples in the + ORM tutorial + + """ + statement = expression._expression_literal_as_text(statement) + + if not isinstance(statement, + (expression.TextClause, + expression.SelectBase)): + raise sa_exc.ArgumentError( + "from_statement accepts text(), select(), " + "and union() objects only.") + + self._statement = statement + + def first(self): + """Return the first result of this ``Query`` or + None if the result doesn't contain any row. + + first() applies a limit of one within the generated SQL, so that + only one primary entity row is generated on the server side + (note this may consist of multiple result rows if join-loaded + collections are present). + + Calling :meth:`.Query.first` results in an execution of the underlying query. + + """ + if self._statement is not None: + ret = list(self)[0:1] + else: + ret = list(self[0:1]) + if len(ret) > 0: + return ret[0] + else: + return None + + def one_or_none(self): + """Return at most one result or raise an exception. + + Returns ``None`` if the query selects + no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` + if multiple object identities are returned, or if multiple + rows are returned for a query that returns only scalar values + as opposed to full identity-mapped entities. + + Calling :meth:`.Query.one_or_none` results in an execution of the underlying + query. + + .. versionadded:: 1.0.9 + + Added :meth:`.Query.one_or_none` + + .. seealso:: + + :meth:`.Query.first` + + :meth:`.Query.one` + + + """ + ret = list(self) + + l = len(ret) + if l == 1: + return ret[0] + elif l == 0: + return None + else: + raise orm_exc.MultipleResultsFound( + "Multiple rows were found for one_or_none()") + + def one(self): + """Return exactly one result or raise an exception. + + Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects + no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` + if multiple object identities are returned, or if multiple + rows are returned for a query that returns only scalar values + as opposed to full identity-mapped entities. + + Calling :meth:`.one` results in an execution of the underlying query. + + .. seealso:: + + :meth:`.Query.first` + + :meth:`.Query.one_or_none` + + """ + ret = list(self) + + l = len(ret) + if l == 1: + return ret[0] + elif l == 0: + raise orm_exc.NoResultFound("No row was found for one()") + else: + raise orm_exc.MultipleResultsFound( + "Multiple rows were found for one()") + + def scalar(self): + """Return the first element of the first result or None + if no rows present. If multiple rows are returned, + raises MultipleResultsFound. + + >>> session.query(Item).scalar() + + >>> session.query(Item.id).scalar() + 1 + >>> session.query(Item.id).filter(Item.id < 0).scalar() + None + >>> session.query(Item.id, Item.name).scalar() + 1 + >>> session.query(func.count(Parent.id)).scalar() + 20 + + This results in an execution of the underlying query. 
+ + """ + try: + ret = self.one() + if not isinstance(ret, tuple): + return ret + return ret[0] + except orm_exc.NoResultFound: + return None + + def __iter__(self): + context = self._compile_context() + context.statement.use_labels = True + if self._autoflush and not self._populate_existing: + self.session._autoflush() + return self._execute_and_instances(context) + + def _connection_from_session(self, **kw): + conn = self.session.connection( + **kw) + if self._execution_options: + conn = conn.execution_options(**self._execution_options) + return conn + + def _execute_and_instances(self, querycontext): + conn = self._connection_from_session( + mapper=self._bind_mapper(), + clause=querycontext.statement, + close_with_result=True) + + result = conn.execute(querycontext.statement, self._params) + return loading.instances(querycontext.query, result, querycontext) + + @property + def column_descriptions(self): + """Return metadata about the columns which would be + returned by this :class:`.Query`. + + Format is a list of dictionaries:: + + user_alias = aliased(User, name='user2') + q = sess.query(User, User.id, user_alias) + + # this expression: + q.column_descriptions + + # would return: + [ + { + 'name':'User', + 'type':User, + 'aliased':False, + 'expr':User, + 'entity': User + }, + { + 'name':'id', + 'type':Integer(), + 'aliased':False, + 'expr':User.id, + 'entity': User + }, + { + 'name':'user2', + 'type':User, + 'aliased':True, + 'expr':user_alias, + 'entity': user_alias + } + ] + + """ + + return [ + { + 'name': ent._label_name, + 'type': ent.type, + 'aliased': getattr(insp_ent, 'is_aliased_class', False), + 'expr': ent.expr, + 'entity': + getattr(insp_ent, "entity", None) + if ent.entity_zero is not None + and not insp_ent.is_clause_element + else None + } + for ent, insp_ent in [ + ( + _ent, + (inspect(_ent.entity_zero) + if _ent.entity_zero is not None else None) + ) + for _ent in self._entities + ] + ] + + def instances(self, cursor, __context=None): + """Given a ResultProxy cursor as returned by connection.execute(), + return an ORM result as an iterator. + + e.g.:: + + result = engine.execute("select * from users") + for u in session.query(User).instances(result): + print u + """ + context = __context + if context is None: + context = QueryContext(self) + + return loading.instances(self, cursor, context) + + def merge_result(self, iterator, load=True): + """Merge a result into this :class:`.Query` object's Session. + + Given an iterator returned by a :class:`.Query` of the same structure + as this one, return an identical iterator of results, with all mapped + instances merged into the session using :meth:`.Session.merge`. This + is an optimized method which will merge all mapped instances, + preserving the structure of the result rows and unmapped columns with + less method overhead than that of calling :meth:`.Session.merge` + explicitly for each value. + + The structure of the results is determined based on the column list of + this :class:`.Query` - if these do not correspond, unchecked errors + will occur. + + The 'load' argument is the same as that of :meth:`.Session.merge`. + + For an example of how :meth:`~.Query.merge_result` is used, see + the source code for the example :ref:`examples_caching`, where + :meth:`~.Query.merge_result` is used to efficiently restore state + from a cache back into a target :class:`.Session`. 
+ + """ + + return loading.merge_result(self, iterator, load) + + @property + def _select_args(self): + return { + 'limit': self._limit, + 'offset': self._offset, + 'distinct': self._distinct, + 'prefixes': self._prefixes, + 'suffixes': self._suffixes, + 'group_by': self._group_by or None, + 'having': self._having + } + + @property + def _should_nest_selectable(self): + kwargs = self._select_args + return (kwargs.get('limit') is not None or + kwargs.get('offset') is not None or + kwargs.get('distinct', False)) + + def exists(self): + """A convenience method that turns a query into an EXISTS subquery + of the form EXISTS (SELECT 1 FROM ... WHERE ...). + + e.g.:: + + q = session.query(User).filter(User.name == 'fred') + session.query(q.exists()) + + Producing SQL similar to:: + + SELECT EXISTS ( + SELECT 1 FROM users WHERE users.name = :name_1 + ) AS anon_1 + + The EXISTS construct is usually used in the WHERE clause:: + + session.query(User.id).filter(q.exists()).scalar() + + Note that some databases such as SQL Server don't allow an + EXISTS expression to be present in the columns clause of a + SELECT. To select a simple boolean value based on the exists + as a WHERE, use :func:`.literal`:: + + from sqlalchemy import literal + + session.query(literal(True)).filter(q.exists()).scalar() + + .. versionadded:: 0.8.1 + + """ + + # .add_columns() for the case that we are a query().select_from(X), + # so that ".statement" can be produced (#2995) but also without + # omitting the FROM clause from a query(X) (#2818); + # .with_only_columns() after we have a core select() so that + # we get just "SELECT 1" without any entities. + return sql.exists(self.add_columns('1').with_labels(). + statement.with_only_columns([1])) + + def count(self): + """Return a count of rows this Query would return. + + This generates the SQL for this Query as follows:: + + SELECT count(1) AS count_1 FROM ( + SELECT + ) AS anon_1 + + .. versionchanged:: 0.7 + The above scheme is newly refined as of 0.7b3. + + For fine grained control over specific columns + to count, to skip the usage of a subquery or + otherwise control of the FROM clause, + or to use other aggregate functions, + use :attr:`~sqlalchemy.sql.expression.func` + expressions in conjunction + with :meth:`~.Session.query`, i.e.:: + + from sqlalchemy import func + + # count User records, without + # using a subquery. + session.query(func.count(User.id)) + + # return count of user "id" grouped + # by "name" + session.query(func.count(User.id)).\\ + group_by(User.name) + + from sqlalchemy import distinct + + # count distinct "name" values + session.query(func.count(distinct(User.name))) + + """ + col = sql.func.count(sql.literal_column('*')) + return self.from_self(col).scalar() + + def delete(self, synchronize_session='evaluate'): + """Perform a bulk delete query. + + Deletes rows matched by this query from the database. + + E.g.:: + + sess.query(User).filter(User.age == 25).\\ + delete(synchronize_session=False) + + sess.query(User).filter(User.age == 25).\\ + delete(synchronize_session='evaluate') + + .. warning:: The :meth:`.Query.delete` method is a "bulk" operation, + which bypasses ORM unit-of-work automation in favor of greater + performance. **Please read all caveats and warnings below.** + + :param synchronize_session: chooses the strategy for the removal of + matched objects from the session. Valid values are: + + ``False`` - don't synchronize the session. 
This option is the most
+            efficient and is reliable once the session is expired, which
+            typically occurs after a commit(), or explicitly using
+            expire_all().  Before the expiration, objects may still remain in
+            the session which were in fact deleted, which can lead to
+            confusing results if they are accessed via get() or already
+            loaded collections.
+
+            ``'fetch'`` - performs a select query before the delete to find
+            objects that are matched by the delete query and need to be
+            removed from the session.  Matched objects are removed from the
+            session.
+
+            ``'evaluate'`` - Evaluate the query's criteria in Python straight
+            on the objects in the session.  If evaluation of the criteria
+            isn't implemented, an error is raised.
+
+            The expression evaluator currently doesn't account for differing
+            string collations between the database and Python.
+
+        :return: the count of rows matched as returned by the database's
+            "row count" feature.
+
+        .. warning:: **Additional Caveats for bulk query deletes**
+
+            * The method does **not** offer in-Python cascading of
+              relationships - it is assumed that ON DELETE CASCADE/SET
+              NULL/etc. is configured for any foreign key references
+              which require it, otherwise the database may emit an
+              integrity violation if foreign key references are being
+              enforced.
+
+              After the DELETE, dependent objects in the
+              :class:`.Session` which were impacted by an ON DELETE
+              may not contain the current state, or may have been
+              deleted.  This issue is resolved once the
+              :class:`.Session` is expired, which normally occurs upon
+              :meth:`.Session.commit` or can be forced by using
+              :meth:`.Session.expire_all`.  Accessing an expired
+              object whose row has been deleted will invoke a SELECT
+              to locate the row; when the row is not found, an
+              :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is
+              raised.
+
+            * The ``'fetch'`` strategy results in an additional
+              SELECT statement emitted and will significantly reduce
+              performance.
+
+            * The ``'evaluate'`` strategy performs a scan of
+              all matching objects within the :class:`.Session`; if the
+              contents of the :class:`.Session` are expired, such as
+              via a preceding :meth:`.Session.commit` call, **this will
+              result in SELECT queries emitted for every matching object**.
+
+            * The :meth:`.MapperEvents.before_delete` and
+              :meth:`.MapperEvents.after_delete`
+              events **are not invoked** from this method.  Instead, the
+              :meth:`.SessionEvents.after_bulk_delete` method is provided to
+              act upon a mass DELETE of entity rows.
+
+        .. seealso::
+
+            :meth:`.Query.update`
+
+            :ref:`inserts_and_updates` - Core SQL tutorial
+
+        """
+        # TODO: cascades need handling.
+
+        delete_op = persistence.BulkDelete.factory(
+            self, synchronize_session)
+        delete_op.exec_()
+        return delete_op.rowcount
+
+    def update(self, values, synchronize_session='evaluate', update_args=None):
+        """Perform a bulk update query.
+
+        Updates rows matched by this query in the database.
+
+        E.g.::
+
+            sess.query(User).filter(User.age == 25).\\
+                update({User.age: User.age - 10}, synchronize_session=False)
+
+            sess.query(User).filter(User.age == 25).\\
+                update({"age": User.age - 10}, synchronize_session='evaluate')
+
+        .. warning:: The :meth:`.Query.update` method is a "bulk" operation,
+           which bypasses ORM unit-of-work automation in favor of greater
+           performance.
**Please read all caveats and warnings below.**
+
+        :param values: a dictionary with attribute names, or alternatively
+            mapped attributes or SQL expressions, as keys, and literal
+            values or sql expressions as values.  If :ref:`parameter-ordered
+            mode <updates_order_parameters>` is desired, the values can be
+            passed as a list of 2-tuples;
+            this requires that the
+            :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`
+            flag is passed to the :paramref:`.Query.update.update_args`
+            dictionary as well.
+
+            .. versionchanged:: 1.0.0 - string names in the values dictionary
+               are now resolved against the mapped entity; previously, these
+               strings were passed as literal column names with no
+               mapper-level translation.
+
+        :param synchronize_session: chooses the strategy to update the
+            attributes on objects in the session. Valid values are:
+
+            ``False`` - don't synchronize the session. This option is the most
+            efficient and is reliable once the session is expired, which
+            typically occurs after a commit(), or explicitly using
+            expire_all().  Before the expiration, updated objects may still
+            remain in the session with stale values on their attributes,
+            which can lead to confusing results.
+
+            ``'fetch'`` - performs a select query before the update to find
+            objects that are matched by the update query.  The updated
+            attributes are expired on matched objects.
+
+            ``'evaluate'`` - Evaluate the Query's criteria in Python straight
+            on the objects in the session.  If evaluation of the criteria
+            isn't implemented, an exception is raised.
+
+            The expression evaluator currently doesn't account for differing
+            string collations between the database and Python.
+
+        :param update_args: Optional dictionary, if present will be passed
+            to the underlying :func:`.update` construct as the ``**kw`` for
+            the object.  May be used to pass dialect-specific arguments such
+            as ``mysql_limit``, as well as other special arguments such as
+            :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`.
+
+            .. versionadded:: 1.0.0
+
+        :return: the count of rows matched as returned by the database's
+            "row count" feature.
+
+        .. warning:: **Additional Caveats for bulk query updates**
+
+            * The method does **not** offer in-Python cascading of
+              relationships - it is assumed that ON UPDATE CASCADE is
+              configured for any foreign key references which require
+              it, otherwise the database may emit an integrity
+              violation if foreign key references are being enforced.
+
+              After the UPDATE, dependent objects in the
+              :class:`.Session` which were impacted by an ON UPDATE
+              CASCADE may not contain the current state; this issue is
+              resolved once the :class:`.Session` is expired, which
+              normally occurs upon :meth:`.Session.commit` or can be
+              forced by using :meth:`.Session.expire_all`.
+
+            * The ``'fetch'`` strategy results in an additional
+              SELECT statement emitted and will significantly reduce
+              performance.
+
+            * The ``'evaluate'`` strategy performs a scan of
+              all matching objects within the :class:`.Session`; if the
+              contents of the :class:`.Session` are expired, such as
+              via a preceding :meth:`.Session.commit` call, **this will
+              result in SELECT queries emitted for every matching object**.
+
+            * The method supports multiple table updates, as detailed
+              in :ref:`multi_table_updates`, and this behavior does
+              extend to support updates of joined-inheritance and
+              other multiple table mappings.  However, the **join
+              condition of an inheritance mapper is not
+              automatically rendered**.
Care must be taken in any + multiple-table update to explicitly include the joining + condition between those tables, even in mappings where + this is normally automatic. E.g. if a class ``Engineer`` + subclasses ``Employee``, an UPDATE of the ``Engineer`` + local table using criteria against the ``Employee`` + local table might look like:: + + session.query(Engineer).\\ + filter(Engineer.id == Employee.id).\\ + filter(Employee.name == 'dilbert').\\ + update({"engineer_type": "programmer"}) + + * The :meth:`.MapperEvents.before_update` and + :meth:`.MapperEvents.after_update` + events **are not invoked from this method**. Instead, the + :meth:`.SessionEvents.after_bulk_update` method is provided to + act upon a mass UPDATE of entity rows. + + .. seealso:: + + :meth:`.Query.delete` + + :ref:`inserts_and_updates` - Core SQL tutorial + + """ + + update_args = update_args or {} + update_op = persistence.BulkUpdate.factory( + self, synchronize_session, values, update_args) + update_op.exec_() + return update_op.rowcount + + def _compile_context(self, labels=True): + if self.dispatch.before_compile: + for fn in self.dispatch.before_compile: + new_query = fn(self) + if new_query is not None: + self = new_query + + context = QueryContext(self) + + if context.statement is not None: + return context + + context.labels = labels + + context._for_update_arg = self._for_update_arg + + for entity in self._entities: + entity.setup_context(self, context) + + for rec in context.create_eager_joins: + strategy = rec[0] + strategy(*rec[1:]) + + if context.from_clause: + # "load from explicit FROMs" mode, + # i.e. when select_from() or join() is used + context.froms = list(context.from_clause) + # else "load from discrete FROMs" mode, + # i.e. when each _MappedEntity has its own FROM + + if self._enable_single_crit: + self._adjust_for_single_inheritance(context) + + if not context.primary_columns: + if self._only_load_props: + raise sa_exc.InvalidRequestError( + "No column-based properties specified for " + "refresh operation. 
Use session.expire() " + "to reload collections and related items.") + else: + raise sa_exc.InvalidRequestError( + "Query contains no columns with which to " + "SELECT from.") + + if context.multi_row_eager_loaders and self._should_nest_selectable: + context.statement = self._compound_eager_statement(context) + else: + context.statement = self._simple_statement(context) + + return context + + def _compound_eager_statement(self, context): + # for eager joins present and LIMIT/OFFSET/DISTINCT, + # wrap the query inside a select, + # then append eager joins onto that + + if context.order_by: + order_by_col_expr = list( + chain(*[ + sql_util.unwrap_order_by(o) + for o in context.order_by + ]) + ) + else: + context.order_by = None + order_by_col_expr = [] + + inner = sql.select( + context.primary_columns + order_by_col_expr, + context.whereclause, + from_obj=context.froms, + use_labels=context.labels, + # TODO: this order_by is only needed if + # LIMIT/OFFSET is present in self._select_args, + # else the application on the outside is enough + order_by=context.order_by, + **self._select_args + ) + + for hint in self._with_hints: + inner = inner.with_hint(*hint) + + if self._correlate: + inner = inner.correlate(*self._correlate) + + inner = inner.alias() + + equivs = self.__all_equivs() + + context.adapter = sql_util.ColumnAdapter(inner, equivs) + + statement = sql.select( + [inner] + context.secondary_columns, + use_labels=context.labels) + + statement._for_update_arg = context._for_update_arg + + from_clause = inner + for eager_join in context.eager_joins.values(): + # EagerLoader places a 'stop_on' attribute on the join, + # giving us a marker as to where the "splice point" of + # the join should be + from_clause = sql_util.splice_joins( + from_clause, + eager_join, eager_join.stop_on) + + statement.append_from(from_clause) + + if context.order_by: + statement.append_order_by( + *context.adapter.copy_and_process( + context.order_by + ) + ) + + statement.append_order_by(*context.eager_order_by) + return statement + + def _simple_statement(self, context): + if not context.order_by: + context.order_by = None + + if self._distinct and context.order_by: + order_by_col_expr = list( + chain(*[ + sql_util.unwrap_order_by(o) + for o in context.order_by + ]) + ) + context.primary_columns += order_by_col_expr + + context.froms += tuple(context.eager_joins.values()) + + statement = sql.select( + context.primary_columns + + context.secondary_columns, + context.whereclause, + from_obj=context.froms, + use_labels=context.labels, + order_by=context.order_by, + **self._select_args + ) + statement._for_update_arg = context._for_update_arg + + for hint in self._with_hints: + statement = statement.with_hint(*hint) + + if self._correlate: + statement = statement.correlate(*self._correlate) + + if context.eager_order_by: + statement.append_order_by(*context.eager_order_by) + return statement + + def _adjust_for_single_inheritance(self, context): + """Apply single-table-inheritance filtering. + + For all distinct single-table-inheritance mappers represented in + the columns clause of this query, add criterion to the WHERE + clause of the given QueryContext such that only the appropriate + subtypes are selected from the total results. 
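+
+        E.g. (illustrative), for a ``Manager`` subclass of ``Employee``
+        mapped to a single ``employee`` table with a discriminator
+        column, criterion similar to ``employee.type IN ('manager')``
+        is added to the WHERE clause of the SELECT.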
+ + """ + + for (ext_info, adapter) in set(self._mapper_adapter_map.values()): + if ext_info in self._join_entities: + continue + single_crit = ext_info.mapper._single_table_criterion + if single_crit is not None: + if adapter: + single_crit = adapter.traverse(single_crit) + single_crit = self._adapt_clause(single_crit, False, False) + context.whereclause = sql.and_( + sql.True_._ifnone(context.whereclause), + single_crit) + + def __str__(self): + return str(self._compile_context().statement) + +from ..sql.selectable import ForUpdateArg + + +class LockmodeArg(ForUpdateArg): + @classmethod + def parse_legacy_query(self, mode): + if mode in (None, False): + return None + + if mode == "read": + read = True + nowait = False + elif mode == "update": + read = nowait = False + elif mode == "update_nowait": + nowait = True + read = False + else: + raise sa_exc.ArgumentError( + "Unknown with_lockmode argument: %r" % mode) + + return LockmodeArg(read=read, nowait=nowait) + + +class _QueryEntity(object): + """represent an entity column returned within a Query result.""" + + def __new__(cls, *args, **kwargs): + if cls is _QueryEntity: + entity = args[1] + if not isinstance(entity, util.string_types) and \ + _is_mapped_class(entity): + cls = _MapperEntity + elif isinstance(entity, Bundle): + cls = _BundleEntity + else: + cls = _ColumnEntity + return object.__new__(cls) + + def _clone(self): + q = self.__class__.__new__(self.__class__) + q.__dict__ = self.__dict__.copy() + return q + + +class _MapperEntity(_QueryEntity): + """mapper/class/AliasedClass entity""" + + def __init__(self, query, entity): + if not query._primary_entity: + query._primary_entity = self + query._entities.append(self) + + self.entities = [entity] + self.expr = entity + + supports_single_entity = True + + def setup_entity(self, ext_info, aliased_adapter): + self.mapper = ext_info.mapper + self.aliased_adapter = aliased_adapter + self.selectable = ext_info.selectable + self.is_aliased_class = ext_info.is_aliased_class + self._with_polymorphic = ext_info.with_polymorphic_mappers + self._polymorphic_discriminator = \ + ext_info.polymorphic_on + self.entity_zero = ext_info + if ext_info.is_aliased_class: + self._label_name = self.entity_zero.name + else: + self._label_name = self.mapper.class_.__name__ + self.path = self.entity_zero._path_registry + + def set_with_polymorphic(self, query, cls_or_mappers, + selectable, polymorphic_on): + """Receive an update from a call to query.with_polymorphic(). + + Note the newer style of using a free standing with_polymporphic() + construct doesn't make use of this method. + + + """ + if self.is_aliased_class: + # TODO: invalidrequest ? 
+ raise NotImplementedError( + "Can't use with_polymorphic() against " + "an Aliased object" + ) + + if cls_or_mappers is None: + query._reset_polymorphic_adapter(self.mapper) + return + + mappers, from_obj = self.mapper._with_polymorphic_args( + cls_or_mappers, selectable) + self._with_polymorphic = mappers + self._polymorphic_discriminator = polymorphic_on + + self.selectable = from_obj + query._mapper_loads_polymorphically_with( + self.mapper, sql_util.ColumnAdapter( + from_obj, self.mapper._equivalent_columns)) + + filter_fn = id + + @property + def type(self): + return self.mapper.class_ + + @property + def entity_zero_or_selectable(self): + return self.entity_zero + + def corresponds_to(self, entity): + if entity.is_aliased_class: + if self.is_aliased_class: + if entity._base_alias is self.entity_zero._base_alias: + return True + return False + elif self.is_aliased_class: + if self.entity_zero._use_mapper_path: + return entity in self._with_polymorphic + else: + return entity is self.entity_zero + + return entity.common_parent(self.entity_zero) + + def adapt_to_selectable(self, query, sel): + query._entities.append(self) + + def _get_entity_clauses(self, query, context): + + adapter = None + + if not self.is_aliased_class: + if query._polymorphic_adapters: + adapter = query._polymorphic_adapters.get(self.mapper, None) + else: + adapter = self.aliased_adapter + + if adapter: + if query._from_obj_alias: + ret = adapter.wrap(query._from_obj_alias) + else: + ret = adapter + else: + ret = query._from_obj_alias + + return ret + + def row_processor(self, query, context, result): + adapter = self._get_entity_clauses(query, context) + + if context.adapter and adapter: + adapter = adapter.wrap(context.adapter) + elif not adapter: + adapter = context.adapter + + # polymorphic mappers which have concrete tables in + # their hierarchy usually + # require row aliasing unconditionally. + if not adapter and self.mapper._requires_row_aliasing: + adapter = sql_util.ColumnAdapter( + self.selectable, + self.mapper._equivalent_columns) + + if query._primary_entity is self: + only_load_props = query._only_load_props + refresh_state = context.refresh_state + else: + only_load_props = refresh_state = None + + _instance = loading._instance_processor( + self.mapper, + context, + result, + self.path, + adapter, + only_load_props=only_load_props, + refresh_state=refresh_state, + polymorphic_discriminator=self._polymorphic_discriminator + ) + + return _instance, self._label_name + + def setup_context(self, query, context): + adapter = self._get_entity_clauses(query, context) + + # if self._adapted_selectable is None: + context.froms += (self.selectable,) + + if context.order_by is False and self.mapper.order_by: + context.order_by = self.mapper.order_by + + # apply adaptation to the mapper's order_by if needed. + if adapter: + context.order_by = adapter.adapt_list( + util.to_list( + context.order_by + ) + ) + + loading._setup_entity_query( + context, self.mapper, self, + self.path, adapter, context.primary_columns, + with_polymorphic=self._with_polymorphic, + only_load_props=query._only_load_props, + polymorphic_discriminator=self._polymorphic_discriminator) + + def __str__(self): + return str(self.mapper) + + +@inspection._self_inspects +class Bundle(InspectionAttr): + """A grouping of SQL expressions that are returned by a :class:`.Query` + under one namespace. + + The :class:`.Bundle` essentially allows nesting of the tuple-based + results returned by a column-oriented :class:`.Query` object. 
It also + is extensible via simple subclassing, where the primary capability + to override is that of how the set of expressions should be returned, + allowing post-processing as well as custom return types, without + involving ORM identity-mapped classes. + + .. versionadded:: 0.9.0 + + .. seealso:: + + :ref:`bundles` + + """ + + single_entity = False + """If True, queries for a single Bundle will be returned as a single + entity, rather than an element within a keyed tuple.""" + + is_clause_element = False + + is_mapper = False + + is_aliased_class = False + + def __init__(self, name, *exprs, **kw): + """Construct a new :class:`.Bundle`. + + e.g.:: + + bn = Bundle("mybundle", MyClass.x, MyClass.y) + + for row in session.query(bn).filter( + bn.c.x == 5).filter(bn.c.y == 4): + print(row.mybundle.x, row.mybundle.y) + + :param name: name of the bundle. + :param \*exprs: columns or SQL expressions comprising the bundle. + :param single_entity=False: if True, rows for this :class:`.Bundle` + can be returned as a "single entity" outside of any enclosing tuple + in the same manner as a mapped entity. + + """ + self.name = self._label = name + self.exprs = exprs + self.c = self.columns = ColumnCollection() + self.columns.update((getattr(col, "key", col._label), col) + for col in exprs) + self.single_entity = kw.pop('single_entity', self.single_entity) + + columns = None + """A namespace of SQL expressions referred to by this :class:`.Bundle`. + + e.g.:: + + bn = Bundle("mybundle", MyClass.x, MyClass.y) + + q = sess.query(bn).filter(bn.c.x == 5) + + Nesting of bundles is also supported:: + + b1 = Bundle("b1", + Bundle('b2', MyClass.a, MyClass.b), + Bundle('b3', MyClass.x, MyClass.y) + ) + + q = sess.query(b1).filter( + b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9) + + .. seealso:: + + :attr:`.Bundle.c` + + """ + + c = None + """An alias for :attr:`.Bundle.columns`.""" + + def _clone(self): + cloned = self.__class__.__new__(self.__class__) + cloned.__dict__.update(self.__dict__) + return cloned + + def __clause_element__(self): + return expression.ClauseList(group=False, *self.c) + + @property + def clauses(self): + return self.__clause_element__().clauses + + def label(self, name): + """Provide a copy of this :class:`.Bundle` passing a new label.""" + + cloned = self._clone() + cloned.name = name + return cloned + + def create_row_processor(self, query, procs, labels): + """Produce the "row processing" function for this :class:`.Bundle`. + + May be overridden by subclasses. + + .. seealso:: + + :ref:`bundles` - includes an example of subclassing. 
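+
+ As a rough sketch, a hypothetical subclass could override this
+ method to return each row as a dictionary rather than a keyed
+ tuple::
+
+     class DictBundle(Bundle):
+         def create_row_processor(self, query, procs, labels):
+             def proc(row):
+                 # map each label to its processed column value
+                 return dict(
+                     zip(labels, (proc(row) for proc in procs))
+                 )
+             return proc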
+ + """ + keyed_tuple = util.lightweight_named_tuple('result', labels) + + def proc(row): + return keyed_tuple([proc(row) for proc in procs]) + return proc + + +class _BundleEntity(_QueryEntity): + def __init__(self, query, bundle, setup_entities=True): + query._entities.append(self) + self.bundle = self.expr = bundle + self.type = type(bundle) + self._label_name = bundle.name + self._entities = [] + + if setup_entities: + for expr in bundle.exprs: + if isinstance(expr, Bundle): + _BundleEntity(self, expr) + else: + _ColumnEntity(self, expr, namespace=self) + + self.entities = () + + self.filter_fn = lambda item: item + + self.supports_single_entity = self.bundle.single_entity + + @property + def entity_zero(self): + for ent in self._entities: + ezero = ent.entity_zero + if ezero is not None: + return ezero + else: + return None + + def corresponds_to(self, entity): + # TODO: this seems to have no effect for + # _ColumnEntity either + return False + + @property + def entity_zero_or_selectable(self): + for ent in self._entities: + ezero = ent.entity_zero_or_selectable + if ezero is not None: + return ezero + else: + return None + + def adapt_to_selectable(self, query, sel): + c = _BundleEntity(query, self.bundle, setup_entities=False) + # c._label_name = self._label_name + # c.entity_zero = self.entity_zero + # c.entities = self.entities + + for ent in self._entities: + ent.adapt_to_selectable(c, sel) + + def setup_entity(self, ext_info, aliased_adapter): + for ent in self._entities: + ent.setup_entity(ext_info, aliased_adapter) + + def setup_context(self, query, context): + for ent in self._entities: + ent.setup_context(query, context) + + def row_processor(self, query, context, result): + procs, labels = zip( + *[ent.row_processor(query, context, result) + for ent in self._entities] + ) + + proc = self.bundle.create_row_processor(query, procs, labels) + + return proc, self._label_name + + +class _ColumnEntity(_QueryEntity): + """Column/expression based entity.""" + + def __init__(self, query, column, namespace=None): + self.expr = column + self.namespace = namespace + search_entities = True + check_column = False + + if isinstance(column, util.string_types): + column = sql.literal_column(column) + self._label_name = column.name + search_entities = False + check_column = True + _entity = None + elif isinstance(column, ( + attributes.QueryableAttribute, + interfaces.PropComparator + )): + _entity = getattr(column, '_parententity', None) + if _entity is not None: + search_entities = False + self._label_name = column.key + column = column._query_clause_element() + check_column = True + if isinstance(column, Bundle): + _BundleEntity(query, column) + return + + if not isinstance(column, sql.ColumnElement): + if hasattr(column, '_select_iterable'): + # break out an object like Table into + # individual columns + for c in column._select_iterable: + if c is column: + break + _ColumnEntity(query, c, namespace=column) + else: + return + + raise sa_exc.InvalidRequestError( + "SQL expression, column, or mapped entity " + "expected - got '%r'" % (column, ) + ) + elif not check_column: + self._label_name = getattr(column, 'key', None) + search_entities = True + + self.type = type_ = column.type + if type_.hashable: + self.filter_fn = lambda item: item + else: + counter = util.counter() + self.filter_fn = lambda item: counter() + + # If the Column is unnamed, give it a + # label() so that mutable column expressions + # can be located in the result even + # if the expression's identity has been changed + 
# due to adaption. + + if not column._label and not getattr(column, 'is_literal', False): + column = column.label(self._label_name) + + query._entities.append(self) + + self.column = column + self.froms = set() + + # look for ORM entities represented within the + # given expression. Try to count only entities + # for columns whose FROM object is in the actual list + # of FROMs for the overall expression - this helps + # subqueries which were built from ORM constructs from + # leaking out their entities into the main select construct + self.actual_froms = actual_froms = set(column._from_objects) + + if not search_entities: + self.entity_zero = _entity + if _entity: + self.entities = [_entity] + else: + self.entities = [] + self._from_entities = set(self.entities) + else: + all_elements = [ + elem for elem in visitors.iterate(column, {}) + if 'parententity' in elem._annotations + ] + + self.entities = util.unique_list([ + elem._annotations['parententity'] + for elem in all_elements + if 'parententity' in elem._annotations + ]) + + self._from_entities = set([ + elem._annotations['parententity'] + for elem in all_elements + if 'parententity' in elem._annotations + and actual_froms.intersection(elem._from_objects) + ]) + if self.entities: + self.entity_zero = self.entities[0] + elif self.namespace is not None: + self.entity_zero = self.namespace + else: + self.entity_zero = None + + supports_single_entity = False + + @property + def entity_zero_or_selectable(self): + if self.entity_zero is not None: + return self.entity_zero + elif self.actual_froms: + return list(self.actual_froms)[0] + else: + return None + + def adapt_to_selectable(self, query, sel): + c = _ColumnEntity(query, sel.corresponding_column(self.column)) + c._label_name = self._label_name + c.entity_zero = self.entity_zero + c.entities = self.entities + + def setup_entity(self, ext_info, aliased_adapter): + if 'selectable' not in self.__dict__: + self.selectable = ext_info.selectable + + if self.actual_froms.intersection(ext_info.selectable._from_objects): + self.froms.add(ext_info.selectable) + + def corresponds_to(self, entity): + # TODO: just returning False here, + # no tests fail + if self.entity_zero is None: + return False + elif _is_aliased_class(entity): + # TODO: polymorphic subclasses ? 
+ return entity is self.entity_zero + else: + return not _is_aliased_class(self.entity_zero) and \ + entity.common_parent(self.entity_zero) + + def row_processor(self, query, context, result): + if ('fetch_column', self) in context.attributes: + column = context.attributes[('fetch_column', self)] + else: + column = query._adapt_clause(self.column, False, True) + + if context.adapter: + column = context.adapter.columns[column] + + getter = result._getter(column) + return getter, self._label_name + + def setup_context(self, query, context): + column = query._adapt_clause(self.column, False, True) + context.froms += tuple(self.froms) + context.primary_columns.append(column) + + context.attributes[('fetch_column', self)] = column + + def __str__(self): + return str(self.column) + + +class QueryContext(object): + __slots__ = ( + 'multi_row_eager_loaders', 'adapter', 'froms', 'for_update', + 'query', 'session', 'autoflush', 'populate_existing', + 'invoke_all_eagers', 'version_check', 'refresh_state', + 'primary_columns', 'secondary_columns', 'eager_order_by', + 'eager_joins', 'create_eager_joins', 'propagate_options', + 'attributes', 'statement', 'from_clause', 'whereclause', + 'order_by', 'labels', '_for_update_arg', 'runid', 'partials' + ) + + def __init__(self, query): + + if query._statement is not None: + if isinstance(query._statement, expression.SelectBase) and \ + not query._statement._textual and \ + not query._statement.use_labels: + self.statement = query._statement.apply_labels() + else: + self.statement = query._statement + else: + self.statement = None + self.from_clause = query._from_obj + self.whereclause = query._criterion + self.order_by = query._order_by + + self.multi_row_eager_loaders = False + self.adapter = None + self.froms = () + self.for_update = None + self.query = query + self.session = query.session + self.autoflush = query._autoflush + self.populate_existing = query._populate_existing + self.invoke_all_eagers = query._invoke_all_eagers + self.version_check = query._version_check + self.refresh_state = query._refresh_state + self.primary_columns = [] + self.secondary_columns = [] + self.eager_order_by = [] + self.eager_joins = {} + self.create_eager_joins = [] + self.propagate_options = set(o for o in query._with_options if + o.propagate_to_loaders) + self.attributes = query._attributes.copy() + + +class AliasOption(interfaces.MapperOption): + + def __init__(self, alias): + """Return a :class:`.MapperOption` that will indicate to the :class:`.Query` + that the main table has been aliased. + + This is a seldom-used option to suit the + very rare case that :func:`.contains_eager` + is being used in conjunction with a user-defined SELECT + statement that aliases the parent table. E.g.:: + + # define an aliased UNION called 'ulist' + ulist = users.select(users.c.user_id==7).\\ + union(users.select(users.c.user_id>7)).\\ + alias('ulist') + + # add on an eager load of "addresses" + statement = ulist.outerjoin(addresses).\\ + select().apply_labels() + + # create query, indicating "ulist" will be an + # alias for the main table, "addresses" + # property should be eager loaded + query = session.query(User).options( + contains_alias(ulist), + contains_eager(User.addresses)) + + # then get results via the statement + results = query.from_statement(statement).all() + + :param alias: is the string name of an alias, or a + :class:`~.sql.expression.Alias` object representing + the alias. 
+ + """ + self.alias = alias + + def process_query(self, query): + if isinstance(self.alias, util.string_types): + alias = query._mapper_zero().mapped_table.alias(self.alias) + else: + alias = self.alias + query._from_obj_alias = sql_util.ColumnAdapter(alias) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/relationships.py b/lib/python3.4/site-packages/sqlalchemy/orm/relationships.py new file mode 100644 index 0000000..c58dd98 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/relationships.py @@ -0,0 +1,2861 @@ +# orm/relationships.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Heuristics related to join conditions as used in +:func:`.relationship`. + +Provides the :class:`.JoinCondition` object, which encapsulates +SQL annotation and aliasing behavior focused on the `primaryjoin` +and `secondaryjoin` aspects of :func:`.relationship`. + +""" +from __future__ import absolute_import +from .. import sql, util, exc as sa_exc, schema, log + +import weakref +from .util import CascadeOptions, _orm_annotate, _orm_deannotate +from . import dependency +from . import attributes +from ..sql.util import ( + ClauseAdapter, + join_condition, _shallow_annotate, visit_binary_product, + _deep_deannotate, selectables_overlap, adapt_criterion_to_null +) +from ..sql import operators, expression, visitors +from .interfaces import (MANYTOMANY, MANYTOONE, ONETOMANY, + StrategizedProperty, PropComparator) +from ..inspection import inspect +from . import mapper as mapperlib +import collections + + +def remote(expr): + """Annotate a portion of a primaryjoin expression + with a 'remote' annotation. + + See the section :ref:`relationship_custom_foreign` for a + description of use. + + .. versionadded:: 0.8 + + .. seealso:: + + :ref:`relationship_custom_foreign` + + :func:`.foreign` + + """ + return _annotate_columns(expression._clause_element_as_expr(expr), + {"remote": True}) + + +def foreign(expr): + """Annotate a portion of a primaryjoin expression + with a 'foreign' annotation. + + See the section :ref:`relationship_custom_foreign` for a + description of use. + + .. versionadded:: 0.8 + + .. seealso:: + + :ref:`relationship_custom_foreign` + + :func:`.remote` + + """ + + return _annotate_columns(expression._clause_element_as_expr(expr), + {"foreign": True}) + + +@log.class_logger +@util.langhelpers.dependency_for("sqlalchemy.orm.properties") +class RelationshipProperty(StrategizedProperty): + """Describes an object property that holds a single item or list + of items that correspond to a related database table. + + Public constructor is the :func:`.orm.relationship` function. 
+ + See also: + + :ref:`relationship_config_toplevel` + + """ + + strategy_wildcard_key = 'relationship' + + _dependency_processor = None + + def __init__(self, argument, + secondary=None, primaryjoin=None, + secondaryjoin=None, + foreign_keys=None, + uselist=None, + order_by=False, + backref=None, + back_populates=None, + post_update=False, + cascade=False, extension=None, + viewonly=False, lazy=True, + collection_class=None, passive_deletes=False, + passive_updates=True, remote_side=None, + enable_typechecks=True, join_depth=None, + comparator_factory=None, + single_parent=False, innerjoin=False, + distinct_target_key=None, + doc=None, + active_history=False, + cascade_backrefs=True, + load_on_pending=False, + bake_queries=True, + strategy_class=None, _local_remote_pairs=None, + query_class=None, + info=None): + """Provide a relationship between two mapped classes. + + This corresponds to a parent-child or associative table relationship. + The constructed class is an instance of + :class:`.RelationshipProperty`. + + A typical :func:`.relationship`, used in a classical mapping:: + + mapper(Parent, properties={ + 'children': relationship(Child) + }) + + Some arguments accepted by :func:`.relationship` optionally accept a + callable function, which when called produces the desired value. + The callable is invoked by the parent :class:`.Mapper` at "mapper + initialization" time, which happens only when mappers are first used, + and is assumed to be after all mappings have been constructed. This + can be used to resolve order-of-declaration and other dependency + issues, such as if ``Child`` is declared below ``Parent`` in the same + file:: + + mapper(Parent, properties={ + "children":relationship(lambda: Child, + order_by=lambda: Child.id) + }) + + When using the :ref:`declarative_toplevel` extension, the Declarative + initializer allows string arguments to be passed to + :func:`.relationship`. These string arguments are converted into + callables that evaluate the string as Python code, using the + Declarative class-registry as a namespace. This allows the lookup of + related classes to be automatic via their string name, and removes the + need to import related classes at all into the local module space:: + + from sqlalchemy.ext.declarative import declarative_base + + Base = declarative_base() + + class Parent(Base): + __tablename__ = 'parent' + id = Column(Integer, primary_key=True) + children = relationship("Child", order_by="Child.id") + + .. seealso:: + + :ref:`relationship_config_toplevel` - Full introductory and + reference documentation for :func:`.relationship`. + + :ref:`orm_tutorial_relationship` - ORM tutorial introduction. + + :param argument: + a mapped class, or actual :class:`.Mapper` instance, representing + the target of the relationship. + + :paramref:`~.relationship.argument` may also be passed as a callable + function which is evaluated at mapper initialization time, and may + be passed as a Python-evaluable string when using Declarative. + + .. seealso:: + + :ref:`declarative_configuring_relationships` - further detail + on relationship configuration when using Declarative. + + :param secondary: + for a many-to-many relationship, specifies the intermediary + table, and is typically an instance of :class:`.Table`. + In less common circumstances, the argument may also be specified + as an :class:`.Alias` construct, or even a :class:`.Join` construct. 
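+
+ As a minimal sketch (the ``association`` table and the
+ ``Parent``/``Child`` declarative classes here are hypothetical)::
+
+     association = Table('association', Base.metadata,
+         Column('left_id', Integer, ForeignKey('left.id')),
+         Column('right_id', Integer, ForeignKey('right.id'))
+     )
+
+     class Parent(Base):
+         __tablename__ = 'left'
+         id = Column(Integer, primary_key=True)
+         children = relationship("Child", secondary=association)
+
+     class Child(Base):
+         __tablename__ = 'right'
+         id = Column(Integer, primary_key=True)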
+ + :paramref:`~.relationship.secondary` may + also be passed as a callable function which is evaluated at + mapper initialization time. When using Declarative, it may also + be a string argument noting the name of a :class:`.Table` that is + present in the :class:`.MetaData` collection associated with the + parent-mapped :class:`.Table`. + + The :paramref:`~.relationship.secondary` keyword argument is + typically applied in the case where the intermediary :class:`.Table` + is not otherwise expressed in any direct class mapping. If the + "secondary" table is also explicitly mapped elsewhere (e.g. as in + :ref:`association_pattern`), one should consider applying the + :paramref:`~.relationship.viewonly` flag so that this + :func:`.relationship` is not used for persistence operations which + may conflict with those of the association object pattern. + + .. seealso:: + + :ref:`relationships_many_to_many` - Reference example of "many + to many". + + :ref:`orm_tutorial_many_to_many` - ORM tutorial introduction to + many-to-many relationships. + + :ref:`self_referential_many_to_many` - Specifics on using + many-to-many in a self-referential case. + + :ref:`declarative_many_to_many` - Additional options when using + Declarative. + + :ref:`association_pattern` - an alternative to + :paramref:`~.relationship.secondary` when composing association + table relationships, allowing additional attributes to be + specified on the association table. + + :ref:`composite_secondary_join` - a lesser-used pattern which + in some cases can enable complex :func:`.relationship` SQL + conditions to be used. + + .. versionadded:: 0.9.2 :paramref:`~.relationship.secondary` works + more effectively when referring to a :class:`.Join` instance. + + :param active_history=False: + When ``True``, indicates that the "previous" value for a + many-to-one reference should be loaded when replaced, if + not already loaded. Normally, history tracking logic for + simple many-to-ones only needs to be aware of the "new" + value in order to perform a flush. This flag is available + for applications that make use of + :func:`.attributes.get_history` which also need to know + the "previous" value of the attribute. + + :param backref: + indicates the string name of a property to be placed on the related + mapper's class that will handle this relationship in the other + direction. The other property will be created automatically + when the mappers are configured. Can also be passed as a + :func:`.backref` object to control the configuration of the + new relationship. + + .. seealso:: + + :ref:`relationships_backref` - Introductory documentation and + examples. + + :paramref:`~.relationship.back_populates` - alternative form + of backref specification. + + :func:`.backref` - allows control over :func:`.relationship` + configuration when using :paramref:`~.relationship.backref`. + + + :param back_populates: + Takes a string name and has the same meaning as + :paramref:`~.relationship.backref`, except the complementing + property is **not** created automatically, and instead must be + configured explicitly on the other mapper. The complementing + property should also indicate + :paramref:`~.relationship.back_populates` to this relationship to + ensure proper functioning. + + .. seealso:: + + :ref:`relationships_backref` - Introductory documentation and + examples. + + :paramref:`~.relationship.backref` - alternative form + of backref specification. 
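+
+ For example, a minimal sketch using hypothetical ``Parent`` and
+ ``Child`` declarative classes, with each side naming the other
+ explicitly::
+
+     class Parent(Base):
+         __tablename__ = 'parent'
+         id = Column(Integer, primary_key=True)
+         children = relationship("Child", back_populates="parent")
+
+     class Child(Base):
+         __tablename__ = 'child'
+         id = Column(Integer, primary_key=True)
+         parent_id = Column(Integer, ForeignKey('parent.id'))
+         parent = relationship("Parent", back_populates="children")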
+
+ :param bake_queries=True:
+ Use the :class:`.BakedQuery` cache to cache the construction of SQL
+ used in lazy loads, when the :func:`.bake_lazy_loaders` function has
+ first been called. Defaults to True and is intended to provide an
+ "opt out" flag per-relationship when the baked query cache system is
+ in use.
+
+ .. warning::
+
+ This flag **only** has an effect when the application-wide
+ :func:`.bake_lazy_loaders` function has been called. It
+ defaults to True so is an "opt out" flag.
+
+ Setting this flag to False when baked queries are otherwise in
+ use might be used to reduce
+ ORM memory use for this :func:`.relationship`, or to work around
+ unresolved stability issues observed within the baked query
+ cache system.
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :ref:`baked_toplevel`
+
+ :param cascade:
+ a comma-separated list of cascade rules which determines how
+ Session operations should be "cascaded" from parent to child.
+ This defaults to ``False``, which means the default cascade
+ should be used - this default cascade is ``"save-update, merge"``.
+
+ The available cascades are ``save-update``, ``merge``,
+ ``expunge``, ``delete``, ``delete-orphan``, and ``refresh-expire``.
+ An additional option, ``all``, indicates shorthand for
+ ``"save-update, merge, refresh-expire,
+ expunge, delete"``, and is often used as in ``"all, delete-orphan"``
+ to indicate that related objects should follow along with the
+ parent object in all cases, and be deleted when de-associated.
+
+ .. seealso::
+
+ :ref:`unitofwork_cascades` - Full detail on each of the available
+ cascade options.
+
+ :ref:`tutorial_delete_cascade` - Tutorial example describing
+ a delete cascade.
+
+ :param cascade_backrefs=True:
+ a boolean value indicating if the ``save-update`` cascade should
+ operate along an assignment event intercepted by a backref.
+ When set to ``False``, the attribute managed by this relationship
+ will not cascade an incoming transient object into the session of a
+ persistent parent, if the event is received via backref.
+
+ .. seealso::
+
+ :ref:`backref_cascade` - Full discussion and examples on how
+ the :paramref:`~.relationship.cascade_backrefs` option is used.
+
+ :param collection_class:
+ a class or callable that returns a new list-holding object, which
+ will be used in place of a plain list for storing elements.
+
+ .. seealso::
+
+ :ref:`custom_collections` - Introductory documentation and
+ examples.
+
+ :param comparator_factory:
+ a class which extends :class:`.RelationshipProperty.Comparator`
+ which provides custom SQL clause generation for comparison
+ operations.
+
+ .. seealso::
+
+ :class:`.PropComparator` - some detail on redefining comparators
+ at this level.
+
+ :ref:`custom_comparators` - Brief intro to this feature.
+
+
+ :param distinct_target_key=None:
+ Indicate if a "subquery" eager load should apply the DISTINCT
+ keyword to the innermost SELECT statement. When left as ``None``,
+ the DISTINCT keyword will be applied in those cases when the target
+ columns do not comprise the full primary key of the target table.
+ When set to ``True``, the DISTINCT keyword is applied to the
+ innermost SELECT unconditionally.
+
+ It may be desirable to set this flag to False when the DISTINCT is
+ reducing performance of the innermost subquery beyond that of what
+ duplicate innermost rows may be causing.
+
+ .. versionadded:: 0.8.3 -
+ :paramref:`~.relationship.distinct_target_key` allows the
+ subquery eager loader to apply a DISTINCT modifier to the
+ innermost SELECT.
+
+ .. versionchanged:: 0.9.0 -
+ :paramref:`~.relationship.distinct_target_key` now defaults to
+ ``None``, so that the feature enables itself automatically for
+ those cases where the innermost query targets a non-unique
+ key.
+
+ .. seealso::
+
+ :ref:`loading_toplevel` - includes an introduction to subquery
+ eager loading.
+
+ :param doc:
+ docstring which will be applied to the resulting descriptor.
+
+ :param extension:
+ an :class:`.AttributeExtension` instance, or list of extensions,
+ which will be prepended to the list of attribute listeners for
+ the resulting descriptor placed on the class.
+
+ .. deprecated:: 0.7 Please see :class:`.AttributeEvents`.
+
+ :param foreign_keys:
+
+ a list of columns which are to be used as "foreign key"
+ columns, or columns which refer to the value in a remote
+ column, within the context of this :func:`.relationship`
+ object's :paramref:`~.relationship.primaryjoin` condition.
+ That is, if the :paramref:`~.relationship.primaryjoin`
+ condition of this :func:`.relationship` is ``a.id ==
+ b.a_id``, and the values in ``b.a_id`` are required to be
+ present in ``a.id``, then the "foreign key" column of this
+ :func:`.relationship` is ``b.a_id``.
+
+ In normal cases, the :paramref:`~.relationship.foreign_keys`
+ parameter is **not required.** :func:`.relationship` will
+ automatically determine which columns in the
+ :paramref:`~.relationship.primaryjoin` condition are to be
+ considered "foreign key" columns based on those
+ :class:`.Column` objects that specify :class:`.ForeignKey`,
+ or are otherwise listed as referencing columns in a
+ :class:`.ForeignKeyConstraint` construct.
+ :paramref:`~.relationship.foreign_keys` is only needed when:
+
+ 1. There is more than one way to construct a join from the local
+ table to the remote table, as there are multiple foreign key
+ references present. Setting ``foreign_keys`` will limit the
+ :func:`.relationship` to consider just those columns specified
+ here as "foreign".
+
+ .. versionchanged:: 0.8
+ A multiple-foreign key join ambiguity can be resolved by
+ setting the :paramref:`~.relationship.foreign_keys`
+ parameter alone, without the need to explicitly set
+ :paramref:`~.relationship.primaryjoin` as well.
+
+ 2. The :class:`.Table` being mapped does not actually have
+ :class:`.ForeignKey` or :class:`.ForeignKeyConstraint`
+ constructs present, often because the table
+ was reflected from a database that does not support foreign key
+ reflection (MySQL MyISAM).
+
+ 3. The :paramref:`~.relationship.primaryjoin` argument is used to
+ construct a non-standard join condition, which makes use of
+ columns or expressions that do not normally refer to their
+ "parent" column, such as a join condition expressed by a
+ complex comparison using a SQL function.
+
+ The :func:`.relationship` construct will raise informative
+ error messages that suggest the use of the
+ :paramref:`~.relationship.foreign_keys` parameter when
+ presented with an ambiguous condition. In typical cases,
+ if :func:`.relationship` doesn't raise any exceptions, the
+ :paramref:`~.relationship.foreign_keys` parameter is usually
+ not needed.
+
+ :paramref:`~.relationship.foreign_keys` may also be passed as a
+ callable function which is evaluated at mapper initialization time,
+ and may be passed as a Python-evaluable string when using
+ Declarative.
+
+ .. seealso::
+
+ :ref:`relationship_foreign_keys`
+
+ :ref:`relationship_custom_foreign`
+
+ :func:`.foreign` - allows direct annotation of the "foreign"
+ columns within a :paramref:`~.relationship.primaryjoin` condition.
+
+ .. versionadded:: 0.8
+ The :func:`.foreign` annotation can also be applied
+ directly to the :paramref:`~.relationship.primaryjoin`
+ expression, which is an alternate, more specific system of
+ describing which columns in a particular
+ :paramref:`~.relationship.primaryjoin` should be considered
+ "foreign".
+
+ :param info: Optional data dictionary which will be populated into the
+ :attr:`.MapperProperty.info` attribute of this object.
+
+ .. versionadded:: 0.8
+
+ :param innerjoin=False:
+ when ``True``, joined eager loads will use an inner join to join
+ against related tables instead of an outer join. The purpose
+ of this option is generally one of performance, as inner joins
+ generally perform better than outer joins.
+
+ This flag can be set to ``True`` when the relationship references an
+ object via many-to-one using local foreign keys that are not
+ nullable, or when the reference is one-to-one or a collection that
+ is guaranteed to have at least one entry.
+
+ The option supports the same "nested" and "unnested" options as
+ that of :paramref:`.joinedload.innerjoin`. See that flag
+ for details on nested / unnested behaviors.
+
+ .. seealso::
+
+ :paramref:`.joinedload.innerjoin` - the option as specified by
+ loader option, including detail on nesting behavior.
+
+ :ref:`what_kind_of_loading` - Discussion of some details of
+ various loader options.
+
+
+ :param join_depth:
+ when non-``None``, an integer value indicating how many levels
+ deep "eager" loaders should join on a self-referring or cyclical
+ relationship. The number counts how many times the same Mapper
+ shall be present in the loading condition along a particular join
+ branch. When left at its default of ``None``, eager loaders
+ will stop chaining when they encounter the same target mapper
+ which is already higher up in the chain. This option applies
+ both to joined- and subquery- eager loaders.
+
+ .. seealso::
+
+ :ref:`self_referential_eager_loading` - Introductory documentation
+ and examples.
+
+ :param lazy='select': specifies
+ how the related items should be loaded. Default value is
+ ``select``. Values include:
+
+ * ``select`` - items should be loaded lazily when the property is
+ first accessed, using a separate SELECT statement, or identity map
+ fetch for simple many-to-one references.
+
+ * ``immediate`` - items should be loaded as the parents are loaded,
+ using a separate SELECT statement, or identity map fetch for
+ simple many-to-one references.
+
+ * ``joined`` - items should be loaded "eagerly" in the same query as
+ that of the parent, using a JOIN or LEFT OUTER JOIN. Whether
+ the join is "outer" or not is determined by the
+ :paramref:`~.relationship.innerjoin` parameter.
+
+ * ``subquery`` - items should be loaded "eagerly" as the parents are
+ loaded, using one additional SQL statement, which issues a JOIN to
+ a subquery of the original statement, for each collection
+ requested.
+
+ * ``noload`` - no loading should occur at any time. This is to
+ support "write-only" attributes, or attributes which are
+ populated in some manner specific to the application.
+
+ * ``dynamic`` - the attribute will return a pre-configured
+ :class:`.Query` object for all read
+ operations, onto which further filtering operations can be
+ applied before iterating the results. See
+ the section :ref:`dynamic_relationship` for more details.
+
+ * True - a synonym for 'select'
+
+ * False - a synonym for 'joined'
+
+ * None - a synonym for 'noload'
+
+ .. seealso::
+
+ :doc:`/orm/loading_relationships` - Full documentation on relationship loader
+ configuration.
+
+ :ref:`dynamic_relationship` - detail on the ``dynamic`` option.
+
+ :param load_on_pending=False:
+ Indicates loading behavior for transient or pending parent objects.
+
+ When set to ``True``, causes the lazy-loader to
+ issue a query for a parent object that is not persistent, meaning it
+ has never been flushed. This may take effect for a pending object
+ when autoflush is disabled, or for a transient object that has been
+ "attached" to a :class:`.Session` but is not part of its pending
+ collection.
+
+ The :paramref:`~.relationship.load_on_pending` flag does not improve
+ behavior when the ORM is used normally - object references should be
+ constructed at the object level, not at the foreign key level, so
+ that they are present in an ordinary way before a flush proceeds.
+ This flag is not intended for general use.
+
+ .. seealso::
+
+ :meth:`.Session.enable_relationship_loading` - this method
+ establishes "load on pending" behavior for the whole object, and
+ also allows loading on objects that remain transient or
+ detached.
+
+ :param order_by:
+ indicates the ordering that should be applied when loading these
+ items. :paramref:`~.relationship.order_by` is expected to refer to
+ one of the :class:`.Column` objects to which the target class is
+ mapped, or the attribute itself bound to the target class which
+ refers to the column.
+
+ :paramref:`~.relationship.order_by` may also be passed as a callable
+ function which is evaluated at mapper initialization time, and may
+ be passed as a Python-evaluable string when using Declarative.
+
+ :param passive_deletes=False:
+ Indicates loading behavior during delete operations.
+
+ A value of True indicates that unloaded child items should not
+ be loaded during a delete operation on the parent. Normally,
+ when a parent item is deleted, all child items are loaded so
+ that they can either be marked as deleted, or have their
+ foreign key to the parent set to NULL. Marking this flag as
+ True usually implies an ON DELETE rule is in
+ place which will handle updating/deleting child rows on the
+ database side.
+
+ Additionally, setting the flag to the string value 'all' will
+ disable the "nulling out" of the child foreign keys, when there
+ is no delete or delete-orphan cascade enabled. This is
+ typically used when a triggering or error raise scenario is in
+ place on the database side. Note that the foreign key
+ attributes on in-session child objects will not be changed
+ after a flush occurs, so this is a very special use-case
+ setting.
+
+ .. seealso::
+
+ :ref:`passive_deletes` - Introductory documentation
+ and examples.
+
+ :param passive_updates=True:
+ Indicates the persistence behavior to take when a referenced
+ primary key value changes in place, indicating that the referencing
+ foreign key columns will also need their value changed.
+ + When True, it is assumed that ``ON UPDATE CASCADE`` is configured on + the foreign key in the database, and that the database will + handle propagation of an UPDATE from a source column to + dependent rows. When False, the SQLAlchemy :func:`.relationship` + construct will attempt to emit its own UPDATE statements to + modify related targets. However note that SQLAlchemy **cannot** + emit an UPDATE for more than one level of cascade. Also, + setting this flag to False is not compatible in the case where + the database is in fact enforcing referential integrity, unless + those constraints are explicitly "deferred", if the target backend + supports it. + + It is highly advised that an application which is employing + mutable primary keys keeps ``passive_updates`` set to True, + and instead uses the referential integrity features of the database + itself in order to handle the change efficiently and fully. + + .. seealso:: + + :ref:`passive_updates` - Introductory documentation and + examples. + + :paramref:`.mapper.passive_updates` - a similar flag which + takes effect for joined-table inheritance mappings. + + :param post_update: + this indicates that the relationship should be handled by a + second UPDATE statement after an INSERT or before a + DELETE. Currently, it also will issue an UPDATE after the + instance was UPDATEd as well, although this technically should + be improved. This flag is used to handle saving bi-directional + dependencies between two individual rows (i.e. each row + references the other), where it would otherwise be impossible to + INSERT or DELETE both rows fully since one row exists before the + other. Use this flag when a particular mapping arrangement will + incur two rows that are dependent on each other, such as a table + that has a one-to-many relationship to a set of child rows, and + also has a column that references a single child row within that + list (i.e. both tables contain a foreign key to each other). If + a flush operation returns an error that a "cyclical + dependency" was detected, this is a cue that you might want to + use :paramref:`~.relationship.post_update` to "break" the cycle. + + .. seealso:: + + :ref:`post_update` - Introductory documentation and examples. + + :param primaryjoin: + a SQL expression that will be used as the primary + join of this child object against the parent object, or in a + many-to-many relationship the join of the primary object to the + association table. By default, this value is computed based on the + foreign key relationships of the parent and child tables (or + association table). + + :paramref:`~.relationship.primaryjoin` may also be passed as a + callable function which is evaluated at mapper initialization time, + and may be passed as a Python-evaluable string when using + Declarative. + + .. seealso:: + + :ref:`relationship_primaryjoin` + + :param remote_side: + used for self-referential relationships, indicates the column or + list of columns that form the "remote side" of the relationship. + + :paramref:`.relationship.remote_side` may also be passed as a + callable function which is evaluated at mapper initialization time, + and may be passed as a Python-evaluable string when using + Declarative. + + .. versionchanged:: 0.8 + The :func:`.remote` annotation can also be applied + directly to the ``primaryjoin`` expression, which is an + alternate, more specific system of describing which columns in a + particular ``primaryjoin`` should be considered "remote". + + .. 
seealso:: + + :ref:`self_referential` - in-depth explanation of how + :paramref:`~.relationship.remote_side` + is used to configure self-referential relationships. + + :func:`.remote` - an annotation function that accomplishes the + same purpose as :paramref:`~.relationship.remote_side`, typically + when a custom :paramref:`~.relationship.primaryjoin` condition + is used. + + :param query_class: + a :class:`.Query` subclass that will be used as the base of the + "appender query" returned by a "dynamic" relationship, that + is, a relationship that specifies ``lazy="dynamic"`` or was + otherwise constructed using the :func:`.orm.dynamic_loader` + function. + + .. seealso:: + + :ref:`dynamic_relationship` - Introduction to "dynamic" + relationship loaders. + + :param secondaryjoin: + a SQL expression that will be used as the join of + an association table to the child object. By default, this value is + computed based on the foreign key relationships of the association + and child tables. + + :paramref:`~.relationship.secondaryjoin` may also be passed as a + callable function which is evaluated at mapper initialization time, + and may be passed as a Python-evaluable string when using + Declarative. + + .. seealso:: + + :ref:`relationship_primaryjoin` + + :param single_parent: + when True, installs a validator which will prevent objects + from being associated with more than one parent at a time. + This is used for many-to-one or many-to-many relationships that + should be treated either as one-to-one or one-to-many. Its usage + is optional, except for :func:`.relationship` constructs which + are many-to-one or many-to-many and also + specify the ``delete-orphan`` cascade option. The + :func:`.relationship` construct itself will raise an error + instructing when this option is required. + + .. seealso:: + + :ref:`unitofwork_cascades` - includes detail on when the + :paramref:`~.relationship.single_parent` flag may be appropriate. + + :param uselist: + a boolean that indicates if this property should be loaded as a + list or a scalar. In most cases, this value is determined + automatically by :func:`.relationship` at mapper configuration + time, based on the type and direction + of the relationship - one to many forms a list, many to one + forms a scalar, many to many is a list. If a scalar is desired + where normally a list would be present, such as a bi-directional + one-to-one relationship, set :paramref:`~.relationship.uselist` to + False. + + The :paramref:`~.relationship.uselist` flag is also available on an + existing :func:`.relationship` construct as a read-only attribute, + which can be used to determine if this :func:`.relationship` deals + with collections or scalar attributes:: + + >>> User.addresses.property.uselist + True + + .. seealso:: + + :ref:`relationships_one_to_one` - Introduction to the "one to + one" relationship pattern, which is typically when the + :paramref:`~.relationship.uselist` flag is needed. + + :param viewonly=False: + when set to True, the relationship is used only for loading objects, + and not for any persistence operation. A :func:`.relationship` + which specifies :paramref:`~.relationship.viewonly` can work + with a wider range of SQL operations within the + :paramref:`~.relationship.primaryjoin` condition, including + operations that feature the use of a variety of comparison operators + as well as SQL functions such as :func:`~.sql.expression.cast`. 
The + :paramref:`~.relationship.viewonly` flag is also of general use when + defining any kind of :func:`~.relationship` that doesn't represent + the full set of related objects, to prevent modifications of the + collection from resulting in persistence operations. + + + """ + super(RelationshipProperty, self).__init__() + + self.uselist = uselist + self.argument = argument + self.secondary = secondary + self.primaryjoin = primaryjoin + self.secondaryjoin = secondaryjoin + self.post_update = post_update + self.direction = None + self.viewonly = viewonly + self.lazy = lazy + self.single_parent = single_parent + self._user_defined_foreign_keys = foreign_keys + self.collection_class = collection_class + self.passive_deletes = passive_deletes + self.cascade_backrefs = cascade_backrefs + self.passive_updates = passive_updates + self.remote_side = remote_side + self.enable_typechecks = enable_typechecks + self.query_class = query_class + self.innerjoin = innerjoin + self.distinct_target_key = distinct_target_key + self.doc = doc + self.active_history = active_history + self.join_depth = join_depth + self.local_remote_pairs = _local_remote_pairs + self.extension = extension + self.bake_queries = bake_queries + self.load_on_pending = load_on_pending + self.comparator_factory = comparator_factory or \ + RelationshipProperty.Comparator + self.comparator = self.comparator_factory(self, None) + util.set_creation_order(self) + + if info is not None: + self.info = info + + if strategy_class: + self.strategy_class = strategy_class + else: + self.strategy_class = self._strategy_lookup(("lazy", self.lazy)) + + self._reverse_property = set() + + self.cascade = cascade if cascade is not False \ + else "save-update, merge" + + self.order_by = order_by + + self.back_populates = back_populates + + if self.back_populates: + if backref: + raise sa_exc.ArgumentError( + "backref and back_populates keyword arguments " + "are mutually exclusive") + self.backref = None + else: + self.backref = backref + + def instrument_class(self, mapper): + attributes.register_descriptor( + mapper.class_, + self.key, + comparator=self.comparator_factory(self, mapper), + parententity=mapper, + doc=self.doc, + ) + + class Comparator(PropComparator): + """Produce boolean, comparison, and other operators for + :class:`.RelationshipProperty` attributes. + + See the documentation for :class:`.PropComparator` for a brief + overview of ORM level operator definition. + + See also: + + :class:`.PropComparator` + + :class:`.ColumnProperty.Comparator` + + :class:`.ColumnOperators` + + :ref:`types_operators` + + :attr:`.TypeEngine.comparator_factory` + + """ + + _of_type = None + + def __init__( + self, prop, parentmapper, adapt_to_entity=None, of_type=None): + """Construction of :class:`.RelationshipProperty.Comparator` + is internal to the ORM's attribute mechanics. + + """ + self.prop = prop + self._parententity = parentmapper + self._adapt_to_entity = adapt_to_entity + if of_type: + self._of_type = of_type + + def adapt_to_entity(self, adapt_to_entity): + return self.__class__(self.property, self._parententity, + adapt_to_entity=adapt_to_entity, + of_type=self._of_type) + + @util.memoized_property + def mapper(self): + """The target :class:`.Mapper` referred to by this + :class:`.RelationshipProperty.Comparator`. + + This is the "target" or "remote" side of the + :func:`.relationship`. 
+
+ """
+ return self.property.mapper
+
+ @util.memoized_property
+ def _parententity(self):
+ return self.property.parent
+
+ def _source_selectable(self):
+ if self._adapt_to_entity:
+ return self._adapt_to_entity.selectable
+ else:
+ return self.property.parent._with_polymorphic_selectable
+
+ def __clause_element__(self):
+ adapt_from = self._source_selectable()
+ if self._of_type:
+ of_type = inspect(self._of_type).mapper
+ else:
+ of_type = None
+
+ pj, sj, source, dest, \
+ secondary, target_adapter = self.property._create_joins(
+ source_selectable=adapt_from,
+ source_polymorphic=True,
+ of_type=of_type)
+ if sj is not None:
+ return pj & sj
+ else:
+ return pj
+
+ def of_type(self, cls):
+ """Produce a construct that represents a particular 'subtype' of
+ attribute for the parent class.
+
+ Currently this is usable in conjunction with :meth:`.Query.join`
+ and :meth:`.Query.outerjoin`.
+
+ """
+ return RelationshipProperty.Comparator(
+ self.property,
+ self._parententity,
+ adapt_to_entity=self._adapt_to_entity,
+ of_type=cls)
+
+ def in_(self, other):
+ """Produce an IN clause - this is not implemented
+ for :func:`~.orm.relationship`-based attributes at this time.
+
+ """
+ raise NotImplementedError('in_() not yet supported for '
+ 'relationships. For a simple '
+ 'many-to-one, use in_() against '
+ 'the set of foreign key values.')
+
+ __hash__ = None
+
+ def __eq__(self, other):
+ """Implement the ``==`` operator.
+
+ In a many-to-one context, such as::
+
+ MyClass.some_prop == <some object>
+
+ this will typically produce a
+ clause such as::
+
+ mytable.related_id == <some id>
+
+ Where ``<some id>`` is the primary key of the given
+ object.
+
+ The ``==`` operator provides partial functionality for non-
+ many-to-one comparisons:
+
+ * Comparisons against collections are not supported.
+ Use :meth:`~.RelationshipProperty.Comparator.contains`.
+ * Compared to a scalar one-to-many, will produce a
+ clause that compares the target columns in the parent to
+ the given target.
+ * Compared to a scalar many-to-many, an alias
+ of the association table will be rendered as
+ well, forming a natural join that is part of the
+ main body of the query. This will not work for
+ queries that go beyond simple AND conjunctions of
+ comparisons, such as those which use OR. Use
+ explicit joins, outerjoins, or
+ :meth:`~.RelationshipProperty.Comparator.has` for
+ more comprehensive non-many-to-one scalar
+ membership tests.
+ * Comparisons against ``None`` given in a one-to-many
+ or many-to-many context produce a NOT EXISTS clause.
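+
+ For example, as a quick sketch only (the ``Address.user``
+ many-to-one relationship assumed here is hypothetical)::
+
+     # renders essentially "address.user_id = :param"
+     session.query(Address).filter(Address.user == someuser)
+
+     # renders "address.user_id IS NULL"
+     session.query(Address).filter(Address.user == None)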
+ + """ + if isinstance(other, (util.NoneType, expression.Null)): + if self.property.direction in [ONETOMANY, MANYTOMANY]: + return ~self._criterion_exists() + else: + return _orm_annotate(self.property._optimized_compare( + None, adapt_source=self.adapter)) + elif self.property.uselist: + raise sa_exc.InvalidRequestError( + "Can't compare a collection to an object or collection; " + "use contains() to test for membership.") + else: + return _orm_annotate( + self.property._optimized_compare( + other, adapt_source=self.adapter)) + + def _criterion_exists(self, criterion=None, **kwargs): + if getattr(self, '_of_type', None): + info = inspect(self._of_type) + target_mapper, to_selectable, is_aliased_class = \ + info.mapper, info.selectable, info.is_aliased_class + if self.property._is_self_referential and not \ + is_aliased_class: + to_selectable = to_selectable.alias() + + single_crit = target_mapper._single_table_criterion + if single_crit is not None: + if criterion is not None: + criterion = single_crit & criterion + else: + criterion = single_crit + else: + is_aliased_class = False + to_selectable = None + + if self.adapter: + source_selectable = self._source_selectable() + else: + source_selectable = None + + pj, sj, source, dest, secondary, target_adapter = \ + self.property._create_joins( + dest_polymorphic=True, + dest_selectable=to_selectable, + source_selectable=source_selectable) + + for k in kwargs: + crit = getattr(self.property.mapper.class_, k) == kwargs[k] + if criterion is None: + criterion = crit + else: + criterion = criterion & crit + + # annotate the *local* side of the join condition, in the case + # of pj + sj this is the full primaryjoin, in the case of just + # pj its the local side of the primaryjoin. + if sj is not None: + j = _orm_annotate(pj) & sj + else: + j = _orm_annotate(pj, exclude=self.property.remote_side) + + if criterion is not None and target_adapter and not \ + is_aliased_class: + # limit this adapter to annotated only? + criterion = target_adapter.traverse(criterion) + + # only have the "joined left side" of what we + # return be subject to Query adaption. The right + # side of it is used for an exists() subquery and + # should not correlate or otherwise reach out + # to anything in the enclosing query. + if criterion is not None: + criterion = criterion._annotate( + {'no_replacement_traverse': True}) + + crit = j & sql.True_._ifnone(criterion) + + ex = sql.exists([1], crit, from_obj=dest).correlate_except(dest) + if secondary is not None: + ex = ex.correlate_except(secondary) + return ex + + def any(self, criterion=None, **kwargs): + """Produce an expression that tests a collection against + particular criterion, using EXISTS. + + An expression like:: + + session.query(MyClass).filter( + MyClass.somereference.any(SomeRelated.x==2) + ) + + + Will produce a query like:: + + SELECT * FROM my_table WHERE + EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id + AND related.x=2) + + Because :meth:`~.RelationshipProperty.Comparator.any` uses + a correlated subquery, its performance is not nearly as + good when compared against large target tables as that of + using a join. 
+
+ :meth:`~.RelationshipProperty.Comparator.any` is particularly
+ useful for testing for empty collections::
+
+ session.query(MyClass).filter(
+ ~MyClass.somereference.any()
+ )
+
+ will produce::
+
+ SELECT * FROM my_table WHERE
+ NOT EXISTS (SELECT 1 FROM related WHERE
+ related.my_id=my_table.id)
+
+ :meth:`~.RelationshipProperty.Comparator.any` is only
+ valid for collections, i.e. a :func:`.relationship`
+ that has ``uselist=True``. For scalar references,
+ use :meth:`~.RelationshipProperty.Comparator.has`.
+
+ """
+ if not self.property.uselist:
+ raise sa_exc.InvalidRequestError(
+ "'any()' not implemented for scalar "
+ "attributes. Use has()."
+ )
+
+ return self._criterion_exists(criterion, **kwargs)
+
+ def has(self, criterion=None, **kwargs):
+ """Produce an expression that tests a scalar reference against
+ particular criterion, using EXISTS.
+
+ An expression like::
+
+ session.query(MyClass).filter(
+ MyClass.somereference.has(SomeRelated.x==2)
+ )
+
+
+ Will produce a query like::
+
+ SELECT * FROM my_table WHERE
+ EXISTS (SELECT 1 FROM related WHERE
+ related.id==my_table.related_id AND related.x=2)
+
+ Because :meth:`~.RelationshipProperty.Comparator.has` uses
+ a correlated subquery, its performance is not nearly as
+ good when compared against large target tables as that of
+ using a join.
+
+ :meth:`~.RelationshipProperty.Comparator.has` is only
+ valid for scalar references, i.e. a :func:`.relationship`
+ that has ``uselist=False``. For collection references,
+ use :meth:`~.RelationshipProperty.Comparator.any`.
+
+ """
+ if self.property.uselist:
+ raise sa_exc.InvalidRequestError(
+ "'has()' not implemented for collections. "
+ "Use any().")
+ return self._criterion_exists(criterion, **kwargs)
+
+ def contains(self, other, **kwargs):
+ """Return a simple expression that tests a collection for
+ containment of a particular item.
+
+ :meth:`~.RelationshipProperty.Comparator.contains` is
+ only valid for a collection, i.e. a
+ :func:`~.orm.relationship` that implements
+ one-to-many or many-to-many with ``uselist=True``.
+
+ When used in a simple one-to-many context, an
+ expression like::
+
+ MyClass.contains(other)
+
+ Produces a clause like::
+
+ mytable.id == <some id>
+
+ Where ``<some id>`` is the value of the foreign key
+ attribute on ``other`` which refers to the primary
+ key of its parent object. From this it follows that
+ :meth:`~.RelationshipProperty.Comparator.contains` is
+ very useful when used with simple one-to-many
+ operations.
+
+ For many-to-many operations, the behavior of
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ has more caveats. The association table will be
+ rendered in the statement, producing an "implicit"
+ join, that is, includes multiple tables in the FROM
+ clause which are equated in the WHERE clause::
+
+ query(MyClass).filter(MyClass.contains(other))
+
+ Produces a query like::
+
+ SELECT * FROM my_table, my_association_table AS
+ my_association_table_1 WHERE
+ my_table.id = my_association_table_1.parent_id
+ AND my_association_table_1.child_id = <some id>
+
+ Where ``<some id>`` would be the primary key of
+ ``other``. From the above, it is clear that
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ will **not** work with many-to-many collections when
+ used in queries that move beyond simple AND
+ conjunctions, such as multiple
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ expressions joined by OR. In such cases subqueries or
+ explicit "outer joins" will need to be used instead.
+ See :meth:`~.RelationshipProperty.Comparator.any` for
+ a less-performant alternative using EXISTS, or refer
+ to :meth:`.Query.outerjoin` as well as :ref:`ormtutorial_joins`
+ for more details on constructing outer joins.
+
+ """
+ if not self.property.uselist:
+ raise sa_exc.InvalidRequestError(
+ "'contains' not implemented for scalar "
+ "attributes. Use ==")
+ clause = self.property._optimized_compare(
+ other, adapt_source=self.adapter)
+
+ if self.property.secondaryjoin is not None:
+ clause.negation_clause = \
+ self.__negated_contains_or_equals(other)
+
+ return clause
+
+ def __negated_contains_or_equals(self, other):
+ if self.property.direction == MANYTOONE:
+ state = attributes.instance_state(other)
+
+ def state_bindparam(x, state, col):
+ dict_ = state.dict
+ return sql.bindparam(
+ x, unique=True,
+ callable_=self.property._get_attr_w_warn_on_none(
+ col,
+ self.property.mapper._get_state_attr_by_column,
+ state, dict_, col, passive=attributes.PASSIVE_OFF
+ )
+ )
+
+ def adapt(col):
+ if self.adapter:
+ return self.adapter(col)
+ else:
+ return col
+
+ if self.property._use_get:
+ return sql.and_(*[
+ sql.or_(
+ adapt(x) != state_bindparam(adapt(x), state, y),
+ adapt(x) == None)
+ for (x, y) in self.property.local_remote_pairs])
+
+ criterion = sql.and_(*[
+ x == y for (x, y) in
+ zip(
+ self.property.mapper.primary_key,
+ self.property.mapper.primary_key_from_instance(other)
+ )
+ ])
+
+ return ~self._criterion_exists(criterion)
+
+ def __ne__(self, other):
+ """Implement the ``!=`` operator.
+
+ In a many-to-one context, such as::
+
+ MyClass.some_prop != <some object>
+
+ This will typically produce a clause such as::
+
+ mytable.related_id != <some id>
+
+ Where ``<some id>`` is the primary key of the
+ given object.
+
+ The ``!=`` operator provides partial functionality for non-
+ many-to-one comparisons:
+
+ * Comparisons against collections are not supported.
+ Use
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ in conjunction with :func:`~.expression.not_`.
+ * Compared to a scalar one-to-many, will produce a
+ clause that compares the target columns in the parent to
+ the given target.
+ * Compared to a scalar many-to-many, an alias
+ of the association table will be rendered as
+ well, forming a natural join that is part of the
+ main body of the query. This will not work for
+ queries that go beyond simple AND conjunctions of
+ comparisons, such as those which use OR. Use
+ explicit joins, outerjoins, or
+ :meth:`~.RelationshipProperty.Comparator.has` in
+ conjunction with :func:`~.expression.not_` for
+ more comprehensive non-many-to-one scalar
+ membership tests.
+ * Comparisons against ``None`` given in a one-to-many
+ or many-to-many context produce an EXISTS clause.
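+
+ For example, as a quick sketch only (the ``Address.user`` and
+ ``User.addresses`` relationships assumed here are hypothetical)::
+
+     # many-to-one: renders essentially "address.user_id != :param"
+     session.query(Address).filter(Address.user != someuser)
+
+     # one-to-many against None: renders an EXISTS subquery
+     session.query(User).filter(User.addresses != None)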
+ + """ + if isinstance(other, (util.NoneType, expression.Null)): + if self.property.direction == MANYTOONE: + return _orm_annotate(~self.property._optimized_compare( + None, adapt_source=self.adapter)) + + else: + return self._criterion_exists() + elif self.property.uselist: + raise sa_exc.InvalidRequestError( + "Can't compare a collection" + " to an object or collection; use " + "contains() to test for membership.") + else: + return _orm_annotate(self.__negated_contains_or_equals(other)) + + @util.memoized_property + def property(self): + if mapperlib.Mapper._new_mappers: + mapperlib.Mapper._configure_all() + return self.prop + + def _with_parent(self, instance, alias_secondary=True): + assert instance is not None + return self._optimized_compare( + instance, value_is_parent=True, alias_secondary=alias_secondary) + + def _optimized_compare(self, state, value_is_parent=False, + adapt_source=None, + alias_secondary=True): + if state is not None: + state = attributes.instance_state(state) + + reverse_direction = not value_is_parent + + if state is None: + return self._lazy_none_clause( + reverse_direction, + adapt_source=adapt_source) + + if not reverse_direction: + criterion, bind_to_col = \ + self._lazy_strategy._lazywhere, \ + self._lazy_strategy._bind_to_col + else: + criterion, bind_to_col = \ + self._lazy_strategy._rev_lazywhere, \ + self._lazy_strategy._rev_bind_to_col + + if reverse_direction: + mapper = self.mapper + else: + mapper = self.parent + + dict_ = attributes.instance_dict(state.obj()) + + def visit_bindparam(bindparam): + if bindparam._identifying_key in bind_to_col: + bindparam.callable = self._get_attr_w_warn_on_none( + bind_to_col[bindparam._identifying_key], + mapper._get_state_attr_by_column, + state, dict_, + bind_to_col[bindparam._identifying_key], + passive=attributes.PASSIVE_OFF) + + if self.secondary is not None and alias_secondary: + criterion = ClauseAdapter( + self.secondary.alias()).\ + traverse(criterion) + + criterion = visitors.cloned_traverse( + criterion, {}, {'bindparam': visit_bindparam}) + + if adapt_source: + criterion = adapt_source(criterion) + return criterion + + def _get_attr_w_warn_on_none(self, column, fn, *arg, **kw): + def _go(): + value = fn(*arg, **kw) + if value is None: + util.warn( + "Got None for value of column %s; this is unsupported " + "for a relationship comparison and will not " + "currently produce an IS comparison " + "(but may in a future release)" % column) + return value + return _go + + def _lazy_none_clause(self, reverse_direction=False, adapt_source=None): + if not reverse_direction: + criterion, bind_to_col = \ + self._lazy_strategy._lazywhere, \ + self._lazy_strategy._bind_to_col + else: + criterion, bind_to_col = \ + self._lazy_strategy._rev_lazywhere, \ + self._lazy_strategy._rev_bind_to_col + + criterion = adapt_criterion_to_null(criterion, bind_to_col) + + if adapt_source: + criterion = adapt_source(criterion) + return criterion + + def __str__(self): + return str(self.parent.class_.__name__) + "." 
+ self.key + + def merge(self, + session, + source_state, + source_dict, + dest_state, + dest_dict, + load, _recursive): + + if load: + for r in self._reverse_property: + if (source_state, r) in _recursive: + return + + if "merge" not in self._cascade: + return + + if self.key not in source_dict: + return + + if self.uselist: + instances = source_state.get_impl(self.key).\ + get(source_state, source_dict) + if hasattr(instances, '_sa_adapter'): + # convert collections to adapters to get a true iterator + instances = instances._sa_adapter + + if load: + # for a full merge, pre-load the destination collection, + # so that individual _merge of each item pulls from identity + # map for those already present. + # also assumes CollectionAttributeImpl behavior of loading + # "old" list in any case + dest_state.get_impl(self.key).get(dest_state, dest_dict) + + dest_list = [] + for current in instances: + current_state = attributes.instance_state(current) + current_dict = attributes.instance_dict(current) + _recursive[(current_state, self)] = True + obj = session._merge(current_state, current_dict, + load=load, _recursive=_recursive) + if obj is not None: + dest_list.append(obj) + + if not load: + coll = attributes.init_state_collection(dest_state, + dest_dict, self.key) + for c in dest_list: + coll.append_without_event(c) + else: + dest_state.get_impl(self.key)._set_iterable( + dest_state, dest_dict, dest_list) + else: + current = source_dict[self.key] + if current is not None: + current_state = attributes.instance_state(current) + current_dict = attributes.instance_dict(current) + _recursive[(current_state, self)] = True + obj = session._merge(current_state, current_dict, + load=load, _recursive=_recursive) + else: + obj = None + + if not load: + dest_dict[self.key] = obj + else: + dest_state.get_impl(self.key).set(dest_state, + dest_dict, obj, None) + + def _value_as_iterable(self, state, dict_, key, + passive=attributes.PASSIVE_OFF): + """Return a list of tuples (state, obj) for the given + key. + + returns an empty list if the value is None/empty/PASSIVE_NO_RESULT + """ + + impl = state.manager[key].impl + x = impl.get(state, dict_, passive=passive) + if x is attributes.PASSIVE_NO_RESULT or x is None: + return [] + elif hasattr(impl, 'get_collection'): + return [ + (attributes.instance_state(o), o) for o in + impl.get_collection(state, dict_, x, passive=passive) + ] + else: + return [(attributes.instance_state(x), x)] + + def cascade_iterator(self, type_, state, dict_, + visited_states, halt_on=None): + # assert type_ in self._cascade + + # only actively lazy load on the 'delete' cascade + if type_ != 'delete' or self.passive_deletes: + passive = attributes.PASSIVE_NO_INITIALIZE + else: + passive = attributes.PASSIVE_OFF + + if type_ == 'save-update': + tuples = state.manager[self.key].impl.\ + get_all_pending(state, dict_) + + else: + tuples = self._value_as_iterable(state, dict_, self.key, + passive=passive) + + skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \ + not in self._cascade + + for instance_state, c in tuples: + if instance_state in visited_states: + continue + + if c is None: + # would like to emit a warning here, but + # would not be consistent with collection.append(None) + # current behavior of silently skipping.
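The merge() implementation above walks relationship collections item by item when "merge" is in the cascade (it is by default). A small usage sketch, reusing the hypothetical User/Address mapping from earlier; the specific values are made up::

    # a detached object graph, e.g. rebuilt from a cache
    detached = User(id=1, name='ed',
                    addresses=[Address(id=1, email='cached@y.zz')])

    # with load=True (the default), the destination collection is
    # pre-loaded so each Address merges against the identity map
    merged = session.merge(detached)
    assert merged in session and merged is not detached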
+ # see [ticket:2229] + continue + + instance_dict = attributes.instance_dict(c) + + if halt_on and halt_on(instance_state): + continue + + if skip_pending and not instance_state.key: + continue + + instance_mapper = instance_state.manager.mapper + + if not instance_mapper.isa(self.mapper.class_manager.mapper): + raise AssertionError("Attribute '%s' on class '%s' " + "doesn't handle objects " + "of type '%s'" % ( + self.key, + self.parent.class_, + c.__class__ + )) + + visited_states.add(instance_state) + + yield c, instance_mapper, instance_state, instance_dict + + def _add_reverse_property(self, key): + other = self.mapper.get_property(key, _configure_mappers=False) + self._reverse_property.add(other) + other._reverse_property.add(self) + + if not other.mapper.common_parent(self.parent): + raise sa_exc.ArgumentError( + 'reverse_property %r on ' + 'relationship %s references relationship %s, which ' + 'does not reference mapper %s' % + (key, self, other, self.parent)) + + if self.direction in (ONETOMANY, MANYTOONE) and self.direction \ + == other.direction: + raise sa_exc.ArgumentError( + '%s and back-reference %s are ' + 'both of the same direction %r. Did you mean to ' + 'set remote_side on the many-to-one side ?' % + (other, self, self.direction)) + + @util.memoized_property + def mapper(self): + """Return the targeted :class:`.Mapper` for this + :class:`.RelationshipProperty`. + + This is a lazy-initializing static attribute. + + """ + if util.callable(self.argument) and \ + not isinstance(self.argument, (type, mapperlib.Mapper)): + argument = self.argument() + else: + argument = self.argument + + if isinstance(argument, type): + mapper_ = mapperlib.class_mapper(argument, + configure=False) + elif isinstance(self.argument, mapperlib.Mapper): + mapper_ = argument + else: + raise sa_exc.ArgumentError( + "relationship '%s' expects " + "a class or a mapper argument (received: %s)" + % (self.key, type(argument))) + return mapper_ + + @util.memoized_property + @util.deprecated("0.7", "Use .target") + def table(self): + """Return the selectable linked to this + :class:`.RelationshipProperty` object's target + :class:`.Mapper`. + """ + return self.target + + def do_init(self): + self._check_conflicts() + self._process_dependent_arguments() + self._setup_join_conditions() + self._check_cascade_settings(self._cascade) + self._post_init() + self._generate_backref() + self._join_condition._warn_for_conflicting_sync_targets() + super(RelationshipProperty, self).do_init() + self._lazy_strategy = self._get_strategy((("lazy", "select"),)) + + def _process_dependent_arguments(self): + """Convert incoming configuration arguments to their + proper form. + + Callables are resolved, ORM annotations removed. + + """ + # accept callables for other attributes which may require + # deferred initialization. This technique is used + # by declarative "string configs" and some recipes. + for attr in ( + 'order_by', 'primaryjoin', 'secondaryjoin', + 'secondary', '_user_defined_foreign_keys', 'remote_side', + ): + attr_value = getattr(self, attr) + if util.callable(attr_value): + setattr(self, attr, attr_value()) + + # remove "annotations" which are present if mapped class + # descriptors are used to create the join expression. + for attr in 'primaryjoin', 'secondaryjoin': + val = getattr(self, attr) + if val is not None: + setattr(self, attr, _orm_deannotate( + expression._only_column_elements(val, attr)) + ) + + # ensure expressions in self.order_by, foreign_keys, + # remote_side are all columns, not strings. 
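_add_reverse_property() above is the source of the "both of the same direction" error; in an adjacency list, remote_side on the many-to-one side is what differentiates the two directions. A hypothetical sketch::

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import backref, relationship

    Base = declarative_base()

    class Node(Base):
        __tablename__ = 'node'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('node.id'))
        # remote_side=[id] marks the backref as many-to-one, so the
        # two directions differ and the check above passes
        children = relationship(
            "Node", backref=backref('parent', remote_side=[id]))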
+ if self.order_by is not False and self.order_by is not None: + self.order_by = [ + expression._only_column_elements(x, "order_by") + for x in + util.to_list(self.order_by)] + + self._user_defined_foreign_keys = \ + util.column_set( + expression._only_column_elements(x, "foreign_keys") + for x in util.to_column_set( + self._user_defined_foreign_keys + )) + + self.remote_side = \ + util.column_set( + expression._only_column_elements(x, "remote_side") + for x in + util.to_column_set(self.remote_side)) + + self.target = self.mapper.mapped_table + + def _setup_join_conditions(self): + self._join_condition = jc = JoinCondition( + parent_selectable=self.parent.mapped_table, + child_selectable=self.mapper.mapped_table, + parent_local_selectable=self.parent.local_table, + child_local_selectable=self.mapper.local_table, + primaryjoin=self.primaryjoin, + secondary=self.secondary, + secondaryjoin=self.secondaryjoin, + parent_equivalents=self.parent._equivalent_columns, + child_equivalents=self.mapper._equivalent_columns, + consider_as_foreign_keys=self._user_defined_foreign_keys, + local_remote_pairs=self.local_remote_pairs, + remote_side=self.remote_side, + self_referential=self._is_self_referential, + prop=self, + support_sync=not self.viewonly, + can_be_synced_fn=self._columns_are_mapped + ) + self.primaryjoin = jc.deannotated_primaryjoin + self.secondaryjoin = jc.deannotated_secondaryjoin + self.direction = jc.direction + self.local_remote_pairs = jc.local_remote_pairs + self.remote_side = jc.remote_columns + self.local_columns = jc.local_columns + self.synchronize_pairs = jc.synchronize_pairs + self._calculated_foreign_keys = jc.foreign_key_columns + self.secondary_synchronize_pairs = jc.secondary_synchronize_pairs + + def _check_conflicts(self): + """Test that this relationship is legal, warn about + inheritance conflicts.""" + + if self.parent.non_primary and not mapperlib.class_mapper( + self.parent.class_, + configure=False).has_property(self.key): + raise sa_exc.ArgumentError( + "Attempting to assign a new " + "relationship '%s' to a non-primary mapper on " + "class '%s'. New relationships can only be added " + "to the primary mapper, i.e. the very first mapper " + "created for class '%s' " % + (self.key, self.parent.class_.__name__, + self.parent.class_.__name__)) + + # check for conflicting relationship() on superclass + if not self.parent.concrete: + for inheriting in self.parent.iterate_to_root(): + if inheriting is not self.parent \ + and inheriting.has_property(self.key): + util.warn("Warning: relationship '%s' on mapper " + "'%s' supersedes the same relationship " + "on inherited mapper '%s'; this can " + "cause dependency issues during flush" + % (self.key, self.parent, inheriting)) + + def _get_cascade(self): + """Return the current cascade setting for this + :class:`.RelationshipProperty`. + """ + return self._cascade + + def _set_cascade(self, cascade): + cascade = CascadeOptions(cascade) + if 'mapper' in self.__dict__: + self._check_cascade_settings(cascade) + self._cascade = cascade + + if self._dependency_processor: + self._dependency_processor.cascade = cascade + + cascade = property(_get_cascade, _set_cascade) + + def _check_cascade_settings(self, cascade): + if cascade.delete_orphan and not self.single_parent \ + and (self.direction is MANYTOMANY or self.direction + is MANYTOONE): + raise sa_exc.ArgumentError( + 'On %s, delete-orphan cascade is not supported ' + 'on a many-to-many or many-to-one relationship ' + 'when single_parent is not set. 
Set ' + 'single_parent=True on the relationship().' + % self) + if self.direction is MANYTOONE and self.passive_deletes: + util.warn("On %s, 'passive_deletes' is normally configured " + "on one-to-many, one-to-one, many-to-many " + "relationships only." + % self) + + if self.passive_deletes == 'all' and \ + ("delete" in cascade or + "delete-orphan" in cascade): + raise sa_exc.ArgumentError( + "On %s, can't set passive_deletes='all' in conjunction " + "with 'delete' or 'delete-orphan' cascade" % self) + + if cascade.delete_orphan: + self.mapper.primary_mapper()._delete_orphans.append( + (self.key, self.parent.class_) + ) + + def _columns_are_mapped(self, *cols): + """Return True if all columns in the given collection are + mapped by the tables referenced by this :class:`.Relationship`. + + """ + for c in cols: + if self.secondary is not None \ + and self.secondary.c.contains_column(c): + continue + if not self.parent.mapped_table.c.contains_column(c) and \ + not self.target.c.contains_column(c): + return False + return True + + def _generate_backref(self): + """Interpret the 'backref' instruction to create a + :func:`.relationship` complementary to this one.""" + + if self.parent.non_primary: + return + if self.backref is not None and not self.back_populates: + if isinstance(self.backref, util.string_types): + backref_key, kwargs = self.backref, {} + else: + backref_key, kwargs = self.backref + mapper = self.mapper.primary_mapper() + + check = set(mapper.iterate_to_root()).\ + union(mapper.self_and_descendants) + for m in check: + if m.has_property(backref_key): + raise sa_exc.ArgumentError( + "Error creating backref " + "'%s' on relationship '%s': property of that " + "name exists on mapper '%s'" % + (backref_key, self, m)) + + # determine primaryjoin/secondaryjoin for the + # backref. Use the one we had, so that + # a custom join doesn't have to be specified in + # both directions. + if self.secondary is not None: + # for many to many, just switch primaryjoin/ + # secondaryjoin. use the annotated + # pj/sj on the _join_condition. + pj = kwargs.pop( + 'primaryjoin', + self._join_condition.secondaryjoin_minus_local) + sj = kwargs.pop( + 'secondaryjoin', + self._join_condition.primaryjoin_minus_local) + else: + pj = kwargs.pop( + 'primaryjoin', + self._join_condition.primaryjoin_reverse_remote) + sj = kwargs.pop('secondaryjoin', None) + if sj: + raise sa_exc.InvalidRequestError( + "Can't assign 'secondaryjoin' on a backref " + "against a non-secondary relationship." 
+ ) + + foreign_keys = kwargs.pop('foreign_keys', + self._user_defined_foreign_keys) + parent = self.parent.primary_mapper() + kwargs.setdefault('viewonly', self.viewonly) + kwargs.setdefault('post_update', self.post_update) + kwargs.setdefault('passive_updates', self.passive_updates) + self.back_populates = backref_key + relationship = RelationshipProperty( + parent, self.secondary, + pj, sj, + foreign_keys=foreign_keys, + back_populates=self.key, + **kwargs) + mapper._configure_property(backref_key, relationship) + + if self.back_populates: + self._add_reverse_property(self.back_populates) + + def _post_init(self): + if self.uselist is None: + self.uselist = self.direction is not MANYTOONE + if not self.viewonly: + self._dependency_processor = \ + dependency.DependencyProcessor.from_relationship(self) + + @util.memoized_property + def _use_get(self): + """memoize the 'use_get' attribute of this RelationshipLoader's + lazyloader.""" + + strategy = self._lazy_strategy + return strategy.use_get + + @util.memoized_property + def _is_self_referential(self): + return self.mapper.common_parent(self.parent) + + def _create_joins(self, source_polymorphic=False, + source_selectable=None, dest_polymorphic=False, + dest_selectable=None, of_type=None): + if source_selectable is None: + if source_polymorphic and self.parent.with_polymorphic: + source_selectable = self.parent._with_polymorphic_selectable + + aliased = False + if dest_selectable is None: + if dest_polymorphic and self.mapper.with_polymorphic: + dest_selectable = self.mapper._with_polymorphic_selectable + aliased = True + else: + dest_selectable = self.mapper.mapped_table + + if self._is_self_referential and source_selectable is None: + dest_selectable = dest_selectable.alias() + aliased = True + else: + aliased = True + + dest_mapper = of_type or self.mapper + + single_crit = dest_mapper._single_table_criterion + aliased = aliased or (source_selectable is not None) + + primaryjoin, secondaryjoin, secondary, target_adapter, dest_selectable = \ + self._join_condition.join_targets( + source_selectable, dest_selectable, aliased, single_crit + ) + if source_selectable is None: + source_selectable = self.parent.local_table + if dest_selectable is None: + dest_selectable = self.mapper.local_table + return (primaryjoin, secondaryjoin, source_selectable, + dest_selectable, secondary, target_adapter) + + +def _annotate_columns(element, annotations): + def clone(elem): + if isinstance(elem, expression.ColumnClause): + elem = elem._annotate(annotations.copy()) + elem._copy_internals(clone=clone) + return elem + + if element is not None: + element = clone(element) + return element + + +class JoinCondition(object): + def __init__(self, + parent_selectable, + child_selectable, + parent_local_selectable, + child_local_selectable, + primaryjoin=None, + secondary=None, + secondaryjoin=None, + parent_equivalents=None, + child_equivalents=None, + consider_as_foreign_keys=None, + local_remote_pairs=None, + remote_side=None, + self_referential=False, + prop=None, + support_sync=True, + can_be_synced_fn=lambda *c: True + ): + self.parent_selectable = parent_selectable + self.parent_local_selectable = parent_local_selectable + self.child_selectable = child_selectable + self.child_local_selectable = child_local_selectable + self.parent_equivalents = parent_equivalents + self.child_equivalents = child_equivalents + self.primaryjoin = primaryjoin + self.secondaryjoin = secondaryjoin + self.secondary = secondary + self.consider_as_foreign_keys = 
consider_as_foreign_keys + self._local_remote_pairs = local_remote_pairs + self._remote_side = remote_side + self.prop = prop + self.self_referential = self_referential + self.support_sync = support_sync + self.can_be_synced_fn = can_be_synced_fn + self._determine_joins() + self._annotate_fks() + self._annotate_remote() + self._annotate_local() + self._setup_pairs() + self._check_foreign_cols(self.primaryjoin, True) + if self.secondaryjoin is not None: + self._check_foreign_cols(self.secondaryjoin, False) + self._determine_direction() + self._check_remote_side() + self._log_joins() + + def _log_joins(self): + if self.prop is None: + return + log = self.prop.logger + log.info('%s setup primary join %s', self.prop, + self.primaryjoin) + log.info('%s setup secondary join %s', self.prop, + self.secondaryjoin) + log.info('%s synchronize pairs [%s]', self.prop, + ','.join('(%s => %s)' % (l, r) for (l, r) in + self.synchronize_pairs)) + log.info('%s secondary synchronize pairs [%s]', self.prop, + ','.join('(%s => %s)' % (l, r) for (l, r) in + self.secondary_synchronize_pairs or [])) + log.info('%s local/remote pairs [%s]', self.prop, + ','.join('(%s / %s)' % (l, r) for (l, r) in + self.local_remote_pairs)) + log.info('%s remote columns [%s]', self.prop, + ','.join('%s' % col for col in self.remote_columns) + ) + log.info('%s local columns [%s]', self.prop, + ','.join('%s' % col for col in self.local_columns) + ) + log.info('%s relationship direction %s', self.prop, + self.direction) + + def _determine_joins(self): + """Determine the 'primaryjoin' and 'secondaryjoin' attributes, + if not passed to the constructor already. + + This is based on analysis of the foreign key relationships + between the parent and target mapped selectables. + + """ + if self.secondaryjoin is not None and self.secondary is None: + raise sa_exc.ArgumentError( + "Property %s specified with secondary " + "join condition but " + "no secondary argument" % self.prop) + + # find a join between the given mapper's mapped table and + # the given table. will try the mapper's local table first + # for more specificity, then if not found will try the more + # general mapped table, which in the case of inheritance is + # a join. + try: + consider_as_foreign_keys = self.consider_as_foreign_keys or None + if self.secondary is not None: + if self.secondaryjoin is None: + self.secondaryjoin = \ + join_condition( + self.child_selectable, + self.secondary, + a_subset=self.child_local_selectable, + consider_as_foreign_keys=consider_as_foreign_keys + ) + if self.primaryjoin is None: + self.primaryjoin = \ + join_condition( + self.parent_selectable, + self.secondary, + a_subset=self.parent_local_selectable, + consider_as_foreign_keys=consider_as_foreign_keys + ) + else: + if self.primaryjoin is None: + self.primaryjoin = \ + join_condition( + self.parent_selectable, + self.child_selectable, + a_subset=self.parent_local_selectable, + consider_as_foreign_keys=consider_as_foreign_keys + ) + except sa_exc.NoForeignKeysError: + if self.secondary is not None: + raise sa_exc.NoForeignKeysError( + "Could not determine join " + "condition between parent/child tables on " + "relationship %s - there are no foreign keys " + "linking these tables via secondary table '%s'. " + "Ensure that referencing columns are associated " + "with a ForeignKey or ForeignKeyConstraint, or " + "specify 'primaryjoin' and 'secondaryjoin' " + "expressions." 
% (self.prop, self.secondary)) + else: + raise sa_exc.NoForeignKeysError( + "Could not determine join " + "condition between parent/child tables on " + "relationship %s - there are no foreign keys " + "linking these tables. " + "Ensure that referencing columns are associated " + "with a ForeignKey or ForeignKeyConstraint, or " + "specify a 'primaryjoin' expression." % self.prop) + except sa_exc.AmbiguousForeignKeysError: + if self.secondary is not None: + raise sa_exc.AmbiguousForeignKeysError( + "Could not determine join " + "condition between parent/child tables on " + "relationship %s - there are multiple foreign key " + "paths linking the tables via secondary table '%s'. " + "Specify the 'foreign_keys' " + "argument, providing a list of those columns which " + "should be counted as containing a foreign key " + "reference from the secondary table to each of the " + "parent and child tables." + % (self.prop, self.secondary)) + else: + raise sa_exc.AmbiguousForeignKeysError( + "Could not determine join " + "condition between parent/child tables on " + "relationship %s - there are multiple foreign key " + "paths linking the tables. Specify the " + "'foreign_keys' argument, providing a list of those " + "columns which should be counted as containing a " + "foreign key reference to the parent table." + % self.prop) + + @property + def primaryjoin_minus_local(self): + return _deep_deannotate(self.primaryjoin, values=("local", "remote")) + + @property + def secondaryjoin_minus_local(self): + return _deep_deannotate(self.secondaryjoin, + values=("local", "remote")) + + @util.memoized_property + def primaryjoin_reverse_remote(self): + """Return the primaryjoin condition suitable for the + "reverse" direction. + + If the primaryjoin was delivered here with pre-existing + "remote" annotations, the local/remote annotations + are reversed. Otherwise, the local/remote annotations + are removed. + + """ + if self._has_remote_annotations: + def replace(element): + if "remote" in element._annotations: + v = element._annotations.copy() + del v['remote'] + v['local'] = True + return element._with_annotations(v) + elif "local" in element._annotations: + v = element._annotations.copy() + del v['local'] + v['remote'] = True + return element._with_annotations(v) + return visitors.replacement_traverse( + self.primaryjoin, {}, replace) + else: + if self._has_foreign_annotations: + # TODO: coverage + return _deep_deannotate(self.primaryjoin, + values=("local", "remote")) + else: + return _deep_deannotate(self.primaryjoin) + + def _has_annotation(self, clause, annotation): + for col in visitors.iterate(clause, {}): + if annotation in col._annotations: + return True + else: + return False + + @util.memoized_property + def _has_foreign_annotations(self): + return self._has_annotation(self.primaryjoin, "foreign") + + @util.memoized_property + def _has_remote_annotations(self): + return self._has_annotation(self.primaryjoin, "remote") + + def _annotate_fks(self): + """Annotate the primaryjoin and secondaryjoin + structures with 'foreign' annotations marking columns + considered as foreign. 
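The AmbiguousForeignKeysError raised above is most commonly hit when two foreign keys point at the same table; foreign_keys= narrows the candidates, as in this hypothetical sketch::

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)

    class Customer(Base):
        __tablename__ = 'customer'
        id = Column(Integer, primary_key=True)
        billing_address_id = Column(Integer, ForeignKey('address.id'))
        shipping_address_id = Column(Integer, ForeignKey('address.id'))
        # without foreign_keys=..., _determine_joins() cannot pick a
        # join path and raises AmbiguousForeignKeysError
        billing_address = relationship(
            "Address", foreign_keys=[billing_address_id])
        shipping_address = relationship(
            "Address", foreign_keys=[shipping_address_id])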
+ + """ + if self._has_foreign_annotations: + return + + if self.consider_as_foreign_keys: + self._annotate_from_fk_list() + else: + self._annotate_present_fks() + + def _annotate_from_fk_list(self): + def check_fk(col): + if col in self.consider_as_foreign_keys: + return col._annotate({"foreign": True}) + self.primaryjoin = visitors.replacement_traverse( + self.primaryjoin, + {}, + check_fk + ) + if self.secondaryjoin is not None: + self.secondaryjoin = visitors.replacement_traverse( + self.secondaryjoin, + {}, + check_fk + ) + + def _annotate_present_fks(self): + if self.secondary is not None: + secondarycols = util.column_set(self.secondary.c) + else: + secondarycols = set() + + def is_foreign(a, b): + if isinstance(a, schema.Column) and \ + isinstance(b, schema.Column): + if a.references(b): + return a + elif b.references(a): + return b + + if secondarycols: + if a in secondarycols and b not in secondarycols: + return a + elif b in secondarycols and a not in secondarycols: + return b + + def visit_binary(binary): + if not isinstance(binary.left, sql.ColumnElement) or \ + not isinstance(binary.right, sql.ColumnElement): + return + + if "foreign" not in binary.left._annotations and \ + "foreign" not in binary.right._annotations: + col = is_foreign(binary.left, binary.right) + if col is not None: + if col.compare(binary.left): + binary.left = binary.left._annotate( + {"foreign": True}) + elif col.compare(binary.right): + binary.right = binary.right._annotate( + {"foreign": True}) + + self.primaryjoin = visitors.cloned_traverse( + self.primaryjoin, + {}, + {"binary": visit_binary} + ) + if self.secondaryjoin is not None: + self.secondaryjoin = visitors.cloned_traverse( + self.secondaryjoin, + {}, + {"binary": visit_binary} + ) + + def _refers_to_parent_table(self): + """Return True if the join condition contains column + comparisons where both columns are in both tables. + + """ + pt = self.parent_selectable + mt = self.child_selectable + result = [False] + + def visit_binary(binary): + c, f = binary.left, binary.right + if ( + isinstance(c, expression.ColumnClause) and + isinstance(f, expression.ColumnClause) and + pt.is_derived_from(c.table) and + pt.is_derived_from(f.table) and + mt.is_derived_from(c.table) and + mt.is_derived_from(f.table) + ): + result[0] = True + visitors.traverse( + self.primaryjoin, + {}, + {"binary": visit_binary} + ) + return result[0] + + def _tables_overlap(self): + """Return True if parent/child tables have some overlap.""" + + return selectables_overlap( + self.parent_selectable, self.child_selectable) + + def _annotate_remote(self): + """Annotate the primaryjoin and secondaryjoin + structures with 'remote' annotations marking columns + considered as part of the 'remote' side. + + """ + if self._has_remote_annotations: + return + + if self.secondary is not None: + self._annotate_remote_secondary() + elif self._local_remote_pairs or self._remote_side: + self._annotate_remote_from_args() + elif self._refers_to_parent_table(): + self._annotate_selfref(lambda col: "foreign" in col._annotations, False) + elif self._tables_overlap(): + self._annotate_remote_with_overlap() + else: + self._annotate_remote_distinct_selectables() + + def _annotate_remote_secondary(self): + """annotate 'remote' in primaryjoin, secondaryjoin + when 'secondary' is present. 
+ + """ + def repl(element): + if self.secondary.c.contains_column(element): + return element._annotate({"remote": True}) + self.primaryjoin = visitors.replacement_traverse( + self.primaryjoin, {}, repl) + self.secondaryjoin = visitors.replacement_traverse( + self.secondaryjoin, {}, repl) + + def _annotate_selfref(self, fn, remote_side_given): + """annotate 'remote' in primaryjoin, secondaryjoin + when the relationship is detected as self-referential. + + """ + def visit_binary(binary): + equated = binary.left.compare(binary.right) + if isinstance(binary.left, expression.ColumnClause) and \ + isinstance(binary.right, expression.ColumnClause): + # assume one to many - FKs are "remote" + if fn(binary.left): + binary.left = binary.left._annotate({"remote": True}) + if fn(binary.right) and not equated: + binary.right = binary.right._annotate( + {"remote": True}) + elif not remote_side_given: + self._warn_non_column_elements() + + self.primaryjoin = visitors.cloned_traverse( + self.primaryjoin, {}, + {"binary": visit_binary}) + + def _annotate_remote_from_args(self): + """annotate 'remote' in primaryjoin, secondaryjoin + when the 'remote_side' or '_local_remote_pairs' + arguments are used. + + """ + if self._local_remote_pairs: + if self._remote_side: + raise sa_exc.ArgumentError( + "remote_side argument is redundant " + "against more detailed _local_remote_side " + "argument.") + + remote_side = [r for (l, r) in self._local_remote_pairs] + else: + remote_side = self._remote_side + + if self._refers_to_parent_table(): + self._annotate_selfref(lambda col: col in remote_side, True) + else: + def repl(element): + if element in remote_side: + return element._annotate({"remote": True}) + self.primaryjoin = visitors.replacement_traverse( + self.primaryjoin, {}, repl) + + def _annotate_remote_with_overlap(self): + """annotate 'remote' in primaryjoin, secondaryjoin + when the parent/child tables have some set of + tables in common, though is not a fully self-referential + relationship. + + """ + def visit_binary(binary): + binary.left, binary.right = proc_left_right(binary.left, + binary.right) + binary.right, binary.left = proc_left_right(binary.right, + binary.left) + + check_entities = self.prop is not None and \ + self.prop.mapper is not self.prop.parent + + def proc_left_right(left, right): + if isinstance(left, expression.ColumnClause) and \ + isinstance(right, expression.ColumnClause): + if self.child_selectable.c.contains_column(right) and \ + self.parent_selectable.c.contains_column(left): + right = right._annotate({"remote": True}) + elif check_entities and \ + right._annotations.get('parentmapper') is self.prop.mapper: + right = right._annotate({"remote": True}) + elif check_entities and \ + left._annotations.get('parentmapper') is self.prop.mapper: + left = left._annotate({"remote": True}) + else: + self._warn_non_column_elements() + + return left, right + + self.primaryjoin = visitors.cloned_traverse( + self.primaryjoin, {}, + {"binary": visit_binary}) + + def _annotate_remote_distinct_selectables(self): + """annotate 'remote' in primaryjoin, secondaryjoin + when the parent/child tables are entirely + separate. + + """ + def repl(element): + if self.child_selectable.c.contains_column(element) and \ + (not self.parent_local_selectable.c. + contains_column(element) or + self.child_local_selectable.c. 
+ contains_column(element)): + return element._annotate({"remote": True}) + self.primaryjoin = visitors.replacement_traverse( + self.primaryjoin, {}, repl) + + def _warn_non_column_elements(self): + util.warn( + "Non-simple column elements in primary " + "join condition for property %s - consider using " + "remote() annotations to mark the remote side." + % self.prop + ) + + def _annotate_local(self): + """Annotate the primaryjoin and secondaryjoin + structures with 'local' annotations. + + This annotates all column elements found + simultaneously in the parent table + and the join condition that don't have a + 'remote' annotation set up from + _annotate_remote() or user-defined. + + """ + if self._has_annotation(self.primaryjoin, "local"): + return + + if self._local_remote_pairs: + local_side = util.column_set([l for (l, r) + in self._local_remote_pairs]) + else: + local_side = util.column_set(self.parent_selectable.c) + + def locals_(elem): + if "remote" not in elem._annotations and \ + elem in local_side: + return elem._annotate({"local": True}) + self.primaryjoin = visitors.replacement_traverse( + self.primaryjoin, {}, locals_ + ) + + def _check_remote_side(self): + if not self.local_remote_pairs: + raise sa_exc.ArgumentError( + 'Relationship %s could ' + 'not determine any unambiguous local/remote column ' + 'pairs based on join condition and remote_side ' + 'arguments. ' + 'Consider using the remote() annotation to ' + 'accurately mark those elements of the join ' + 'condition that are on the remote side of ' + 'the relationship.' % (self.prop, )) + + def _check_foreign_cols(self, join_condition, primary): + """Check the foreign key columns collected and emit error + messages.""" + + can_sync = False + + foreign_cols = self._gather_columns_with_annotation( + join_condition, "foreign") + + has_foreign = bool(foreign_cols) + + if primary: + can_sync = bool(self.synchronize_pairs) + else: + can_sync = bool(self.secondary_synchronize_pairs) + + if self.support_sync and can_sync or \ + (not self.support_sync and has_foreign): + return + + # from here below is just determining the best error message + # to report. Check for a join condition using any operator + # (not just ==), perhaps they need to turn on "viewonly=True". + if self.support_sync and has_foreign and not can_sync: + err = "Could not locate any simple equality expressions "\ + "involving locally mapped foreign key columns for "\ + "%s join condition "\ + "'%s' on relationship %s." % ( + primary and 'primary' or 'secondary', + join_condition, + self.prop + ) + err += \ + " Ensure that referencing columns are associated "\ + "with a ForeignKey or ForeignKeyConstraint, or are "\ + "annotated in the join condition with the foreign() "\ + "annotation. To allow comparison operators other than "\ + "'==', the relationship can be marked as viewonly=True." + + raise sa_exc.ArgumentError(err) + else: + err = "Could not locate any relevant foreign key columns "\ + "for %s join condition '%s' on relationship %s." % ( + primary and 'primary' or 'secondary', + join_condition, + self.prop + ) + err += \ + ' Ensure that referencing columns are associated '\ + 'with a ForeignKey or ForeignKeyConstraint, or are '\ + 'annotated in the join condition with the foreign() '\ + 'annotation.' + raise sa_exc.ArgumentError(err) + + def _determine_direction(self): + """Determine if this relationship is one to many, many to one, + many to many. 
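_check_foreign_cols() above suggests viewonly=True when the join uses operators other than ``==``; with no simple equality there is nothing to synchronize, so the relationship must be read-only. A hypothetical range-join sketch::

    from sqlalchemy import Column, Date, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class Event(Base):
        __tablename__ = 'event'
        id = Column(Integer, primary_key=True)
        happened_on = Column(Date)

    class Day(Base):
        __tablename__ = 'day'
        id = Column(Integer, primary_key=True)
        starts = Column(Date)
        ends = Column(Date)
        # no simple equality -> no synchronize_pairs; viewonly=True
        # satisfies the check above, since has_foreign is then enough
        events = relationship(
            "Event",
            primaryjoin="and_(Day.starts <= foreign(Event.happened_on), "
                        "foreign(Event.happened_on) < Day.ends)",
            viewonly=True,
        )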
+ + """ + if self.secondaryjoin is not None: + self.direction = MANYTOMANY + else: + parentcols = util.column_set(self.parent_selectable.c) + targetcols = util.column_set(self.child_selectable.c) + + # fk collection which suggests ONETOMANY. + onetomany_fk = targetcols.intersection( + self.foreign_key_columns) + + # fk collection which suggests MANYTOONE. + + manytoone_fk = parentcols.intersection( + self.foreign_key_columns) + + if onetomany_fk and manytoone_fk: + # fks on both sides. test for overlap of local/remote + # with foreign key. + # we will gather columns directly from their annotations + # without deannotating, so that we can distinguish on a column + # that refers to itself. + + # 1. columns that are both remote and FK suggest + # onetomany. + onetomany_local = self._gather_columns_with_annotation( + self.primaryjoin, "remote", "foreign") + + # 2. columns that are FK but are not remote (e.g. local) + # suggest manytoone. + manytoone_local = set([c for c in + self._gather_columns_with_annotation( + self.primaryjoin, + "foreign") + if "remote" not in c._annotations]) + + # 3. if both collections are present, remove columns that + # refer to themselves. This is for the case of + # and_(Me.id == Me.remote_id, Me.version == Me.version) + if onetomany_local and manytoone_local: + self_equated = self.remote_columns.intersection( + self.local_columns + ) + onetomany_local = onetomany_local.difference(self_equated) + manytoone_local = manytoone_local.difference(self_equated) + + # at this point, if only one or the other collection is + # present, we know the direction, otherwise it's still + # ambiguous. + + if onetomany_local and not manytoone_local: + self.direction = ONETOMANY + elif manytoone_local and not onetomany_local: + self.direction = MANYTOONE + else: + raise sa_exc.ArgumentError( + "Can't determine relationship" + " direction for relationship '%s' - foreign " + "key columns within the join condition are present " + "in both the parent and the child's mapped tables. " + "Ensure that only those columns referring " + "to a parent column are marked as foreign, " + "either via the foreign() annotation or " + "via the foreign_keys argument." % self.prop) + elif onetomany_fk: + self.direction = ONETOMANY + elif manytoone_fk: + self.direction = MANYTOONE + else: + raise sa_exc.ArgumentError( + "Can't determine relationship " + "direction for relationship '%s' - foreign " + "key columns are present in neither the parent " + "nor the child's mapped tables" % self.prop) + + def _deannotate_pairs(self, collection): + """provide deannotation for the various lists of + pairs, so that using them in hashes doesn't incur + high-overhead __eq__() comparisons against + original columns mapped. 
+ + """ + return [(x._deannotate(), y._deannotate()) + for x, y in collection] + + def _setup_pairs(self): + sync_pairs = [] + lrp = util.OrderedSet([]) + secondary_sync_pairs = [] + + def go(joincond, collection): + def visit_binary(binary, left, right): + if "remote" in right._annotations and \ + "remote" not in left._annotations and \ + self.can_be_synced_fn(left): + lrp.add((left, right)) + elif "remote" in left._annotations and \ + "remote" not in right._annotations and \ + self.can_be_synced_fn(right): + lrp.add((right, left)) + if binary.operator is operators.eq and \ + self.can_be_synced_fn(left, right): + if "foreign" in right._annotations: + collection.append((left, right)) + elif "foreign" in left._annotations: + collection.append((right, left)) + visit_binary_product(visit_binary, joincond) + + for joincond, collection in [ + (self.primaryjoin, sync_pairs), + (self.secondaryjoin, secondary_sync_pairs) + ]: + if joincond is None: + continue + go(joincond, collection) + + self.local_remote_pairs = self._deannotate_pairs(lrp) + self.synchronize_pairs = self._deannotate_pairs(sync_pairs) + self.secondary_synchronize_pairs = \ + self._deannotate_pairs(secondary_sync_pairs) + + _track_overlapping_sync_targets = weakref.WeakKeyDictionary() + + def _warn_for_conflicting_sync_targets(self): + if not self.support_sync: + return + + # we would like to detect if we are synchronizing any column + # pairs in conflict with another relationship that wishes to sync + # an entirely different column to the same target. This is a + # very rare edge case so we will try to minimize the memory/overhead + # impact of this check + for from_, to_ in [ + (from_, to_) for (from_, to_) in self.synchronize_pairs + ] + [ + (from_, to_) for (from_, to_) in self.secondary_synchronize_pairs + ]: + # save ourselves a ton of memory and overhead by only + # considering columns that are subject to a overlapping + # FK constraints at the core level. This condition can arise + # if multiple relationships overlap foreign() directly, but + # we're going to assume it's typically a ForeignKeyConstraint- + # level configuration that benefits from this warning. + if len(to_.foreign_keys) < 2: + continue + + if to_ not in self._track_overlapping_sync_targets: + self._track_overlapping_sync_targets[to_] = \ + weakref.WeakKeyDictionary({self.prop: from_}) + else: + other_props = [] + prop_to_from = self._track_overlapping_sync_targets[to_] + for pr, fr_ in prop_to_from.items(): + if pr.mapper in mapperlib._mapper_registry and \ + fr_ is not from_ and \ + pr not in self.prop._reverse_property: + other_props.append((pr, fr_)) + + if other_props: + util.warn( + "relationship '%s' will copy column %s to column %s, " + "which conflicts with relationship(s): %s. " + "Consider applying " + "viewonly=True to read-only relationships, or provide " + "a primaryjoin condition marking writable columns " + "with the foreign() annotation." 
% ( + self.prop, + from_, to_, + ", ".join( + "'%s' (copies %s to %s)" % (pr, fr_, to_) + for (pr, fr_) in other_props) + ) + ) + self._track_overlapping_sync_targets[to_][self.prop] = from_ + + @util.memoized_property + def remote_columns(self): + return self._gather_join_annotations("remote") + + @util.memoized_property + def local_columns(self): + return self._gather_join_annotations("local") + + @util.memoized_property + def foreign_key_columns(self): + return self._gather_join_annotations("foreign") + + @util.memoized_property + def deannotated_primaryjoin(self): + return _deep_deannotate(self.primaryjoin) + + @util.memoized_property + def deannotated_secondaryjoin(self): + if self.secondaryjoin is not None: + return _deep_deannotate(self.secondaryjoin) + else: + return None + + def _gather_join_annotations(self, annotation): + s = set( + self._gather_columns_with_annotation( + self.primaryjoin, annotation) + ) + if self.secondaryjoin is not None: + s.update( + self._gather_columns_with_annotation( + self.secondaryjoin, annotation) + ) + return set([x._deannotate() for x in s]) + + def _gather_columns_with_annotation(self, clause, *annotation): + annotation = set(annotation) + return set([ + col for col in visitors.iterate(clause, {}) + if annotation.issubset(col._annotations) + ]) + + def join_targets(self, source_selectable, + dest_selectable, + aliased, + single_crit=None): + """Given a source and destination selectable, create a + join between them. + + This takes into account aliasing the join clause + to reference the appropriate corresponding columns + in the target objects, as well as the extra child + criterion, equivalent column sets, etc. + + """ + + # place a barrier on the destination such that + # replacement traversals won't ever dig into it. + # its internal structure remains fixed + # regardless of context. + dest_selectable = _shallow_annotate( + dest_selectable, + {'no_replacement_traverse': True}) + + primaryjoin, secondaryjoin, secondary = self.primaryjoin, \ + self.secondaryjoin, self.secondary + + # adjust the join condition for single table inheritance, + # in the case that the join is to a subclass + # this is analogous to the + # "_adjust_for_single_table_inheritance()" method in Query. 
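The warning assembled above recommends viewonly=True when two relationships would write the same column. A hypothetical sketch of that remedy (note the overlap detection itself only fires for columns under multiple FK constraints)::

    from sqlalchemy import Boolean, Column, ForeignKey, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class B(Base):
        __tablename__ = 'b'
        id = Column(Integer, primary_key=True)
        flag = Column(Boolean)

    class A(Base):
        __tablename__ = 'a'
        id = Column(Integer, primary_key=True)
        b_id = Column(Integer, ForeignKey('b.id'))
        b = relationship("B")          # the single writer of a.b_id
        flagged_b = relationship(      # read-only filtered view
            "B",
            primaryjoin="and_(A.b_id == B.id, B.flag == True)",
            viewonly=True)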
+ + if single_crit is not None: + if secondaryjoin is not None: + secondaryjoin = secondaryjoin & single_crit + else: + primaryjoin = primaryjoin & single_crit + + if aliased: + if secondary is not None: + secondary = secondary.alias(flat=True) + primary_aliasizer = ClauseAdapter(secondary) + secondary_aliasizer = \ + ClauseAdapter(dest_selectable, + equivalents=self.child_equivalents).\ + chain(primary_aliasizer) + if source_selectable is not None: + primary_aliasizer = \ + ClauseAdapter(secondary).\ + chain(ClauseAdapter( + source_selectable, + equivalents=self.parent_equivalents)) + secondaryjoin = \ + secondary_aliasizer.traverse(secondaryjoin) + else: + primary_aliasizer = ClauseAdapter( + dest_selectable, + exclude_fn=_ColInAnnotations("local"), + equivalents=self.child_equivalents) + if source_selectable is not None: + primary_aliasizer.chain( + ClauseAdapter(source_selectable, + exclude_fn=_ColInAnnotations("remote"), + equivalents=self.parent_equivalents)) + secondary_aliasizer = None + + primaryjoin = primary_aliasizer.traverse(primaryjoin) + target_adapter = secondary_aliasizer or primary_aliasizer + target_adapter.exclude_fn = None + else: + target_adapter = None + return primaryjoin, secondaryjoin, secondary, \ + target_adapter, dest_selectable + + def create_lazy_clause(self, reverse_direction=False): + binds = util.column_dict() + equated_columns = util.column_dict() + + has_secondary = self.secondaryjoin is not None + + if has_secondary: + lookup = collections.defaultdict(list) + for l, r in self.local_remote_pairs: + lookup[l].append((l, r)) + equated_columns[r] = l + elif not reverse_direction: + for l, r in self.local_remote_pairs: + equated_columns[r] = l + else: + for l, r in self.local_remote_pairs: + equated_columns[l] = r + + def col_to_bind(col): + + if ( + (not reverse_direction and 'local' in col._annotations) or + reverse_direction and ( + (has_secondary and col in lookup) or + (not has_secondary and 'remote' in col._annotations) + ) + ): + if col not in binds: + binds[col] = sql.bindparam( + None, None, type_=col.type, unique=True) + return binds[col] + return None + + lazywhere = self.primaryjoin + if self.secondaryjoin is None or not reverse_direction: + lazywhere = visitors.replacement_traverse( + lazywhere, {}, col_to_bind) + + if self.secondaryjoin is not None: + secondaryjoin = self.secondaryjoin + if reverse_direction: + secondaryjoin = visitors.replacement_traverse( + secondaryjoin, {}, col_to_bind) + lazywhere = sql.and_(lazywhere, secondaryjoin) + + bind_to_col = dict((binds[col].key, col) for col in binds) + + # this is probably not necessary + lazywhere = _deep_deannotate(lazywhere) + + return lazywhere, bind_to_col, equated_columns + + +class _ColInAnnotations(object): + """Serializable equivalent to: + + lambda c: "name" in c._annotations + """ + + def __init__(self, name): + self.name = name + + def __call__(self, c): + return self.name in c._annotations diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/scoping.py b/lib/python3.4/site-packages/sqlalchemy/orm/scoping.py new file mode 100644 index 0000000..6306514 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/scoping.py @@ -0,0 +1,184 @@ +# orm/scoping.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from .. import exc as sa_exc +from ..util import ScopedRegistry, ThreadLocalRegistry, warn +from .
import class_mapper, exc as orm_exc +from .session import Session + + +__all__ = ['scoped_session'] + + +class scoped_session(object): + """Provides scoped management of :class:`.Session` objects. + + See :ref:`unitofwork_contextual` for a tutorial. + + """ + + session_factory = None + """The `session_factory` provided to `__init__` is stored in this + attribute and may be accessed at a later time. This can be useful when + a new non-scoped :class:`.Session` or :class:`.Connection` to the + database is needed.""" + + def __init__(self, session_factory, scopefunc=None): + """Construct a new :class:`.scoped_session`. + + :param session_factory: a factory to create new :class:`.Session` + instances. This is usually, but not necessarily, an instance + of :class:`.sessionmaker`. + :param scopefunc: optional function which defines + the current scope. If not passed, the :class:`.scoped_session` + object assumes "thread-local" scope, and will use + a Python ``threading.local()`` in order to maintain the current + :class:`.Session`. If passed, the function should return + a hashable token; this token will be used as the key in a + dictionary in order to store and retrieve the current + :class:`.Session`. + + """ + self.session_factory = session_factory + + if scopefunc: + self.registry = ScopedRegistry(session_factory, scopefunc) + else: + self.registry = ThreadLocalRegistry(session_factory) + + def __call__(self, **kw): + """Return the current :class:`.Session`, creating it + using the :attr:`.scoped_session.session_factory` if not present. + + :param \**kw: Keyword arguments will be passed to the + :attr:`.scoped_session.session_factory` callable, if an existing + :class:`.Session` is not present. If the :class:`.Session` is present + and keyword arguments have been passed, + :exc:`~sqlalchemy.exc.InvalidRequestError` is raised. + + """ + if kw: + scope = kw.pop('scope', False) + if scope is not None: + if self.registry.has(): + raise sa_exc.InvalidRequestError( + "Scoped session is already present; " + "no new arguments may be specified.") + else: + sess = self.session_factory(**kw) + self.registry.set(sess) + return sess + else: + return self.session_factory(**kw) + else: + return self.registry() + + def remove(self): + """Dispose of the current :class:`.Session`, if present. + + This will first call :meth:`.Session.close` method + on the current :class:`.Session`, which releases any existing + transactional/connection resources still being held; transactions + specifically are rolled back. The :class:`.Session` is then + discarded. Upon next usage within the same scope, + the :class:`.scoped_session` will produce a new + :class:`.Session` object. + + """ + + if self.registry.has(): + self.registry().close() + self.registry.clear() + + def configure(self, **kwargs): + """reconfigure the :class:`.sessionmaker` used by this + :class:`.scoped_session`. + + See :meth:`.sessionmaker.configure`. + + """ + + if self.registry.has(): + warn('At least one scoped session is already present. ' + ' configure() can not affect sessions that have ' + 'already been created.') + + self.session_factory.configure(**kwargs) + + def query_property(self, query_cls=None): + """return a class property which produces a :class:`.Query` object + against the class and the current :class:`.Session` when called. 
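A minimal usage sketch for the registry described above; the engine URL is an illustrative assumption::

    from sqlalchemy import create_engine
    from sqlalchemy.orm import scoped_session, sessionmaker

    Session = scoped_session(sessionmaker(bind=create_engine('sqlite://')))

    s1 = Session()    # creates the Session for the current thread/scope
    s2 = Session()    # same scope, so the very same Session comes back
    assert s1 is s2

    Session.remove()  # close and discard; the next call starts fresh
    assert Session() is not s1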
+ + e.g.:: + + Session = scoped_session(sessionmaker()) + + class MyClass(object): + query = Session.query_property() + + # after mappers are defined + result = MyClass.query.filter(MyClass.name=='foo').all() + + Produces instances of the session's configured query class by + default. To override and use a custom implementation, provide + a ``query_cls`` callable. The callable will be invoked with + the class's mapper as a positional argument and a session + keyword argument. + + There is no limit to the number of query properties placed on + a class. + + """ + class query(object): + def __get__(s, instance, owner): + try: + mapper = class_mapper(owner) + if mapper: + if query_cls: + # custom query class + return query_cls(mapper, session=self.registry()) + else: + # session's configured query class + return self.registry().query(mapper) + except orm_exc.UnmappedClassError: + return None + return query() + +ScopedSession = scoped_session +"""Old name for backwards compatibility.""" + + +def instrument(name): + def do(self, *args, **kwargs): + return getattr(self.registry(), name)(*args, **kwargs) + return do + +for meth in Session.public_methods: + setattr(scoped_session, meth, instrument(meth)) + + +def makeprop(name): + def set(self, attr): + setattr(self.registry(), name, attr) + + def get(self): + return getattr(self.registry(), name) + + return property(get, set) + +for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map', + 'is_active', 'autoflush', 'no_autoflush', 'info'): + setattr(scoped_session, prop, makeprop(prop)) + + +def clslevel(name): + def do(cls, *args, **kwargs): + return getattr(Session, name)(*args, **kwargs) + return classmethod(do) + +for prop in ('close_all', 'object_session', 'identity_key'): + setattr(scoped_session, prop, clslevel(prop)) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/session.py b/lib/python3.4/site-packages/sqlalchemy/orm/session.py new file mode 100644 index 0000000..000441f --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/session.py @@ -0,0 +1,2820 @@ +# orm/session.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +"""Provides the Session class and related utilities.""" + + +import weakref +from .. import util, sql, engine, exc as sa_exc +from ..sql import util as sql_util, expression +from . import ( + SessionExtension, attributes, exc, query, + loading, identity +) +from ..inspection import inspect +from .base import ( + object_mapper, class_mapper, + _class_to_mapper, _state_mapper, object_state, + _none_set, state_str, instance_str +) +import itertools +from . import persistence +from .unitofwork import UOWTransaction +from . import state as statelib +import sys + +__all__ = ['Session', 'SessionTransaction', + 'SessionExtension', 'sessionmaker'] + +_sessions = weakref.WeakValueDictionary() +"""Weak-referencing dictionary of :class:`.Session` objects. +""" + + +def _state_session(state): + """Given an :class:`.InstanceState`, return the :class:`.Session` + associated, if any. 
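The instrument()/makeprop()/clslevel() loops above are what let the registry object itself be used like a Session. A sketch continuing the hypothetical registry from the previous example::

    from sqlalchemy import Column, Integer
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Thing(Base):
        __tablename__ = 'thing'
        id = Column(Integer, primary_key=True)

    Base.metadata.create_all(Session.bind)  # makeprop('bind')

    t = Thing()
    Session.add(t)        # instrument('add') -> current Session
    Session.commit()      # instrument('commit')
    print(Session.object_session(t))  # clslevel('object_session')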
+ """ + if state.session_id: + try: + return _sessions[state.session_id] + except KeyError: + pass + return None + + +class _SessionClassMethods(object): + """Class-level methods for :class:`.Session`, :class:`.sessionmaker`.""" + + @classmethod + def close_all(cls): + """Close *all* sessions in memory.""" + + for sess in _sessions.values(): + sess.close() + + @classmethod + @util.dependencies("sqlalchemy.orm.util") + def identity_key(cls, orm_util, *args, **kwargs): + """Return an identity key. + + This is an alias of :func:`.util.identity_key`. + + """ + return orm_util.identity_key(*args, **kwargs) + + @classmethod + def object_session(cls, instance): + """Return the :class:`.Session` to which an object belongs. + + This is an alias of :func:`.object_session`. + + """ + + return object_session(instance) + + +ACTIVE = util.symbol('ACTIVE') +PREPARED = util.symbol('PREPARED') +COMMITTED = util.symbol('COMMITTED') +DEACTIVE = util.symbol('DEACTIVE') +CLOSED = util.symbol('CLOSED') + + +class SessionTransaction(object): + """A :class:`.Session`-level transaction. + + :class:`.SessionTransaction` is a mostly behind-the-scenes object + not normally referenced directly by application code. It coordinates + among multiple :class:`.Connection` objects, maintaining a database + transaction for each one individually, committing or rolling them + back all at once. It also provides optional two-phase commit behavior + which can augment this coordination operation. + + The :attr:`.Session.transaction` attribute of :class:`.Session` + refers to the current :class:`.SessionTransaction` object in use, if any. + + + A :class:`.SessionTransaction` is associated with a :class:`.Session` + in its default mode of ``autocommit=False`` immediately, associated + with no database connections. As the :class:`.Session` is called upon + to emit SQL on behalf of various :class:`.Engine` or :class:`.Connection` + objects, a corresponding :class:`.Connection` and associated + :class:`.Transaction` is added to a collection within the + :class:`.SessionTransaction` object, becoming one of the + connection/transaction pairs maintained by the + :class:`.SessionTransaction`. + + The lifespan of the :class:`.SessionTransaction` ends when the + :meth:`.Session.commit`, :meth:`.Session.rollback` or + :meth:`.Session.close` methods are called. At this point, the + :class:`.SessionTransaction` removes its association with its parent + :class:`.Session`. A :class:`.Session` that is in ``autocommit=False`` + mode will create a new :class:`.SessionTransaction` to replace it + immediately, whereas a :class:`.Session` that's in ``autocommit=True`` + mode will remain without a :class:`.SessionTransaction` until the + :meth:`.Session.begin` method is called. + + Another detail of :class:`.SessionTransaction` behavior is that it is + capable of "nesting". This means that the :meth:`.Session.begin` method + can be called while an existing :class:`.SessionTransaction` is already + present, producing a new :class:`.SessionTransaction` that temporarily + replaces the parent :class:`.SessionTransaction`. When a + :class:`.SessionTransaction` is produced as nested, it assigns itself to + the :attr:`.Session.transaction` attribute. When it is ended via + :meth:`.Session.commit` or :meth:`.Session.rollback`, it restores its + parent :class:`.SessionTransaction` back onto the + :attr:`.Session.transaction` attribute. The behavior is effectively a + stack, where :attr:`.Session.transaction` refers to the current head of + the stack. 
+
+    The purpose of this stack is to allow nesting of
+    :meth:`.Session.rollback` or :meth:`.Session.commit` calls in context
+    with various flavors of :meth:`.Session.begin`.  This nesting behavior
+    applies when :meth:`.Session.begin_nested` is used to emit a
+    SAVEPOINT transaction, and is also used to produce a so-called
+    "subtransaction" which allows a block of code to use a
+    begin/rollback/commit sequence regardless of whether or not its enclosing
+    code block has begun a transaction.  The :meth:`.flush` method, whether
+    called explicitly or via autoflush, is the primary consumer of the
+    "subtransaction" feature, in that it wishes to guarantee that it works
+    within a transaction block regardless of whether or not the
+    :class:`.Session` is in transactional mode when the method is called.
+
+    See also:
+
+    :meth:`.Session.rollback`
+
+    :meth:`.Session.commit`
+
+    :meth:`.Session.begin`
+
+    :meth:`.Session.begin_nested`
+
+    :attr:`.Session.is_active`
+
+    :meth:`.SessionEvents.after_commit`
+
+    :meth:`.SessionEvents.after_rollback`
+
+    :meth:`.SessionEvents.after_soft_rollback`
+
+    """
+
+    _rollback_exception = None
+
+    def __init__(self, session, parent=None, nested=False):
+        self.session = session
+        self._connections = {}
+        self._parent = parent
+        self.nested = nested
+        self._state = ACTIVE
+        if not parent and nested:
+            raise sa_exc.InvalidRequestError(
+                "Can't start a SAVEPOINT transaction when no existing "
+                "transaction is in progress")
+
+        if self.session._enable_transaction_accounting:
+            self._take_snapshot()
+
+        if self.session.dispatch.after_transaction_create:
+            self.session.dispatch.after_transaction_create(self.session, self)
+
+    @property
+    def is_active(self):
+        return self.session is not None and self._state is ACTIVE
+
+    def _assert_active(self, prepared_ok=False,
+                       rollback_ok=False,
+                       deactive_ok=False,
+                       closed_msg="This transaction is closed"):
+        if self._state is COMMITTED:
+            raise sa_exc.InvalidRequestError(
+                "This session is in 'committed' state; no further "
+                "SQL can be emitted within this transaction."
+            )
+        elif self._state is PREPARED:
+            if not prepared_ok:
+                raise sa_exc.InvalidRequestError(
+                    "This session is in 'prepared' state; no further "
+                    "SQL can be emitted within this transaction."
+                )
+        elif self._state is DEACTIVE:
+            if not deactive_ok and not rollback_ok:
+                if self._rollback_exception:
+                    raise sa_exc.InvalidRequestError(
+                        "This Session's transaction has been rolled back "
+                        "due to a previous exception during flush."
+                        " To begin a new transaction with this Session, "
+                        "first issue Session.rollback()."
+                        " Original exception was: %s"
+                        % self._rollback_exception
+                    )
+                elif not deactive_ok:
+                    raise sa_exc.InvalidRequestError(
+                        "This Session's transaction has been rolled back "
+                        "by a nested rollback() call.  To begin a new "
+                        "transaction, issue Session.rollback() first."
+ ) + elif self._state is CLOSED: + raise sa_exc.ResourceClosedError(closed_msg) + + @property + def _is_transaction_boundary(self): + return self.nested or not self._parent + + def connection(self, bindkey, execution_options=None, **kwargs): + self._assert_active() + bind = self.session.get_bind(bindkey, **kwargs) + return self._connection_for_bind(bind, execution_options) + + def _begin(self, nested=False): + self._assert_active() + return SessionTransaction( + self.session, self, nested=nested) + + def _iterate_parents(self, upto=None): + + current = self + result = () + while current: + result += (current, ) + if current._parent is upto: + break + elif current._parent is None: + raise sa_exc.InvalidRequestError( + "Transaction %s is not on the active transaction list" % ( + upto)) + else: + current = current._parent + + return result + + def _take_snapshot(self): + if not self._is_transaction_boundary: + self._new = self._parent._new + self._deleted = self._parent._deleted + self._dirty = self._parent._dirty + self._key_switches = self._parent._key_switches + return + + if not self.session._flushing: + self.session.flush() + + self._new = weakref.WeakKeyDictionary() + self._deleted = weakref.WeakKeyDictionary() + self._dirty = weakref.WeakKeyDictionary() + self._key_switches = weakref.WeakKeyDictionary() + + def _restore_snapshot(self, dirty_only=False): + assert self._is_transaction_boundary + + for s in set(self._new).union(self.session._new): + self.session._expunge_state(s) + if s.key: + del s.key + + for s, (oldkey, newkey) in self._key_switches.items(): + self.session.identity_map.safe_discard(s) + s.key = oldkey + self.session.identity_map.replace(s) + + for s in set(self._deleted).union(self.session._deleted): + if s.deleted: + # assert s in self._deleted + del s.deleted + self.session._update_impl(s, discard_existing=True) + + assert not self.session._deleted + + for s in self.session.identity_map.all_states(): + if not dirty_only or s.modified or s in self._dirty: + s._expire(s.dict, self.session.identity_map._modified) + + def _remove_snapshot(self): + assert self._is_transaction_boundary + + if not self.nested and self.session.expire_on_commit: + for s in self.session.identity_map.all_states(): + s._expire(s.dict, self.session.identity_map._modified) + for s in list(self._deleted): + s._detach() + self._deleted.clear() + elif self.nested: + self._parent._new.update(self._new) + self._parent._dirty.update(self._dirty) + self._parent._deleted.update(self._deleted) + self._parent._key_switches.update(self._key_switches) + + def _connection_for_bind(self, bind, execution_options): + self._assert_active() + + if bind in self._connections: + if execution_options: + util.warn( + "Connection is already established for the " + "given bind; execution_options ignored") + return self._connections[bind][0] + + if self._parent: + conn = self._parent._connection_for_bind(bind, execution_options) + if not self.nested: + return conn + else: + if isinstance(bind, engine.Connection): + conn = bind + if conn.engine in self._connections: + raise sa_exc.InvalidRequestError( + "Session already has a Connection associated for the " + "given Connection's Engine") + else: + conn = bind.contextual_connect() + + if execution_options: + conn = conn.execution_options(**execution_options) + + if self.session.twophase and self._parent is None: + transaction = conn.begin_twophase() + elif self.nested: + transaction = conn.begin_nested() + else: + transaction = conn.begin() + + self._connections[conn] = 
self._connections[conn.engine] = \ + (conn, transaction, conn is not bind) + self.session.dispatch.after_begin(self.session, self, conn) + return conn + + def prepare(self): + if self._parent is not None or not self.session.twophase: + raise sa_exc.InvalidRequestError( + "'twophase' mode not enabled, or not root transaction; " + "can't prepare.") + self._prepare_impl() + + def _prepare_impl(self): + self._assert_active() + if self._parent is None or self.nested: + self.session.dispatch.before_commit(self.session) + + stx = self.session.transaction + if stx is not self: + for subtransaction in stx._iterate_parents(upto=self): + subtransaction.commit() + + if not self.session._flushing: + for _flush_guard in range(100): + if self.session._is_clean(): + break + self.session.flush() + else: + raise exc.FlushError( + "Over 100 subsequent flushes have occurred within " + "session.commit() - is an after_flush() hook " + "creating new objects?") + + if self._parent is None and self.session.twophase: + try: + for t in set(self._connections.values()): + t[1].prepare() + except: + with util.safe_reraise(): + self.rollback() + + self._state = PREPARED + + def commit(self): + self._assert_active(prepared_ok=True) + if self._state is not PREPARED: + self._prepare_impl() + + if self._parent is None or self.nested: + for t in set(self._connections.values()): + t[1].commit() + + self._state = COMMITTED + self.session.dispatch.after_commit(self.session) + + if self.session._enable_transaction_accounting: + self._remove_snapshot() + + self.close() + return self._parent + + def rollback(self, _capture_exception=False): + self._assert_active(prepared_ok=True, rollback_ok=True) + + stx = self.session.transaction + if stx is not self: + for subtransaction in stx._iterate_parents(upto=self): + subtransaction.close() + + boundary = self + if self._state in (ACTIVE, PREPARED): + for transaction in self._iterate_parents(): + if transaction._parent is None or transaction.nested: + transaction._rollback_impl() + transaction._state = DEACTIVE + boundary = transaction + break + else: + transaction._state = DEACTIVE + + sess = self.session + + if sess._enable_transaction_accounting and \ + not sess._is_clean(): + + # if items were added, deleted, or mutated + # here, we need to re-restore the snapshot + util.warn( + "Session's state has been changed on " + "a non-active transaction - this state " + "will be discarded.") + boundary._restore_snapshot(dirty_only=boundary.nested) + + self.close() + if self._parent and _capture_exception: + self._parent._rollback_exception = sys.exc_info()[1] + + sess.dispatch.after_soft_rollback(sess, self) + + return self._parent + + def _rollback_impl(self): + for t in set(self._connections.values()): + t[1].rollback() + + if self.session._enable_transaction_accounting: + self._restore_snapshot(dirty_only=self.nested) + + self.session.dispatch.after_rollback(self.session) + + def close(self, invalidate=False): + self.session.transaction = self._parent + if self._parent is None: + for connection, transaction, autoclose in \ + set(self._connections.values()): + if invalidate: + connection.invalidate() + if autoclose: + connection.close() + else: + transaction.close() + + self._state = CLOSED + if self.session.dispatch.after_transaction_end: + self.session.dispatch.after_transaction_end(self.session, self) + + if self._parent is None: + if not self.session.autocommit: + self.session.begin() + self.session = None + self._connections = None + + def __enter__(self): + return self + + def 
__exit__(self, type, value, traceback): + self._assert_active(deactive_ok=True, prepared_ok=True) + if self.session.transaction is None: + return + if type is None: + try: + self.commit() + except: + with util.safe_reraise(): + self.rollback() + else: + self.rollback() + + +class Session(_SessionClassMethods): + """Manages persistence operations for ORM-mapped objects. + + The Session's usage paradigm is described at :doc:`/orm/session`. + + + """ + + public_methods = ( + '__contains__', '__iter__', 'add', 'add_all', 'begin', 'begin_nested', + 'close', 'commit', 'connection', 'delete', 'execute', 'expire', + 'expire_all', 'expunge', 'expunge_all', 'flush', 'get_bind', + 'is_modified', 'bulk_save_objects', 'bulk_insert_mappings', + 'bulk_update_mappings', + 'merge', 'query', 'refresh', 'rollback', + 'scalar') + + def __init__(self, bind=None, autoflush=True, expire_on_commit=True, + _enable_transaction_accounting=True, + autocommit=False, twophase=False, + weak_identity_map=True, binds=None, extension=None, + info=None, + query_cls=query.Query): + """Construct a new Session. + + See also the :class:`.sessionmaker` function which is used to + generate a :class:`.Session`-producing callable with a given + set of arguments. + + :param autocommit: + + .. warning:: + + The autocommit flag is **not for general use**, and if it is + used, queries should only be invoked within the span of a + :meth:`.Session.begin` / :meth:`.Session.commit` pair. Executing + queries outside of a demarcated transaction is a legacy mode + of usage, and can in some cases lead to concurrent connection + checkouts. + + Defaults to ``False``. When ``True``, the + :class:`.Session` does not keep a persistent transaction running, + and will acquire connections from the engine on an as-needed basis, + returning them immediately after their use. Flushes will begin and + commit (or possibly rollback) their own transaction if no + transaction is present. When using this mode, the + :meth:`.Session.begin` method is used to explicitly start + transactions. + + .. seealso:: + + :ref:`session_autocommit` + + :param autoflush: When ``True``, all query operations will issue a + :meth:`~.Session.flush` call to this ``Session`` before proceeding. + This is a convenience feature so that :meth:`~.Session.flush` need + not be called repeatedly in order for database queries to retrieve + results. It's typical that ``autoflush`` is used in conjunction + with ``autocommit=False``. In this scenario, explicit calls to + :meth:`~.Session.flush` are rarely needed; you usually only need to + call :meth:`~.Session.commit` (which flushes) to finalize changes. + + :param bind: An optional :class:`.Engine` or :class:`.Connection` to + which this ``Session`` should be bound. When specified, all SQL + operations performed by this session will execute via this + connectable. + + :param binds: An optional dictionary which contains more granular + "bind" information than the ``bind`` parameter provides. This + dictionary can map individual :class`.Table` + instances as well as :class:`~.Mapper` instances to individual + :class:`.Engine` or :class:`.Connection` objects. Operations which + proceed relative to a particular :class:`.Mapper` will consult this + dictionary for the direct :class:`.Mapper` instance as + well as the mapper's ``mapped_table`` attribute in order to locate + a connectable to use. The full resolution is described in the + :meth:`.Session.get_bind`. 
+ Usage looks like:: + + Session = sessionmaker(binds={ + SomeMappedClass: create_engine('postgresql://engine1'), + somemapper: create_engine('postgresql://engine2'), + some_table: create_engine('postgresql://engine3'), + }) + + Also see the :meth:`.Session.bind_mapper` + and :meth:`.Session.bind_table` methods. + + :param \class_: Specify an alternate class other than + ``sqlalchemy.orm.session.Session`` which should be used by the + returned class. This is the only argument that is local to the + :class:`.sessionmaker` function, and is not sent directly to the + constructor for ``Session``. + + :param _enable_transaction_accounting: Defaults to ``True``. A + legacy-only flag which when ``False`` disables *all* 0.5-style + object accounting on transaction boundaries, including auto-expiry + of instances on rollback and commit, maintenance of the "new" and + "deleted" lists upon rollback, and autoflush of pending changes + upon :meth:`~.Session.begin`, all of which are interdependent. + + :param expire_on_commit: Defaults to ``True``. When ``True``, all + instances will be fully expired after each :meth:`~.commit`, + so that all attribute/object access subsequent to a completed + transaction will load from the most recent database state. + + :param extension: An optional + :class:`~.SessionExtension` instance, or a list + of such instances, which will receive pre- and post- commit and + flush events, as well as a post-rollback event. **Deprecated.** + Please see :class:`.SessionEvents`. + + :param info: optional dictionary of arbitrary data to be associated + with this :class:`.Session`. Is available via the + :attr:`.Session.info` attribute. Note the dictionary is copied at + construction time so that modifications to the per- + :class:`.Session` dictionary will be local to that + :class:`.Session`. + + .. versionadded:: 0.9.0 + + :param query_cls: Class which should be used to create new Query + objects, as returned by the :meth:`~.Session.query` method. + Defaults to :class:`.Query`. + + :param twophase: When ``True``, all transactions will be started as + a "two phase" transaction, i.e. using the "two phase" semantics + of the database in use along with an XID. During a + :meth:`~.commit`, after :meth:`~.flush` has been issued for all + attached databases, the :meth:`~.TwoPhaseTransaction.prepare` + method on each database's :class:`.TwoPhaseTransaction` will be + called. This allows each database to roll back the entire + transaction, before each transaction is committed. + + :param weak_identity_map: Defaults to ``True`` - when set to + ``False``, objects placed in the :class:`.Session` will be + strongly referenced until explicitly removed or the + :class:`.Session` is closed. **Deprecated** - this option + is present to allow compatibility with older applications, but + it is recommended that strong references to objects + be maintained by the calling application + externally to the :class:`.Session` itself, + to the extent that is required by the application. + + """ + + if weak_identity_map: + self._identity_cls = identity.WeakInstanceDict + else: + util.warn_deprecated( + "weak_identity_map=False is deprecated. 
" + "It is present to allow compatibility with older " + "applications, but " + "it is recommended that strong references to " + "objects be maintained by the calling application " + "externally to the :class:`.Session` itself, " + "to the extent that is required by the application.") + + self._identity_cls = identity.StrongInstanceDict + self.identity_map = self._identity_cls() + + self._new = {} # InstanceState->object, strong refs object + self._deleted = {} # same + self.bind = bind + self.__binds = {} + self._flushing = False + self._warn_on_events = False + self.transaction = None + self.hash_key = _new_sessionid() + self.autoflush = autoflush + self.autocommit = autocommit + self.expire_on_commit = expire_on_commit + self._enable_transaction_accounting = _enable_transaction_accounting + self.twophase = twophase + self._query_cls = query_cls + if info: + self.info.update(info) + + if extension: + for ext in util.to_list(extension): + SessionExtension._adapt_listener(self, ext) + + if binds is not None: + for key, bind in binds.items(): + self._add_bind(key, bind) + + if not self.autocommit: + self.begin() + _sessions[self.hash_key] = self + + connection_callable = None + + transaction = None + """The current active or inactive :class:`.SessionTransaction`.""" + + @util.memoized_property + def info(self): + """A user-modifiable dictionary. + + The initial value of this dictionary can be populated using the + ``info`` argument to the :class:`.Session` constructor or + :class:`.sessionmaker` constructor or factory methods. The dictionary + here is always local to this :class:`.Session` and can be modified + independently of all other :class:`.Session` objects. + + .. versionadded:: 0.9.0 + + """ + return {} + + def begin(self, subtransactions=False, nested=False): + """Begin a transaction on this :class:`.Session`. + + If this Session is already within a transaction, either a plain + transaction or nested transaction, an error is raised, unless + ``subtransactions=True`` or ``nested=True`` is specified. + + The ``subtransactions=True`` flag indicates that this + :meth:`~.Session.begin` can create a subtransaction if a transaction + is already in progress. For documentation on subtransactions, please + see :ref:`session_subtransactions`. + + The ``nested`` flag begins a SAVEPOINT transaction and is equivalent + to calling :meth:`~.Session.begin_nested`. For documentation on + SAVEPOINT transactions, please see :ref:`session_begin_nested`. + + """ + if self.transaction is not None: + if subtransactions or nested: + self.transaction = self.transaction._begin( + nested=nested) + else: + raise sa_exc.InvalidRequestError( + "A transaction is already begun. Use " + "subtransactions=True to allow subtransactions.") + else: + self.transaction = SessionTransaction( + self, nested=nested) + return self.transaction # needed for __enter__/__exit__ hook + + def begin_nested(self): + """Begin a `nested` transaction on this Session. + + The target database(s) must support SQL SAVEPOINTs or a + SQLAlchemy-supported vendor implementation of the idea. + + For documentation on SAVEPOINT + transactions, please see :ref:`session_begin_nested`. + + """ + return self.begin(nested=True) + + def rollback(self): + """Rollback the current transaction in progress. + + If no transaction is in progress, this method is a pass-through. + + This method rolls back the current transaction or nested transaction + regardless of subtransactions being in effect. 
All subtransactions up
+        to the first real transaction are closed.  Subtransactions occur when
+        :meth:`.begin` is called multiple times.
+
+        .. seealso::
+
+            :ref:`session_rollback`
+
+        """
+        if self.transaction is None:
+            pass
+        else:
+            self.transaction.rollback()
+
+    def commit(self):
+        """Flush pending changes and commit the current transaction.
+
+        If no transaction is in progress, this method raises an
+        :exc:`~sqlalchemy.exc.InvalidRequestError`.
+
+        By default, the :class:`.Session` also expires all database
+        loaded state on all ORM-managed attributes after transaction commit.
+        This is so that subsequent operations load the most recent
+        data from the database.  This behavior can be disabled using
+        the ``expire_on_commit=False`` option to :class:`.sessionmaker` or
+        the :class:`.Session` constructor.
+
+        If a subtransaction is in effect (which occurs when begin() is called
+        multiple times), the subtransaction will be closed, and the next call
+        to ``commit()`` will operate on the enclosing transaction.
+
+        When using the :class:`.Session` in its default mode of
+        ``autocommit=False``, a new transaction will
+        be begun immediately after the commit, but note that the newly begun
+        transaction does *not* use any connection resources until the first
+        SQL is actually emitted.
+
+        .. seealso::
+
+            :ref:`session_committing`
+
+        """
+        if self.transaction is None:
+            if not self.autocommit:
+                self.begin()
+            else:
+                raise sa_exc.InvalidRequestError("No transaction is begun.")
+
+        self.transaction.commit()
+
+    def prepare(self):
+        """Prepare the current transaction in progress for two phase commit.
+
+        If no transaction is in progress, this method raises an
+        :exc:`~sqlalchemy.exc.InvalidRequestError`.
+
+        Only root transactions of two phase sessions can be prepared. If the
+        current transaction is not such, an
+        :exc:`~sqlalchemy.exc.InvalidRequestError` is raised.
+
+        """
+        if self.transaction is None:
+            if not self.autocommit:
+                self.begin()
+            else:
+                raise sa_exc.InvalidRequestError("No transaction is begun.")
+
+        self.transaction.prepare()
+
+    def connection(self, mapper=None, clause=None,
+                   bind=None,
+                   close_with_result=False,
+                   execution_options=None,
+                   **kw):
+        """Return a :class:`.Connection` object corresponding to this
+        :class:`.Session` object's transactional state.
+
+        If this :class:`.Session` is configured with ``autocommit=False``,
+        either the :class:`.Connection` corresponding to the current
+        transaction is returned, or if no transaction is in progress, a new
+        one is begun and the :class:`.Connection` returned (note that no
+        transactional state is established with the DBAPI until the first
+        SQL statement is emitted).
+
+        Alternatively, if this :class:`.Session` is configured with
+        ``autocommit=True``, an ad-hoc :class:`.Connection` is returned
+        using :meth:`.Engine.contextual_connect` on the underlying
+        :class:`.Engine`.
+
+        Ambiguity in multi-bind or unbound :class:`.Session` objects can be
+        resolved through any of the optional keyword arguments.  This
+        ultimately makes use of the :meth:`.get_bind` method for resolution.
+
+        :param bind:
+          Optional :class:`.Engine` to be used as the bind.  If
+          this engine is already involved in an ongoing transaction,
+          that connection will be used.  This argument takes precedence
+          over ``mapper``, ``clause``.
+
+        :param mapper:
+          Optional :func:`.mapper` mapped class, used to identify
+          the appropriate bind.  This argument takes precedence over
+          ``clause``.
+
+        :param clause:
+            A :class:`.ClauseElement` (i.e. 
:func:`~.sql.expression.select`, + :func:`~.sql.expression.text`, + etc.) which will be used to locate a bind, if a bind + cannot otherwise be identified. + + :param close_with_result: Passed to :meth:`.Engine.connect`, + indicating the :class:`.Connection` should be considered + "single use", automatically closing when the first result set is + closed. This flag only has an effect if this :class:`.Session` is + configured with ``autocommit=True`` and does not already have a + transaction in progress. + + :param execution_options: a dictionary of execution options that will + be passed to :meth:`.Connection.execution_options`, **when the + connection is first procured only**. If the connection is already + present within the :class:`.Session`, a warning is emitted and + the arguments are ignored. + + .. versionadded:: 0.9.9 + + .. seealso:: + + :ref:`session_transaction_isolation` + + :param \**kw: + Additional keyword arguments are sent to :meth:`get_bind()`, + allowing additional arguments to be passed to custom + implementations of :meth:`get_bind`. + + """ + if bind is None: + bind = self.get_bind(mapper, clause=clause, **kw) + + return self._connection_for_bind(bind, + close_with_result=close_with_result, + execution_options=execution_options) + + def _connection_for_bind(self, engine, execution_options=None, **kw): + if self.transaction is not None: + return self.transaction._connection_for_bind( + engine, execution_options) + else: + conn = engine.contextual_connect(**kw) + if execution_options: + conn = conn.execution_options(**execution_options) + return conn + + def execute(self, clause, params=None, mapper=None, bind=None, **kw): + """Execute a SQL expression construct or string statement within + the current transaction. + + Returns a :class:`.ResultProxy` representing + results of the statement execution, in the same manner as that of an + :class:`.Engine` or + :class:`.Connection`. + + E.g.:: + + result = session.execute( + user_table.select().where(user_table.c.id == 5) + ) + + :meth:`~.Session.execute` accepts any executable clause construct, + such as :func:`~.sql.expression.select`, + :func:`~.sql.expression.insert`, + :func:`~.sql.expression.update`, + :func:`~.sql.expression.delete`, and + :func:`~.sql.expression.text`. Plain SQL strings can be passed + as well, which in the case of :meth:`.Session.execute` only + will be interpreted the same as if it were passed via a + :func:`~.expression.text` construct. That is, the following usage:: + + result = session.execute( + "SELECT * FROM user WHERE id=:param", + {"param":5} + ) + + is equivalent to:: + + from sqlalchemy import text + result = session.execute( + text("SELECT * FROM user WHERE id=:param"), + {"param":5} + ) + + The second positional argument to :meth:`.Session.execute` is an + optional parameter set. Similar to that of + :meth:`.Connection.execute`, whether this is passed as a single + dictionary, or a list of dictionaries, determines whether the DBAPI + cursor's ``execute()`` or ``executemany()`` is used to execute the + statement. An INSERT construct may be invoked for a single row:: + + result = session.execute( + users.insert(), {"id": 7, "name": "somename"}) + + or for multiple rows:: + + result = session.execute(users.insert(), [ + {"id": 7, "name": "somename7"}, + {"id": 8, "name": "somename8"}, + {"id": 9, "name": "somename9"} + ]) + + The statement is executed within the current transactional context of + this :class:`.Session`. 
The :class:`.Connection` which is used + to execute the statement can also be acquired directly by + calling the :meth:`.Session.connection` method. Both methods use + a rule-based resolution scheme in order to determine the + :class:`.Connection`, which in the average case is derived directly + from the "bind" of the :class:`.Session` itself, and in other cases + can be based on the :func:`.mapper` + and :class:`.Table` objects passed to the method; see the + documentation for :meth:`.Session.get_bind` for a full description of + this scheme. + + The :meth:`.Session.execute` method does *not* invoke autoflush. + + The :class:`.ResultProxy` returned by the :meth:`.Session.execute` + method is returned with the "close_with_result" flag set to true; + the significance of this flag is that if this :class:`.Session` is + autocommitting and does not have a transaction-dedicated + :class:`.Connection` available, a temporary :class:`.Connection` is + established for the statement execution, which is closed (meaning, + returned to the connection pool) when the :class:`.ResultProxy` has + consumed all available data. This applies *only* when the + :class:`.Session` is configured with autocommit=True and no + transaction has been started. + + :param clause: + An executable statement (i.e. an :class:`.Executable` expression + such as :func:`.expression.select`) or string SQL statement + to be executed. + + :param params: + Optional dictionary, or list of dictionaries, containing + bound parameter values. If a single dictionary, single-row + execution occurs; if a list of dictionaries, an + "executemany" will be invoked. The keys in each dictionary + must correspond to parameter names present in the statement. + + :param mapper: + Optional :func:`.mapper` or mapped class, used to identify + the appropriate bind. This argument takes precedence over + ``clause`` when locating a bind. See :meth:`.Session.get_bind` + for more details. + + :param bind: + Optional :class:`.Engine` to be used as the bind. If + this engine is already involved in an ongoing transaction, + that connection will be used. This argument takes + precedence over ``mapper`` and ``clause`` when locating + a bind. + + :param \**kw: + Additional keyword arguments are sent to :meth:`.Session.get_bind()` + to allow extensibility of "bind" schemes. + + .. seealso:: + + :ref:`sqlexpression_toplevel` - Tutorial on using Core SQL + constructs. + + :ref:`connections_toplevel` - Further information on direct + statement execution. + + :meth:`.Connection.execute` - core level statement execution + method, which is :meth:`.Session.execute` ultimately uses + in order to execute the statement. + + """ + clause = expression._literal_as_text(clause) + + if bind is None: + bind = self.get_bind(mapper, clause=clause, **kw) + + return self._connection_for_bind( + bind, close_with_result=True).execute(clause, params or {}) + + def scalar(self, clause, params=None, mapper=None, bind=None, **kw): + """Like :meth:`~.Session.execute` but return a scalar result.""" + + return self.execute( + clause, params=params, mapper=mapper, bind=bind, **kw).scalar() + + def close(self): + """Close this Session. + + This clears all items and ends any transaction in progress. + + If this session were created with ``autocommit=False``, a new + transaction is immediately begun. Note that this new transaction does + not use any connection resources until they are first needed. 
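+
+        E.g., a typical commit-or-rollback block that always releases the
+        session's resources (a sketch)::
+
+            session = Session()
+            try:
+                session.add(some_object)
+                session.commit()
+            except:
+                session.rollback()
+                raise
+            finally:
+                session.close()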
+
+        """
+        self._close_impl(invalidate=False)
+
+    def invalidate(self):
+        """Close this Session, using connection invalidation.
+
+        This is a variant of :meth:`.Session.close` that will additionally
+        ensure that the :meth:`.Connection.invalidate` method will be called
+        on all :class:`.Connection` objects.  This can be called when
+        the database is known to be in a state where the connections are
+        no longer safe to be used.
+
+        E.g.::
+
+            try:
+                sess = Session()
+                sess.add(User())
+                sess.commit()
+            except gevent.Timeout:
+                sess.invalidate()
+                raise
+            except:
+                sess.rollback()
+                raise
+
+        This clears all items and ends any transaction in progress.
+
+        If this session were created with ``autocommit=False``, a new
+        transaction is immediately begun.  Note that this new transaction does
+        not use any connection resources until they are first needed.
+
+        .. versionadded:: 0.9.9
+
+        """
+        self._close_impl(invalidate=True)
+
+    def _close_impl(self, invalidate):
+        self.expunge_all()
+        if self.transaction is not None:
+            for transaction in self.transaction._iterate_parents():
+                transaction.close(invalidate)
+
+    def expunge_all(self):
+        """Remove all object instances from this ``Session``.
+
+        This is equivalent to calling ``expunge(obj)`` on all objects in this
+        ``Session``.
+
+        """
+        for state in self.identity_map.all_states() + list(self._new):
+            state._detach()
+
+        self.identity_map = self._identity_cls()
+        self._new = {}
+        self._deleted = {}
+
+    # TODO: need much more test coverage for bind_mapper() and similar !
+    # TODO: + crystallize + document resolution order
+    #       vis. bind_mapper/bind_table
+
+    def _add_bind(self, key, bind):
+        try:
+            insp = inspect(key)
+        except sa_exc.NoInspectionAvailable:
+            if not isinstance(key, type):
+                raise exc.ArgumentError(
+                    "Not acceptable bind target: %s" %
+                    key)
+            else:
+                self.__binds[key] = bind
+        else:
+            if insp.is_selectable:
+                self.__binds[insp] = bind
+            elif insp.is_mapper:
+                self.__binds[insp.class_] = bind
+                for selectable in insp._all_tables:
+                    self.__binds[selectable] = bind
+            else:
+                raise exc.ArgumentError(
+                    "Not acceptable bind target: %s" %
+                    key)
+
+    def bind_mapper(self, mapper, bind):
+        """Associate a :class:`.Mapper` with a "bind", e.g. a :class:`.Engine`
+        or :class:`.Connection`.
+
+        The given mapper is added to a lookup used by the
+        :meth:`.Session.get_bind` method.
+
+        """
+        self._add_bind(mapper, bind)
+
+    def bind_table(self, table, bind):
+        """Associate a :class:`.Table` with a "bind", e.g. a :class:`.Engine`
+        or :class:`.Connection`.
+
+        The given table is added to a lookup used by the
+        :meth:`.Session.get_bind` method.
+
+        """
+        self._add_bind(table, bind)
+
+    def get_bind(self, mapper=None, clause=None):
+        """Return a "bind" to which this :class:`.Session` is bound.
+
+        The "bind" is usually an instance of :class:`.Engine`,
+        except in the case where the :class:`.Session` has been
+        explicitly bound directly to a :class:`.Connection`.
+
+        For a multiply-bound or unbound :class:`.Session`, the
+        ``mapper`` or ``clause`` arguments are used to determine the
+        appropriate bind to return.
+
+        Note that the "mapper" argument is usually present
+        when :meth:`.Session.get_bind` is called via an ORM
+        operation such as a :meth:`.Session.query`, each
+        individual INSERT/UPDATE/DELETE operation within a
+        :meth:`.Session.flush` call, etc.
+
+        The order of resolution is:
+
+        1. if mapper given and session.binds is present,
+           locate a bind based on mapper.
+        2. 
if clause given and session.binds is present, + locate a bind based on :class:`.Table` objects + found in the given clause present in session.binds. + 3. if session.bind is present, return that. + 4. if clause given, attempt to return a bind + linked to the :class:`.MetaData` ultimately + associated with the clause. + 5. if mapper given, attempt to return a bind + linked to the :class:`.MetaData` ultimately + associated with the :class:`.Table` or other + selectable to which the mapper is mapped. + 6. No bind can be found, :exc:`~sqlalchemy.exc.UnboundExecutionError` + is raised. + + :param mapper: + Optional :func:`.mapper` mapped class or instance of + :class:`.Mapper`. The bind can be derived from a :class:`.Mapper` + first by consulting the "binds" map associated with this + :class:`.Session`, and secondly by consulting the :class:`.MetaData` + associated with the :class:`.Table` to which the :class:`.Mapper` + is mapped for a bind. + + :param clause: + A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`, + :func:`~.sql.expression.text`, + etc.). If the ``mapper`` argument is not present or could not + produce a bind, the given expression construct will be searched + for a bound element, typically a :class:`.Table` associated with + bound :class:`.MetaData`. + + """ + + if mapper is clause is None: + if self.bind: + return self.bind + else: + raise sa_exc.UnboundExecutionError( + "This session is not bound to a single Engine or " + "Connection, and no context was provided to locate " + "a binding.") + + if mapper is not None: + try: + mapper = inspect(mapper) + except sa_exc.NoInspectionAvailable: + if isinstance(mapper, type): + raise exc.UnmappedClassError(mapper) + else: + raise + + if self.__binds: + if mapper: + for cls in mapper.class_.__mro__: + if cls in self.__binds: + return self.__binds[cls] + if clause is None: + clause = mapper.mapped_table + + if clause is not None: + for t in sql_util.find_tables(clause, include_crud=True): + if t in self.__binds: + return self.__binds[t] + + if self.bind: + return self.bind + + if isinstance(clause, sql.expression.ClauseElement) and clause.bind: + return clause.bind + + if mapper and mapper.mapped_table.bind: + return mapper.mapped_table.bind + + context = [] + if mapper is not None: + context.append('mapper %s' % mapper) + if clause is not None: + context.append('SQL expression') + + raise sa_exc.UnboundExecutionError( + "Could not locate a bind configured on %s or this Session" % ( + ', '.join(context))) + + def query(self, *entities, **kwargs): + """Return a new :class:`.Query` object corresponding to this + :class:`.Session`.""" + + return self._query_cls(entities, self, **kwargs) + + @property + @util.contextmanager + def no_autoflush(self): + """Return a context manager that disables autoflush. + + e.g.:: + + with session.no_autoflush: + + some_object = SomeClass() + session.add(some_object) + # won't autoflush + some_object.related_thing = session.query(SomeRelated).first() + + Operations that proceed within the ``with:`` block + will not be subject to flushes occurring upon query + access. This is useful when initializing a series + of objects which involve existing database queries, + where the uncompleted object should not yet be flushed. + + .. 
versionadded:: 0.7.6 + + """ + autoflush = self.autoflush + self.autoflush = False + yield self + self.autoflush = autoflush + + def _autoflush(self): + if self.autoflush and not self._flushing: + try: + self.flush() + except sa_exc.StatementError as e: + # note we are reraising StatementError as opposed to + # raising FlushError with "chaining" to remain compatible + # with code that catches StatementError, IntegrityError, + # etc. + e.add_detail( + "raised as a result of Query-invoked autoflush; " + "consider using a session.no_autoflush block if this " + "flush is occurring prematurely") + util.raise_from_cause(e) + + def refresh(self, instance, attribute_names=None, lockmode=None): + """Expire and refresh the attributes on the given instance. + + A query will be issued to the database and all attributes will be + refreshed with their current database value. + + Lazy-loaded relational attributes will remain lazily loaded, so that + the instance-wide refresh operation will be followed immediately by + the lazy load of that attribute. + + Eagerly-loaded relational attributes will eagerly load within the + single refresh operation. + + Note that a highly isolated transaction will return the same values as + were previously read in that same transaction, regardless of changes + in database state outside of that transaction - usage of + :meth:`~Session.refresh` usually only makes sense if non-ORM SQL + statement were emitted in the ongoing transaction, or if autocommit + mode is turned on. + + :param attribute_names: optional. An iterable collection of + string attribute names indicating a subset of attributes to + be refreshed. + + :param lockmode: Passed to the :class:`~sqlalchemy.orm.query.Query` + as used by :meth:`~sqlalchemy.orm.query.Query.with_lockmode`. + + .. seealso:: + + :ref:`session_expire` - introductory material + + :meth:`.Session.expire` + + :meth:`.Session.expire_all` + + """ + try: + state = attributes.instance_state(instance) + except exc.NO_STATE: + raise exc.UnmappedInstanceError(instance) + + self._expire_state(state, attribute_names) + + if loading.load_on_ident( + self.query(object_mapper(instance)), + state.key, refresh_state=state, + lockmode=lockmode, + only_load_props=attribute_names) is None: + raise sa_exc.InvalidRequestError( + "Could not refresh instance '%s'" % + instance_str(instance)) + + def expire_all(self): + """Expires all persistent instances within this Session. + + When any attributes on a persistent instance is next accessed, + a query will be issued using the + :class:`.Session` object's current transactional context in order to + load all expired attributes for the given instance. Note that + a highly isolated transaction will return the same values as were + previously read in that same transaction, regardless of changes + in database state outside of that transaction. + + To expire individual objects and individual attributes + on those objects, use :meth:`Session.expire`. + + The :class:`.Session` object's default behavior is to + expire all state whenever the :meth:`Session.rollback` + or :meth:`Session.commit` methods are called, so that new + state can be loaded for the new transaction. For this reason, + calling :meth:`Session.expire_all` should not be needed when + autocommit is ``False``, assuming the transaction is isolated. + + .. 
seealso:: + + :ref:`session_expire` - introductory material + + :meth:`.Session.expire` + + :meth:`.Session.refresh` + + """ + for state in self.identity_map.all_states(): + state._expire(state.dict, self.identity_map._modified) + + def expire(self, instance, attribute_names=None): + """Expire the attributes on an instance. + + Marks the attributes of an instance as out of date. When an expired + attribute is next accessed, a query will be issued to the + :class:`.Session` object's current transactional context in order to + load all expired attributes for the given instance. Note that + a highly isolated transaction will return the same values as were + previously read in that same transaction, regardless of changes + in database state outside of that transaction. + + To expire all objects in the :class:`.Session` simultaneously, + use :meth:`Session.expire_all`. + + The :class:`.Session` object's default behavior is to + expire all state whenever the :meth:`Session.rollback` + or :meth:`Session.commit` methods are called, so that new + state can be loaded for the new transaction. For this reason, + calling :meth:`Session.expire` only makes sense for the specific + case that a non-ORM SQL statement was emitted in the current + transaction. + + :param instance: The instance to be refreshed. + :param attribute_names: optional list of string attribute names + indicating a subset of attributes to be expired. + + .. seealso:: + + :ref:`session_expire` - introductory material + + :meth:`.Session.expire` + + :meth:`.Session.refresh` + + """ + try: + state = attributes.instance_state(instance) + except exc.NO_STATE: + raise exc.UnmappedInstanceError(instance) + self._expire_state(state, attribute_names) + + def _expire_state(self, state, attribute_names): + self._validate_persistent(state) + if attribute_names: + state._expire_attributes(state.dict, attribute_names) + else: + # pre-fetch the full cascade since the expire is going to + # remove associations + cascaded = list(state.manager.mapper.cascade_iterator( + 'refresh-expire', state)) + self._conditional_expire(state) + for o, m, st_, dct_ in cascaded: + self._conditional_expire(st_) + + def _conditional_expire(self, state): + """Expire a state if persistent, else expunge if pending""" + + if state.key: + state._expire(state.dict, self.identity_map._modified) + elif state in self._new: + self._new.pop(state) + state._detach() + + @util.deprecated("0.7", "The non-weak-referencing identity map " + "feature is no longer needed.") + def prune(self): + """Remove unreferenced instances cached in the identity map. + + Note that this method is only meaningful if "weak_identity_map" is set + to False. The default weak identity map is self-pruning. + + Removes any object in this Session's identity map that is not + referenced in user code, modified, new or scheduled for deletion. + Returns the number of objects pruned. + + """ + return self.identity_map.prune() + + def expunge(self, instance): + """Remove the `instance` from this ``Session``. + + This will free all internal references to the instance. Cascading + will be applied according to the *expunge* cascade rule. 
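+
+        e.g. (a sketch, with a hypothetical mapped class ``SomeClass``)::
+
+            obj = session.query(SomeClass).first()
+            session.expunge(obj)      # obj is now detached
+            assert obj not in session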
+
+        """
+        try:
+            state = attributes.instance_state(instance)
+        except exc.NO_STATE:
+            raise exc.UnmappedInstanceError(instance)
+        if state.session_id is not self.hash_key:
+            raise sa_exc.InvalidRequestError(
+                "Instance %s is not present in this Session" %
+                state_str(state))
+
+        cascaded = list(state.manager.mapper.cascade_iterator(
+            'expunge', state))
+        self._expunge_state(state)
+        for o, m, st_, dct_ in cascaded:
+            self._expunge_state(st_)
+
+    def _expunge_state(self, state):
+        if state in self._new:
+            self._new.pop(state)
+            state._detach()
+        elif self.identity_map.contains_state(state):
+            self.identity_map.safe_discard(state)
+            self._deleted.pop(state, None)
+            state._detach()
+        elif self.transaction:
+            self.transaction._deleted.pop(state, None)
+            state._detach()
+
+    def _register_newly_persistent(self, states):
+        for state in states:
+            mapper = _state_mapper(state)
+
+            # prevent against last minute dereferences of the object
+            obj = state.obj()
+            if obj is not None:
+
+                instance_key = mapper._identity_key_from_state(state)
+
+                if _none_set.intersection(instance_key[1]) and \
+                        not mapper.allow_partial_pks or \
+                        _none_set.issuperset(instance_key[1]):
+                    raise exc.FlushError(
+                        "Instance %s has a NULL identity key.  If this is an "
+                        "auto-generated value, check that the database table "
+                        "allows generation of new primary key values, and "
+                        "that the mapped Column object is configured to "
+                        "expect these generated values.  Ensure also that "
+                        "this flush() is not occurring at an inappropriate "
+                        "time, such as within a load() event."
+                        % state_str(state)
+                    )
+
+                if state.key is None:
+                    state.key = instance_key
+                elif state.key != instance_key:
+                    # primary key switch. use safe_discard() in case another
+                    # state has already replaced this one in the identity
+                    # map (see test/orm/test_naturalpks.py ReversePKsTest)
+                    self.identity_map.safe_discard(state)
+                    if state in self.transaction._key_switches:
+                        orig_key = self.transaction._key_switches[state][0]
+                    else:
+                        orig_key = state.key
+                    self.transaction._key_switches[state] = (
+                        orig_key, instance_key)
+                    state.key = instance_key
+
+                self.identity_map.replace(state)
+
+        statelib.InstanceState._commit_all_states(
+            ((state, state.dict) for state in states),
+            self.identity_map
+        )
+
+        self._register_altered(states)
+        # remove from new last, might be the last strong ref
+        for state in set(states).intersection(self._new):
+            self._new.pop(state)
+
+    def _register_altered(self, states):
+        if self._enable_transaction_accounting and self.transaction:
+            for state in states:
+                if state in self._new:
+                    self.transaction._new[state] = True
+                else:
+                    self.transaction._dirty[state] = True
+
+    def _remove_newly_deleted(self, states):
+        for state in states:
+            if self._enable_transaction_accounting and self.transaction:
+                self.transaction._deleted[state] = True
+
+            self.identity_map.safe_discard(state)
+            self._deleted.pop(state, None)
+            state.deleted = True
+
+    def add(self, instance, _warn=True):
+        """Place an object in the ``Session``.
+
+        Its state will be persisted to the database on the next flush
+        operation.
+
+        Repeated calls to ``add()`` will be ignored. The opposite of ``add()``
+        is ``expunge()``.
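+
+        e.g., assuming a hypothetical mapped class ``User``::
+
+            user = User(name='ed')
+            session.add(user)    # "pending" - INSERT not yet emitted
+            session.flush()      # INSERT emitted; user is now "persistent"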
+ + """ + if _warn and self._warn_on_events: + self._flush_warning("Session.add()") + + try: + state = attributes.instance_state(instance) + except exc.NO_STATE: + raise exc.UnmappedInstanceError(instance) + + self._save_or_update_state(state) + + def add_all(self, instances): + """Add the given collection of instances to this ``Session``.""" + + if self._warn_on_events: + self._flush_warning("Session.add_all()") + + for instance in instances: + self.add(instance, _warn=False) + + def _save_or_update_state(self, state): + self._save_or_update_impl(state) + + mapper = _state_mapper(state) + for o, m, st_, dct_ in mapper.cascade_iterator( + 'save-update', + state, + halt_on=self._contains_state): + self._save_or_update_impl(st_) + + def delete(self, instance): + """Mark an instance as deleted. + + The database delete operation occurs upon ``flush()``. + + """ + if self._warn_on_events: + self._flush_warning("Session.delete()") + + try: + state = attributes.instance_state(instance) + except exc.NO_STATE: + raise exc.UnmappedInstanceError(instance) + + if state.key is None: + raise sa_exc.InvalidRequestError( + "Instance '%s' is not persisted" % + state_str(state)) + + if state in self._deleted: + return + + # ensure object is attached to allow the + # cascade operation to load deferred attributes + # and collections + self._attach(state, include_before=True) + + # grab the cascades before adding the item to the deleted list + # so that autoflush does not delete the item + # the strong reference to the instance itself is significant here + cascade_states = list(state.manager.mapper.cascade_iterator( + 'delete', state)) + + self._deleted[state] = state.obj() + self.identity_map.add(state) + + for o, m, st_, dct_ in cascade_states: + self._delete_impl(st_) + + def merge(self, instance, load=True): + """Copy the state of a given instance into a corresponding instance + within this :class:`.Session`. + + :meth:`.Session.merge` examines the primary key attributes of the + source instance, and attempts to reconcile it with an instance of the + same primary key in the session. If not found locally, it attempts + to load the object from the database based on primary key, and if + none can be located, creates a new instance. The state of each + attribute on the source instance is then copied to the target + instance. The resulting target instance is then returned by the + method; the original source instance is left unmodified, and + un-associated with the :class:`.Session` if not already. + + This operation cascades to associated instances if the association is + mapped with ``cascade="merge"``. + + See :ref:`unitofwork_merging` for a detailed discussion of merging. + + :param instance: Instance to be merged. + :param load: Boolean, when False, :meth:`.merge` switches into + a "high performance" mode which causes it to forego emitting history + events as well as all database access. This flag is used for + cases such as transferring graphs of objects into a :class:`.Session` + from a second level cache, or to transfer just-loaded objects + into the :class:`.Session` owned by a worker thread or process + without re-querying the database. + + The ``load=False`` use case adds the caveat that the given + object has to be in a "clean" state, that is, has no pending changes + to be flushed - even if the incoming object is detached from any + :class:`.Session`. 
This is so that when
+          the merge operation populates local attributes and
+          cascades to related objects and
+          collections, the values can be "stamped" onto the
+          target object as is, without generating any history or attribute
+          events, and without the need to reconcile the incoming data with
+          any existing related objects or collections that might not
+          be loaded.  The resulting objects from ``load=False`` are always
+          produced as "clean", so it is only appropriate that the given objects
+          should be "clean" as well, else this suggests a mis-use of the
+          method.
+
+        """
+
+        if self._warn_on_events:
+            self._flush_warning("Session.merge()")
+
+        _recursive = {}
+
+        if load:
+            # flush current contents if we expect to load data
+            self._autoflush()
+
+        object_mapper(instance)  # verify mapped
+        autoflush = self.autoflush
+        try:
+            self.autoflush = False
+            return self._merge(
+                attributes.instance_state(instance),
+                attributes.instance_dict(instance),
+                load=load, _recursive=_recursive)
+        finally:
+            self.autoflush = autoflush
+
+    def _merge(self, state, state_dict, load=True, _recursive=None):
+        mapper = _state_mapper(state)
+        if state in _recursive:
+            return _recursive[state]
+
+        new_instance = False
+        key = state.key
+
+        if key is None:
+            if not load:
+                raise sa_exc.InvalidRequestError(
+                    "merge() with load=False option does not support "
+                    "transient (i.e. unpersisted) objects.  flush() "
+                    "all changes on mapped instances before merging with "
+                    "load=False.")
+            key = mapper._identity_key_from_state(state)
+            key_is_persistent = attributes.NEVER_SET not in key[1]
+        else:
+            key_is_persistent = True
+
+        if key in self.identity_map:
+            merged = self.identity_map[key]
+
+        elif not load:
+            if state.modified:
+                raise sa_exc.InvalidRequestError(
+                    "merge() with load=False option does not support "
+                    "objects marked as 'dirty'.  flush() all changes on "
+                    "mapped instances before merging with load=False.")
+            merged = mapper.class_manager.new_instance()
+            merged_state = attributes.instance_state(merged)
+            merged_state.key = key
+            self._update_impl(merged_state)
+            new_instance = True
+
+        elif key_is_persistent and (
+                not _none_set.intersection(key[1]) or
+                (mapper.allow_partial_pks and
+                 not _none_set.issuperset(key[1]))):
+            merged = self.query(mapper.class_).get(key[1])
+        else:
+            merged = None
+
+        if merged is None:
+            merged = mapper.class_manager.new_instance()
+            merged_state = attributes.instance_state(merged)
+            merged_dict = attributes.instance_dict(merged)
+            new_instance = True
+            self._save_or_update_state(merged_state)
+        else:
+            merged_state = attributes.instance_state(merged)
+            merged_dict = attributes.instance_dict(merged)
+
+        _recursive[state] = merged
+
+        # check that we didn't just pull the exact same
+        # state out.
+        if state is not merged_state:
+            # version check if applicable
+            if mapper.version_id_col is not None:
+                existing_version = mapper._get_state_attr_by_column(
+                    state,
+                    state_dict,
+                    mapper.version_id_col,
+                    passive=attributes.PASSIVE_NO_INITIALIZE)
+
+                merged_version = mapper._get_state_attr_by_column(
+                    merged_state,
+                    merged_dict,
+                    mapper.version_id_col,
+                    passive=attributes.PASSIVE_NO_INITIALIZE)
+
+                if existing_version is not attributes.PASSIVE_NO_RESULT and \
+                        merged_version is not attributes.PASSIVE_NO_RESULT and \
+                        existing_version != merged_version:
+                    raise exc.StaleDataError(
+                        "Version id '%s' on merged state %s "
+                        "does not match existing version '%s'. "
+                        "Leave the version attribute unset when "
+                        "merging to update the most recent version."
+ % ( + existing_version, + state_str(merged_state), + merged_version + )) + + merged_state.load_path = state.load_path + merged_state.load_options = state.load_options + + for prop in mapper.iterate_properties: + prop.merge(self, state, state_dict, + merged_state, merged_dict, + load, _recursive) + + if not load: + # remove any history + merged_state._commit_all(merged_dict, self.identity_map) + + if new_instance: + merged_state.manager.dispatch.load(merged_state, None) + return merged + + def _validate_persistent(self, state): + if not self.identity_map.contains_state(state): + raise sa_exc.InvalidRequestError( + "Instance '%s' is not persistent within this Session" % + state_str(state)) + + def _save_impl(self, state): + if state.key is not None: + raise sa_exc.InvalidRequestError( + "Object '%s' already has an identity - " + "it can't be registered as pending" % state_str(state)) + + self._before_attach(state) + if state not in self._new: + self._new[state] = state.obj() + state.insert_order = len(self._new) + self._attach(state) + + def _update_impl(self, state, discard_existing=False): + if (self.identity_map.contains_state(state) and + state not in self._deleted): + return + + if state.key is None: + raise sa_exc.InvalidRequestError( + "Instance '%s' is not persisted" % + state_str(state)) + + if state.deleted: + raise sa_exc.InvalidRequestError( + "Instance '%s' has been deleted. Use the make_transient() " + "function to send this object back to the transient state." % + state_str(state) + ) + self._before_attach(state, check_identity_map=False) + self._deleted.pop(state, None) + if discard_existing: + self.identity_map.replace(state) + else: + self.identity_map.add(state) + self._attach(state) + + def _save_or_update_impl(self, state): + if state.key is None: + self._save_impl(state) + else: + self._update_impl(state) + + def _delete_impl(self, state): + if state in self._deleted: + return + + if state.key is None: + return + + self._attach(state, include_before=True) + self._deleted[state] = state.obj() + self.identity_map.add(state) + + def enable_relationship_loading(self, obj): + """Associate an object with this :class:`.Session` for related + object loading. + + .. warning:: + + :meth:`.enable_relationship_loading` exists to serve special + use cases and is not recommended for general use. + + Accesses of attributes mapped with :func:`.relationship` + will attempt to load a value from the database using this + :class:`.Session` as the source of connectivity. The values + will be loaded based on foreign key values present on this + object - it follows that this functionality + generally only works for many-to-one-relationships. + + The object will be attached to this session, but will + **not** participate in any persistence operations; its state + for almost all purposes will remain either "transient" or + "detached", except for the case of relationship loading. + + Also note that backrefs will often not work as expected. + Altering a relationship-bound attribute on the target object + may not fire off a backref event, if the effective value + is what was already loaded from a foreign-key-holding value. + + The :meth:`.Session.enable_relationship_loading` method is + similar to the ``load_on_pending`` flag on :func:`.relationship`. + Unlike that flag, :meth:`.Session.enable_relationship_loading` allows + an object to remain transient while still being able to load + related items. 
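+
+        A sketch, assuming a hypothetical ``Address`` class whose
+        ``user`` :func:`.relationship` is a many-to-one against its
+        ``user_id`` foreign key attribute::
+
+            address = Address(user_id=5)   # transient; never add()-ed
+            session.enable_relationship_loading(address)
+            user = address.user            # many-to-one loads via this Session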
+ + To make a transient object associated with a :class:`.Session` + via :meth:`.Session.enable_relationship_loading` pending, add + it to the :class:`.Session` using :meth:`.Session.add` normally. + + :meth:`.Session.enable_relationship_loading` does not improve + behavior when the ORM is used normally - object references should be + constructed at the object level, not at the foreign key level, so + that they are present in an ordinary way before flush() + proceeds. This method is not intended for general use. + + .. versionadded:: 0.8 + + .. seealso:: + + ``load_on_pending`` at :func:`.relationship` - this flag + allows per-relationship loading of many-to-ones on items that + are pending. + + """ + state = attributes.instance_state(obj) + self._attach(state, include_before=True) + state._load_pending = True + + def _before_attach(self, state, check_identity_map=True): + if state.session_id != self.hash_key and \ + self.dispatch.before_attach: + self.dispatch.before_attach(self, state.obj()) + + if check_identity_map and state.key and \ + state.key in self.identity_map and \ + not self.identity_map.contains_state(state): + raise sa_exc.InvalidRequestError( + "Can't attach instance " + "%s; another instance with key %s is already " + "present in this session." % (state_str(state), state.key)) + + if state.session_id and \ + state.session_id is not self.hash_key and \ + state.session_id in _sessions: + raise sa_exc.InvalidRequestError( + "Object '%s' is already attached to session '%s' " + "(this is '%s')" % (state_str(state), + state.session_id, self.hash_key)) + + def _attach(self, state, include_before=False): + + if state.session_id != self.hash_key: + if include_before: + self._before_attach(state) + state.session_id = self.hash_key + if state.modified and state._strong_obj is None: + state._strong_obj = state.obj() + if self.dispatch.after_attach: + self.dispatch.after_attach(self, state.obj()) + + def __contains__(self, instance): + """Return True if the instance is associated with this session. + + The instance may be pending or persistent within the Session for a + result of True. + + """ + try: + state = attributes.instance_state(instance) + except exc.NO_STATE: + raise exc.UnmappedInstanceError(instance) + return self._contains_state(state) + + def __iter__(self): + """Iterate over all pending or persistent instances within this + Session. + + """ + return iter( + list(self._new.values()) + list(self.identity_map.values())) + + def _contains_state(self, state): + return state in self._new or self.identity_map.contains_state(state) + + def flush(self, objects=None): + """Flush all the object changes to the database. + + Writes out all pending object creations, deletions and modifications + to the database as INSERTs, DELETEs, UPDATEs, etc. Operations are + automatically ordered by the Session's unit of work dependency + solver. + + Database operations will be issued in the current transactional + context and do not affect the state of the transaction, unless an + error occurs, in which case the entire transaction is rolled back. + You may flush() as often as you like within a transaction to move + changes from Python to the database's transaction buffer. + + For ``autocommit`` Sessions with no active manual transaction, flush() + will create a transaction on the fly that surrounds the entire set of + operations into the flush. + + :param objects: Optional; restricts the flush operation to operate + only on elements that are in the given collection. 
+ + This feature is for an extremely narrow set of use cases where + particular objects may need to be operated upon before the + full flush() occurs. It is not intended for general use. + + """ + + if self._flushing: + raise sa_exc.InvalidRequestError("Session is already flushing") + + if self._is_clean(): + return + try: + self._flushing = True + self._flush(objects) + finally: + self._flushing = False + + def _flush_warning(self, method): + util.warn( + "Usage of the '%s' operation is not currently supported " + "within the execution stage of the flush process. " + "Results may not be consistent. Consider using alternative " + "event listeners or connection-level operations instead." + % method) + + def _is_clean(self): + return not self.identity_map.check_modified() and \ + not self._deleted and \ + not self._new + + def _flush(self, objects=None): + + dirty = self._dirty_states + if not dirty and not self._deleted and not self._new: + self.identity_map._modified.clear() + return + + flush_context = UOWTransaction(self) + + if self.dispatch.before_flush: + self.dispatch.before_flush(self, flush_context, objects) + # re-establish "dirty states" in case the listeners + # added + dirty = self._dirty_states + + deleted = set(self._deleted) + new = set(self._new) + + dirty = set(dirty).difference(deleted) + + # create the set of all objects we want to operate upon + if objects: + # specific list passed in + objset = set() + for o in objects: + try: + state = attributes.instance_state(o) + except exc.NO_STATE: + raise exc.UnmappedInstanceError(o) + objset.add(state) + else: + objset = None + + # store objects whose fate has been decided + processed = set() + + # put all saves/updates into the flush context. detect top-level + # orphans and throw them into deleted. + if objset: + proc = new.union(dirty).intersection(objset).difference(deleted) + else: + proc = new.union(dirty).difference(deleted) + + for state in proc: + is_orphan = ( + _state_mapper(state)._is_orphan(state) and state.has_identity) + flush_context.register_object(state, isdelete=is_orphan) + processed.add(state) + + # put all remaining deletes into the flush context. + if objset: + proc = deleted.intersection(objset).difference(processed) + else: + proc = deleted.difference(processed) + for state in proc: + flush_context.register_object(state, isdelete=True) + + if not flush_context.has_work: + return + + flush_context.transaction = transaction = self.begin( + subtransactions=True) + try: + self._warn_on_events = True + try: + flush_context.execute() + finally: + self._warn_on_events = False + + self.dispatch.after_flush(self, flush_context) + + flush_context.finalize_flush_changes() + + if not objects and self.identity_map._modified: + len_ = len(self.identity_map._modified) + + statelib.InstanceState._commit_all_states( + [(state, state.dict) for state in + self.identity_map._modified], + instance_dict=self.identity_map) + util.warn("Attribute history events accumulated on %d " + "previously clean instances " + "within inner-flush event handlers have been " + "reset, and will not result in database updates. " + "Consider using set_committed_value() within " + "inner-flush event handlers to avoid this warning." 
+ % len_) + + # useful assertions: + # if not objects: + # assert not self.identity_map._modified + # else: + # assert self.identity_map._modified == \ + # self.identity_map._modified.difference(objects) + + self.dispatch.after_flush_postexec(self, flush_context) + + transaction.commit() + + except: + with util.safe_reraise(): + transaction.rollback(_capture_exception=True) + + def bulk_save_objects( + self, objects, return_defaults=False, update_changed_only=True): + """Perform a bulk save of the given list of objects. + + The bulk save feature allows mapped objects to be used as the + source of simple INSERT and UPDATE operations which can be more easily + grouped together into higher performing "executemany" + operations; the extraction of data from the objects is also performed + using a lower-latency process that ignores whether or not attributes + have actually been modified in the case of UPDATEs, and also ignores + SQL expressions. + + The objects as given are not added to the session and no additional + state is established on them, unless the ``return_defaults`` flag + is also set, in which case primary key attributes and server-side + default values will be populated. + + .. versionadded:: 1.0.0 + + .. warning:: + + The bulk save feature allows for a lower-latency INSERT/UPDATE + of rows at the expense of most other unit-of-work features. + Features such as object management, relationship handling, + and SQL clause support are **silently omitted** in favor of raw + INSERT/UPDATES of records. + + **Please read the list of caveats at** :ref:`bulk_operations` + **before using this method, and fully test and confirm the + functionality of all code developed using these systems.** + + :param objects: a list of mapped object instances. The mapped + objects are persisted as is, and are **not** associated with the + :class:`.Session` afterwards. + + For each object, whether the object is sent as an INSERT or an + UPDATE is dependent on the same rules used by the :class:`.Session` + in traditional operation; if the object has the + :attr:`.InstanceState.key` + attribute set, then the object is assumed to be "detached" and + will result in an UPDATE. Otherwise, an INSERT is used. + + In the case of an UPDATE, statements are grouped based on which + attributes have changed, and are thus to be the subject of each + SET clause. If ``update_changed_only`` is False, then all + attributes present within each object are applied to the UPDATE + statement, which may help in allowing the statements to be grouped + together into a larger executemany(), and will also reduce the + overhead of checking history on attributes. + + :param return_defaults: when True, rows that are missing values which + generate defaults, namely integer primary key defaults and sequences, + will be inserted **one at a time**, so that the primary key value + is available. In particular this will allow joined-inheritance + and other multi-table mappings to insert correctly without the need + to provide primary key values ahead of time; however, + :paramref:`.Session.bulk_save_objects.return_defaults` **greatly + reduces the performance gains** of the method overall. + + :param update_changed_only: when True, UPDATE statements are rendered + based on those attributes in each state that have logged changes. + When False, all attributes present are rendered into the SET clause + with the exception of primary key attributes. + + .. 
seealso:: + + :ref:`bulk_operations` + + :meth:`.Session.bulk_insert_mappings` + + :meth:`.Session.bulk_update_mappings` + + """ + for (mapper, isupdate), states in itertools.groupby( + (attributes.instance_state(obj) for obj in objects), + lambda state: (state.mapper, state.key is not None) + ): + self._bulk_save_mappings( + mapper, states, isupdate, True, + return_defaults, update_changed_only) + + def bulk_insert_mappings(self, mapper, mappings, return_defaults=False): + """Perform a bulk insert of the given list of mapping dictionaries. + + The bulk insert feature allows plain Python dictionaries to be used as + the source of simple INSERT operations which can be more easily + grouped together into higher performing "executemany" + operations. Using dictionaries, there is no "history" or session + state management features in use, reducing latency when inserting + large numbers of simple rows. + + The values within the dictionaries as given are typically passed + without modification into Core :meth:`.Insert` constructs, after + organizing the values within them across the tables to which + the given mapper is mapped. + + .. versionadded:: 1.0.0 + + .. warning:: + + The bulk insert feature allows for a lower-latency INSERT + of rows at the expense of most other unit-of-work features. + Features such as object management, relationship handling, + and SQL clause support are **silently omitted** in favor of raw + INSERT of records. + + **Please read the list of caveats at** :ref:`bulk_operations` + **before using this method, and fully test and confirm the + functionality of all code developed using these systems.** + + :param mapper: a mapped class, or the actual :class:`.Mapper` object, + representing the single kind of object represented within the mapping + list. + + :param mappings: a list of dictionaries, each one containing the state + of the mapped row to be inserted, in terms of the attribute names + on the mapped class. If the mapping refers to multiple tables, + such as a joined-inheritance mapping, each dictionary must contain + all keys to be populated into all tables. + + :param return_defaults: when True, rows that are missing values which + generate defaults, namely integer primary key defaults and sequences, + will be inserted **one at a time**, so that the primary key value + is available. In particular this will allow joined-inheritance + and other multi-table mappings to insert correctly without the need + to provide primary + key values ahead of time; however, + :paramref:`.Session.bulk_insert_mappings.return_defaults` + **greatly reduces the performance gains** of the method overall. + If the rows + to be inserted only refer to a single table, then there is no + reason this flag should be set as the returned default information + is not used. + + + .. seealso:: + + :ref:`bulk_operations` + + :meth:`.Session.bulk_save_objects` + + :meth:`.Session.bulk_update_mappings` + + """ + self._bulk_save_mappings( + mapper, mappings, False, False, return_defaults, False) + + def bulk_update_mappings(self, mapper, mappings): + """Perform a bulk update of the given list of mapping dictionaries. + + The bulk update feature allows plain Python dictionaries to be used as + the source of simple UPDATE operations which can be more easily + grouped together into higher performing "executemany" + operations. Using dictionaries, there is no "history" or session + state management features in use, reducing latency when updating + large numbers of simple rows. + + .. 
versionadded:: 1.0.0 + + .. warning:: + + The bulk update feature allows for a lower-latency UPDATE + of rows at the expense of most other unit-of-work features. + Features such as object management, relationship handling, + and SQL clause support are **silently omitted** in favor of raw + UPDATES of records. + + **Please read the list of caveats at** :ref:`bulk_operations` + **before using this method, and fully test and confirm the + functionality of all code developed using these systems.** + + :param mapper: a mapped class, or the actual :class:`.Mapper` object, + representing the single kind of object represented within the mapping + list. + + :param mappings: a list of dictionaries, each one containing the state + of the mapped row to be updated, in terms of the attribute names + on the mapped class. If the mapping refers to multiple tables, + such as a joined-inheritance mapping, each dictionary may contain + keys corresponding to all tables. All those keys which are present + and are not part of the primary key are applied to the SET clause + of the UPDATE statement; the primary key values, which are required, + are applied to the WHERE clause. + + + .. seealso:: + + :ref:`bulk_operations` + + :meth:`.Session.bulk_insert_mappings` + + :meth:`.Session.bulk_save_objects` + + """ + self._bulk_save_mappings(mapper, mappings, True, False, False, False) + + def _bulk_save_mappings( + self, mapper, mappings, isupdate, isstates, + return_defaults, update_changed_only): + mapper = _class_to_mapper(mapper) + self._flushing = True + + transaction = self.begin( + subtransactions=True) + try: + if isupdate: + persistence._bulk_update( + mapper, mappings, transaction, + isstates, update_changed_only) + else: + persistence._bulk_insert( + mapper, mappings, transaction, isstates, return_defaults) + transaction.commit() + + except: + with util.safe_reraise(): + transaction.rollback(_capture_exception=True) + finally: + self._flushing = False + + def is_modified(self, instance, include_collections=True, + passive=True): + """Return ``True`` if the given instance has locally + modified attributes. + + This method retrieves the history for each instrumented + attribute on the instance and performs a comparison of the current + value to its previously committed value, if any. + + It is in effect a more expensive and accurate + version of checking for the given instance in the + :attr:`.Session.dirty` collection; a full test for + each attribute's net "dirty" status is performed. + + E.g.:: + + return session.is_modified(someobject) + + .. versionchanged:: 0.8 + When using SQLAlchemy 0.7 and earlier, the ``passive`` + flag should **always** be explicitly set to ``True``, + else SQL loads/autoflushes may proceed which can affect + the modified state itself: + ``session.is_modified(someobject, passive=True)``\ . + In 0.8 and above, the behavior is corrected and + this flag is ignored. + + A few caveats to this method apply: + + * Instances present in the :attr:`.Session.dirty` collection may + report ``False`` when tested with this method. This is because + the object may have received change events via attribute mutation, + thus placing it in :attr:`.Session.dirty`, but ultimately the state + is the same as that loaded from the database, resulting in no net + change here. 
+ * Scalar attributes may not have recorded the previously set
+ value when a new value was applied, if the attribute was not loaded,
+ or was expired, at the time the new value was received - in these
+ cases, the attribute is assumed to have a change, even if there is
+ ultimately no net change against its database value. SQLAlchemy in
+ most cases does not need the "old" value when a set event occurs, so
+ it skips the expense of a SQL call if the old value isn't present,
+ based on the assumption that an UPDATE of the scalar value is
+ usually needed, and in those few cases where it isn't, is less
+ expensive on average than issuing a defensive SELECT.
+
+ The "old" value is fetched unconditionally upon set only if the
+ attribute container has the ``active_history`` flag set to ``True``.
+ This flag is set typically for primary key attributes and scalar
+ object references that are not a simple many-to-one. To set this
+ flag for any arbitrary mapped column, use the ``active_history``
+ argument with :func:`.column_property`.
+
+ :param instance: mapped instance to be tested for pending changes.
+ :param include_collections: Indicates if multivalued collections
+ should be included in the operation. Setting this to ``False`` is a
+ way to detect only local-column based properties (i.e. scalar columns
+ or many-to-one foreign keys) that would result in an UPDATE for this
+ instance upon flush.
+ :param passive:
+ .. versionchanged:: 0.8
+ Ignored for backwards compatibility.
+ When using SQLAlchemy 0.7 and earlier, this flag should always
+ be set to ``True``.
+
+ """
+ state = object_state(instance)
+
+ if not state.modified:
+ return False
+
+ dict_ = state.dict
+
+ for attr in state.manager.attributes:
+ if \
+ (
+ not include_collections and
+ hasattr(attr.impl, 'get_collection')
+ ) or not hasattr(attr.impl, 'get_history'):
+ continue
+
+ (added, unchanged, deleted) = \
+ attr.impl.get_history(state, dict_,
+ passive=attributes.NO_CHANGE)
+
+ if added or deleted:
+ return True
+ else:
+ return False
+
+ @property
+ def is_active(self):
+ """True if this :class:`.Session` is in "transaction mode" and
+ is not in "partial rollback" state.
+
+ The :class:`.Session` in its default mode of ``autocommit=False``
+ is essentially always in "transaction mode", in that a
+ :class:`.SessionTransaction` is associated with it as soon as
+ it is instantiated. This :class:`.SessionTransaction` is immediately
+ replaced with a new one as soon as it is ended, due to a rollback,
+ commit, or close operation.
+
+ "Transaction mode" does *not* indicate whether
+ or not actual database connection resources are in use; the
+ :class:`.SessionTransaction` object coordinates among zero or more
+ actual database transactions, and starts out with none, accumulating
+ individual DBAPI connections as different data sources are used
+ within its scope. The best way to track when a particular
+ :class:`.Session` has actually begun to use DBAPI resources is to
+ implement a listener using the :meth:`.SessionEvents.after_begin`
+ method, which will deliver both the :class:`.Session` as well as the
+ target :class:`.Connection` to a user-defined event listener.
+
+ The "partial rollback" state refers to when an "inner" transaction,
+ typically used during a flush, encounters an error and emits a
+ rollback of the DBAPI connection. At this point, the
+ :class:`.Session` is in "partial rollback" and waits for the user to
+ call :meth:`.Session.rollback`, in order to close out the
+ transaction stack.
It is in this "partial rollback" period that the
+ :attr:`.is_active` flag returns False. After the call to
+ :meth:`.Session.rollback`, the :class:`.SessionTransaction` is
+ replaced with a new one and :attr:`.is_active` returns ``True`` again.
+
+ When a :class:`.Session` is used in ``autocommit=True`` mode, the
+ :class:`.SessionTransaction` is only instantiated within the scope
+ of a flush call, or when :meth:`.Session.begin` is called. So
+ :attr:`.is_active` will always be ``False`` outside of a flush or
+ :meth:`.Session.begin` block in this mode, and will be ``True``
+ within the :meth:`.Session.begin` block as long as it doesn't enter
+ "partial rollback" state.
+
+ From all the above, it follows that the only purpose to this flag is
+ for application frameworks that wish to detect if a "rollback" is
+ necessary within a generic error handling routine, for
+ :class:`.Session` objects that would otherwise be in
+ "partial rollback" mode. In a typical integration case, this is also
+ not necessary as it is standard practice to emit
+ :meth:`.Session.rollback` unconditionally within the outermost
+ exception catch.
+
+ To track the transactional state of a :class:`.Session` fully,
+ use event listeners, primarily the :meth:`.SessionEvents.after_begin`,
+ :meth:`.SessionEvents.after_commit`,
+ :meth:`.SessionEvents.after_rollback` and related events.
+
+ """
+ return self.transaction and self.transaction.is_active
+
+ identity_map = None
+ """A mapping of object identities to objects themselves.
+
+ Iterating through ``Session.identity_map.values()`` provides
+ access to the full set of persistent objects (i.e., those
+ that have row identity) currently in the session.
+
+ .. seealso::
+
+ :func:`.identity_key` - helper function to produce the keys used
+ in this dictionary.
+
+ """
+
+ @property
+ def _dirty_states(self):
+ """The set of all persistent states considered dirty.
+
+ This method returns all states that were modified including
+ those that were possibly deleted.
+
+ """
+ return self.identity_map._dirty_states()
+
+ @property
+ def dirty(self):
+ """The set of all persistent instances considered dirty.
+
+ E.g.::
+
+ some_mapped_object in session.dirty
+
+ Instances are considered dirty when they were modified but not
+ deleted.
+
+ Note that this 'dirty' calculation is 'optimistic'; most
+ attribute-setting or collection modification operations will
+ mark an instance as 'dirty' and place it in this set, even if
+ there is no net change to the attribute's value. At flush
+ time, the value of each attribute is compared to its
+ previously saved value, and if there's no net change, no SQL
+ operation will occur (this is a more expensive operation so
+ it's only done at flush time).
+
+ To check if an instance has actionable net changes to its
+ attributes, use the :meth:`.Session.is_modified` method.
+
+ """
+ return util.IdentitySet(
+ [state.obj()
+ for state in self._dirty_states
+ if state not in self._deleted])
+
+ @property
+ def deleted(self):
+ "The set of all instances marked as 'deleted' within this ``Session``."
+
+ return util.IdentitySet(list(self._deleted.values()))
+
+ @property
+ def new(self):
+ "The set of all instances marked as 'new' within this ``Session``."
+
+ return util.IdentitySet(list(self._new.values()))
+
+
+class sessionmaker(_SessionClassMethods):
+ """A configurable :class:`.Session` factory.
+
+ The :class:`.sessionmaker` factory generates new
+ :class:`.Session` objects when called, creating them given
+ the configurational arguments established here.
+
+ e.g.::
+
+ # global scope
+ Session = sessionmaker(autoflush=False)
+
+ # later, in a local scope, create and use a session:
+ sess = Session()
+
+ Any keyword arguments sent to the constructor itself will override the
+ "configured" keywords::
+
+ Session = sessionmaker()
+
+ # bind an individual session to a connection
+ sess = Session(bind=connection)
+
+ The class also includes a method :meth:`.configure`, which can
+ be used to specify additional keyword arguments to the factory, which
+ will take effect for subsequent :class:`.Session` objects generated.
+ This is usually used to associate one or more :class:`.Engine` objects
+ with an existing :class:`.sessionmaker` factory before it is first
+ used::
+
+ # application starts
+ Session = sessionmaker()
+
+ # ... later
+ engine = create_engine('sqlite:///foo.db')
+ Session.configure(bind=engine)
+
+ sess = Session()
+
+ .. seealso::
+
+ :ref:`session_getting` - introductory text on creating
+ sessions using :class:`.sessionmaker`.
+
+ """
+
+ def __init__(self, bind=None, class_=Session, autoflush=True,
+ autocommit=False,
+ expire_on_commit=True,
+ info=None, **kw):
+ """Construct a new :class:`.sessionmaker`.
+
+ All arguments here except for ``class_`` correspond to arguments
+ accepted by :class:`.Session` directly. See the
+ :meth:`.Session.__init__` docstring for more details on parameters.
+
+ :param bind: a :class:`.Engine` or other :class:`.Connectable` with
+ which newly created :class:`.Session` objects will be associated.
+ :param class_: class to use in order to create new :class:`.Session`
+ objects. Defaults to :class:`.Session`.
+ :param autoflush: The autoflush setting to use with newly created
+ :class:`.Session` objects.
+ :param autocommit: The autocommit setting to use with newly created
+ :class:`.Session` objects.
+ :param expire_on_commit=True: the expire_on_commit setting to use
+ with newly created :class:`.Session` objects.
+ :param info: optional dictionary of information that will be available
+ via :attr:`.Session.info`. Note this dictionary is *updated*, not
+ replaced, when the ``info`` parameter is specified to the specific
+ :class:`.Session` construction operation.
+
+ .. versionadded:: 0.9.0
+
+ :param \**kw: all other keyword arguments are passed to the
+ constructor of newly created :class:`.Session` objects.
+
+ """
+ kw['bind'] = bind
+ kw['autoflush'] = autoflush
+ kw['autocommit'] = autocommit
+ kw['expire_on_commit'] = expire_on_commit
+ if info is not None:
+ kw['info'] = info
+ self.kw = kw
+ # make our own subclass of the given class, so that
+ # events can be associated with it specifically.
+ self.class_ = type(class_.__name__, (class_,), {})
+
+ def __call__(self, **local_kw):
+ """Produce a new :class:`.Session` object using the configuration
+ established in this :class:`.sessionmaker`.
+
+ In Python, the ``__call__`` method is invoked on an object when
+ it is "called" in the same way as a function::
+
+ Session = sessionmaker()
+ session = Session() # invokes sessionmaker.__call__()
+
+ """
+ for k, v in self.kw.items():
+ if k == 'info' and 'info' in local_kw:
+ d = v.copy()
+ d.update(local_kw['info'])
+ local_kw['info'] = d
+ else:
+ local_kw.setdefault(k, v)
+ return self.class_(**local_kw)
+
+ def configure(self, **new_kw):
+ """(Re)configure the arguments for this sessionmaker.
+ + e.g.:: + + Session = sessionmaker() + + Session.configure(bind=create_engine('sqlite://')) + """ + self.kw.update(new_kw) + + def __repr__(self): + return "%s(class_=%r,%s)" % ( + self.__class__.__name__, + self.class_.__name__, + ", ".join("%s=%r" % (k, v) for k, v in self.kw.items()) + ) + + +def make_transient(instance): + """Alter the state of the given instance so that it is :term:`transient`. + + .. note:: + + :func:`.make_transient` is a special-case function for + advanced use cases only. + + The given mapped instance is assumed to be in the :term:`persistent` or + :term:`detached` state. The function will remove its association with any + :class:`.Session` as well as its :attr:`.InstanceState.identity`. The + effect is that the object will behave as though it were newly constructed, + except retaining any attribute / collection values that were loaded at the + time of the call. The :attr:`.InstanceState.deleted` flag is also reset + if this object had been deleted as a result of using + :meth:`.Session.delete`. + + .. warning:: + + :func:`.make_transient` does **not** "unexpire" or otherwise eagerly + load ORM-mapped attributes that are not currently loaded at the time + the function is called. This includes attributes which: + + * were expired via :meth:`.Session.expire` + + * were expired as the natural effect of committing a session + transaction, e.g. :meth:`.Session.commit` + + * are normally :term:`lazy loaded` but are not currently loaded + + * are "deferred" via :ref:`deferred` and are not yet loaded + + * were not present in the query which loaded this object, such as that + which is common in joined table inheritance and other scenarios. + + After :func:`.make_transient` is called, unloaded attributes such + as those above will normally resolve to the value ``None`` when + accessed, or an empty collection for a collection-oriented attribute. + As the object is transient and un-associated with any database + identity, it will no longer retrieve these values. + + .. seealso:: + + :func:`.make_transient_to_detached` + + """ + state = attributes.instance_state(instance) + s = _state_session(state) + if s: + s._expunge_state(state) + + # remove expired state + state.expired_attributes.clear() + + # remove deferred callables + if state.callables: + del state.callables + + if state.key: + del state.key + if state.deleted: + del state.deleted + + +def make_transient_to_detached(instance): + """Make the given transient instance :term:`detached`. + + .. note:: + + :func:`.make_transient_to_detached` is a special-case function for + advanced use cases only. + + All attribute history on the given instance + will be reset as though the instance were freshly loaded + from a query. Missing attributes will be marked as expired. + The primary key attributes of the object, which are required, will be made + into the "key" of the instance. + + The object can then be added to a session, or merged + possibly with the load=False flag, at which point it will look + as if it were loaded that way, without emitting SQL. + + This is a special use case function that differs from a normal + call to :meth:`.Session.merge` in that a given persistent state + can be manufactured without any SQL calls. + + .. versionadded:: 0.9.5 + + .. 
seealso:: + + :func:`.make_transient` + + """ + state = attributes.instance_state(instance) + if state.session_id or state.key: + raise sa_exc.InvalidRequestError( + "Given object must be transient") + state.key = state.mapper._identity_key_from_state(state) + if state.deleted: + del state.deleted + state._commit_all(state.dict) + state._expire_attributes(state.dict, state.unloaded) + + +def object_session(instance): + """Return the :class:`.Session` to which the given instance belongs. + + This is essentially the same as the :attr:`.InstanceState.session` + accessor. See that attribute for details. + + """ + + try: + state = attributes.instance_state(instance) + except exc.NO_STATE: + raise exc.UnmappedInstanceError(instance) + else: + return _state_session(state) + + +_new_sessionid = util.counter() diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/state.py b/lib/python3.4/site-packages/sqlalchemy/orm/state.py new file mode 100644 index 0000000..c66507d --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/state.py @@ -0,0 +1,729 @@ +# orm/state.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Defines instrumentation of instances. + +This module is usually not directly visible to user applications, but +defines a large part of the ORM's interactivity. + +""" + +import weakref +from .. import util +from . import exc as orm_exc, interfaces +from .path_registry import PathRegistry +from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \ + NO_VALUE, PASSIVE_NO_INITIALIZE, INIT_OK, PASSIVE_OFF +from . import base + + +class InstanceState(interfaces.InspectionAttr): + """tracks state information at the instance level. + + The :class:`.InstanceState` is a key object used by the + SQLAlchemy ORM in order to track the state of an object; + it is created the moment an object is instantiated, typically + as a result of :term:`instrumentation` which SQLAlchemy applies + to the ``__init__()`` method of the class. + + :class:`.InstanceState` is also a semi-public object, + available for runtime inspection as to the state of a + mapped instance, including information such as its current + status within a particular :class:`.Session` and details + about data on individual attributes. The public API + in order to acquire a :class:`.InstanceState` object + is to use the :func:`.inspect` system:: + + >>> from sqlalchemy import inspect + >>> insp = inspect(some_mapped_object) + + .. seealso:: + + :ref:`core_inspection_toplevel` + + """ + + session_id = None + key = None + runid = None + load_options = util.EMPTY_SET + load_path = () + insert_order = None + _strong_obj = None + modified = False + expired = False + deleted = False + _load_pending = False + is_instance = True + + callables = () + """A namespace where a per-state loader callable can be associated. + + In SQLAlchemy 1.0, this is only used for lazy loaders / deferred + loaders that were set up via query option. + + Previously, callables was used also to indicate expired attributes + by storing a link to the InstanceState itself in this dictionary. + This role is now handled by the expired_attributes set. 
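+
+ An illustrative sketch of the shape of this mapping (the attribute
+ name and the loader object shown are hypothetical)::
+
+ # a per-state lazy loader installed via a query option
+ state.callables = {'addresses': some_loader_callable}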
+ + """ + + def __init__(self, obj, manager): + self.class_ = obj.__class__ + self.manager = manager + self.obj = weakref.ref(obj, self._cleanup) + self.committed_state = {} + self.expired_attributes = set() + + expired_attributes = None + """The set of keys which are 'expired' to be loaded by + the manager's deferred scalar loader, assuming no pending + changes. + + see also the ``unmodified`` collection which is intersected + against this set when a refresh operation occurs.""" + + + @util.memoized_property + def attrs(self): + """Return a namespace representing each attribute on + the mapped object, including its current value + and history. + + The returned object is an instance of :class:`.AttributeState`. + This object allows inspection of the current data + within an attribute as well as attribute history + since the last flush. + + """ + return util.ImmutableProperties( + dict( + (key, AttributeState(self, key)) + for key in self.manager + ) + ) + + @property + def transient(self): + """Return true if the object is :term:`transient`. + + .. seealso:: + + :ref:`session_object_states` + + """ + return self.key is None and \ + not self._attached + + @property + def pending(self): + """Return true if the object is :term:`pending`. + + + .. seealso:: + + :ref:`session_object_states` + + """ + return self.key is None and \ + self._attached + + @property + def persistent(self): + """Return true if the object is :term:`persistent`. + + .. seealso:: + + :ref:`session_object_states` + + """ + return self.key is not None and \ + self._attached + + @property + def detached(self): + """Return true if the object is :term:`detached`. + + .. seealso:: + + :ref:`session_object_states` + + """ + return self.key is not None and \ + not self._attached + + @property + @util.dependencies("sqlalchemy.orm.session") + def _attached(self, sessionlib): + return self.session_id is not None and \ + self.session_id in sessionlib._sessions + + @property + @util.dependencies("sqlalchemy.orm.session") + def session(self, sessionlib): + """Return the owning :class:`.Session` for this instance, + or ``None`` if none available. + + Note that the result here can in some cases be *different* + from that of ``obj in session``; an object that's been deleted + will report as not ``in session``, however if the transaction is + still in progress, this attribute will still refer to that session. + Only when the transaction is completed does the object become + fully detached under normal circumstances. + + """ + return sessionlib._state_session(self) + + @property + def object(self): + """Return the mapped object represented by this + :class:`.InstanceState`.""" + return self.obj() + + @property + def identity(self): + """Return the mapped identity of the mapped object. + This is the primary key identity as persisted by the ORM + which can always be passed directly to + :meth:`.Query.get`. + + Returns ``None`` if the object has no primary key identity. + + .. note:: + An object which is :term:`transient` or :term:`pending` + does **not** have a mapped identity until it is flushed, + even if its attributes include primary key values. + + """ + if self.key is None: + return None + else: + return self.key[1] + + @property + def identity_key(self): + """Return the identity key for the mapped object. + + This is the key used to locate the object within + the :attr:`.Session.identity_map` mapping. It contains + the identity as returned by :attr:`.identity` within it. 
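+
+ E.g., an illustrative sketch (``User`` is an assumed mapped class
+ with a single integer primary key)::
+
+ >>> from sqlalchemy import inspect
+ >>> inspect(some_user).identity_key
+ (<class 'User'>, (5,))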
+
+
+ """
+ # TODO: just change .key to .identity_key across
+ # the board ? probably
+ return self.key
+
+ @util.memoized_property
+ def parents(self):
+ return {}
+
+ @util.memoized_property
+ def _pending_mutations(self):
+ return {}
+
+ @util.memoized_property
+ def mapper(self):
+ """Return the :class:`.Mapper` used for this mapped object."""
+ return self.manager.mapper
+
+ @property
+ def has_identity(self):
+ """Return ``True`` if this object has an identity key.
+
+ This should always have the same value as the
+ expression ``state.persistent or state.detached``.
+
+ """
+ return bool(self.key)
+
+ def _detach(self):
+ self.session_id = self._strong_obj = None
+
+ def _dispose(self):
+ self._detach()
+ del self.obj
+
+ def _cleanup(self, ref):
+ """Weakref callback cleanup.
+
+ This callable cleans out the state when it is being garbage
+ collected.
+
+ this _cleanup **assumes** that there are no strong refs to us!
+ Will not work otherwise!
+
+ """
+ instance_dict = self._instance_dict()
+ if instance_dict is not None:
+ instance_dict._fast_discard(self)
+ del self._instance_dict
+
+ # we can't possibly be in instance_dict._modified
+ # b.c. this is weakref cleanup only, that set
+ # is strong referencing!
+ # assert self not in instance_dict._modified
+
+ self.session_id = self._strong_obj = None
+ del self.obj
+
+ def obj(self):
+ return None
+
+ @property
+ def dict(self):
+ """Return the instance dict used by the object.
+
+ Under normal circumstances, this is always synonymous
+ with the ``__dict__`` attribute of the mapped object,
+ unless an alternative instrumentation system has been
+ configured.
+
+ In the case that the actual object has been garbage
+ collected, this accessor returns a blank dictionary.
+
+ """
+ o = self.obj()
+ if o is not None:
+ return base.instance_dict(o)
+ else:
+ return {}
+
+ def _initialize_instance(*mixed, **kwargs):
+ self, instance, args = mixed[0], mixed[1], mixed[2:] # noqa
+ manager = self.manager
+
+ manager.dispatch.init(self, args, kwargs)
+
+ try:
+ return manager.original_init(*mixed[1:], **kwargs)
+ except:
+ with util.safe_reraise():
+ manager.dispatch.init_failure(self, args, kwargs)
+
+ def get_history(self, key, passive):
+ return self.manager[key].impl.get_history(self, self.dict, passive)
+
+ def get_impl(self, key):
+ return self.manager[key].impl
+
+ def _get_pending_mutation(self, key):
+ if key not in self._pending_mutations:
+ self._pending_mutations[key] = PendingCollection()
+ return self._pending_mutations[key]
+
+ def __getstate__(self):
+ state_dict = {'instance': self.obj()}
+ state_dict.update(
+ (k, self.__dict__[k]) for k in (
+ 'committed_state', '_pending_mutations', 'modified',
+ 'expired', 'callables', 'key', 'parents', 'load_options',
+ 'class_', 'expired_attributes'
+ ) if k in self.__dict__
+ )
+ if self.load_path:
+ state_dict['load_path'] = self.load_path.serialize()
+
+ state_dict['manager'] = self.manager._serialize(self, state_dict)
+
+ return state_dict
+
+ def __setstate__(self, state_dict):
+ inst = state_dict['instance']
+ if inst is not None:
+ self.obj = weakref.ref(inst, self._cleanup)
+ self.class_ = inst.__class__
+ else:
+ # None being possible here generally new as of 0.7.4
+ # due to storage of state in "parents". "class_"
+ # also new.
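+ # (illustrative note) this branch is reached when a state is
+ # unpickled without a live instance, e.g. a state that was only
+ # reachable via another state's "parents" collection after its
+ # object was garbage collected.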
+ self.obj = None + self.class_ = state_dict['class_'] + + self.committed_state = state_dict.get('committed_state', {}) + self._pending_mutations = state_dict.get('_pending_mutations', {}) + self.parents = state_dict.get('parents', {}) + self.modified = state_dict.get('modified', False) + self.expired = state_dict.get('expired', False) + if 'callables' in state_dict: + self.callables = state_dict['callables'] + + try: + self.expired_attributes = state_dict['expired_attributes'] + except KeyError: + self.expired_attributes = set() + # 0.9 and earlier compat + for k in list(self.callables): + if self.callables[k] is self: + self.expired_attributes.add(k) + del self.callables[k] + + self.__dict__.update([ + (k, state_dict[k]) for k in ( + 'key', 'load_options', + ) if k in state_dict + ]) + + if 'load_path' in state_dict: + self.load_path = PathRegistry.\ + deserialize(state_dict['load_path']) + + state_dict['manager'](self, inst, state_dict) + + def _reset(self, dict_, key): + """Remove the given attribute and any + callables associated with it.""" + + old = dict_.pop(key, None) + if old is not None and self.manager[key].impl.collection: + self.manager[key].impl._invalidate_collection(old) + self.expired_attributes.discard(key) + if self.callables: + self.callables.pop(key, None) + + @classmethod + def _instance_level_callable_processor(cls, manager, fn, key): + impl = manager[key].impl + if impl.collection: + def _set_callable(state, dict_, row): + if 'callables' not in state.__dict__: + state.callables = {} + old = dict_.pop(key, None) + if old is not None: + impl._invalidate_collection(old) + state.callables[key] = fn + else: + def _set_callable(state, dict_, row): + if 'callables' not in state.__dict__: + state.callables = {} + state.callables[key] = fn + return _set_callable + + def _expire(self, dict_, modified_set): + self.expired = True + + if self.modified: + modified_set.discard(self) + self.committed_state.clear() + self.modified = False + + self._strong_obj = None + + if '_pending_mutations' in self.__dict__: + del self.__dict__['_pending_mutations'] + + if 'parents' in self.__dict__: + del self.__dict__['parents'] + + self.expired_attributes.update( + [impl.key for impl in self.manager._scalar_loader_impls + if impl.expire_missing or impl.key in dict_] + ) + + if self.callables: + for k in self.expired_attributes.intersection(self.callables): + del self.callables[k] + + for k in self.manager._collection_impl_keys.intersection(dict_): + collection = dict_.pop(k) + collection._sa_adapter.invalidated = True + + for key in self.manager._all_key_set.intersection(dict_): + del dict_[key] + + self.manager.dispatch.expire(self, None) + + def _expire_attributes(self, dict_, attribute_names): + pending = self.__dict__.get('_pending_mutations', None) + + callables = self.callables + + for key in attribute_names: + impl = self.manager[key].impl + if impl.accepts_scalar_loader: + self.expired_attributes.add(key) + if callables and key in callables: + del callables[key] + old = dict_.pop(key, None) + if impl.collection and old is not None: + impl._invalidate_collection(old) + + self.committed_state.pop(key, None) + if pending: + pending.pop(key, None) + + self.manager.dispatch.expire(self, attribute_names) + + def _load_expired(self, state, passive): + """__call__ allows the InstanceState to act as a deferred + callable for loading expired attributes, which is also + serializable (picklable). 
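+
+ E.g., an illustrative sketch (``obj`` is an assumed persistent
+ instance)::
+
+ session.expire(obj, ['name'])
+ obj.name # attribute access ends up invoking this loader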
+ + """ + + if not passive & SQL_OK: + return PASSIVE_NO_RESULT + + toload = self.expired_attributes.\ + intersection(self.unmodified) + + self.manager.deferred_scalar_loader(self, toload) + + # if the loader failed, or this + # instance state didn't have an identity, + # the attributes still might be in the callables + # dict. ensure they are removed. + self.expired_attributes.clear() + + return ATTR_WAS_SET + + @property + def unmodified(self): + """Return the set of keys which have no uncommitted changes""" + + return set(self.manager).difference(self.committed_state) + + def unmodified_intersection(self, keys): + """Return self.unmodified.intersection(keys).""" + + return set(keys).intersection(self.manager).\ + difference(self.committed_state) + + @property + def unloaded(self): + """Return the set of keys which do not have a loaded value. + + This includes expired attributes and any other attribute that + was never populated or modified. + + """ + return set(self.manager).\ + difference(self.committed_state).\ + difference(self.dict) + + @property + def _unloaded_non_object(self): + return self.unloaded.intersection( + attr for attr in self.manager + if self.manager[attr].impl.accepts_scalar_loader + ) + + def _instance_dict(self): + return None + + def _modified_event( + self, dict_, attr, previous, collection=False, force=False): + if not attr.send_modified_events: + return + if attr.key not in self.committed_state or force: + if collection: + if previous is NEVER_SET: + if attr.key in dict_: + previous = dict_[attr.key] + + if previous not in (None, NO_VALUE, NEVER_SET): + previous = attr.copy(previous) + + self.committed_state[attr.key] = previous + + # assert self._strong_obj is None or self.modified + + if (self.session_id and self._strong_obj is None) \ + or not self.modified: + self.modified = True + instance_dict = self._instance_dict() + if instance_dict: + instance_dict._modified.add(self) + + # only create _strong_obj link if attached + # to a session + + inst = self.obj() + if self.session_id: + self._strong_obj = inst + + if inst is None: + raise orm_exc.ObjectDereferencedError( + "Can't emit change event for attribute '%s' - " + "parent object of type %s has been garbage " + "collected." + % ( + self.manager[attr.key], + base.state_class_str(self) + )) + + def _commit(self, dict_, keys): + """Commit attributes. + + This is used by a partial-attribute load operation to mark committed + those attributes which were refreshed from the database. + + Attributes marked as "expired" can potentially remain "expired" after + this step if a value was not populated in state.dict. + + """ + for key in keys: + self.committed_state.pop(key, None) + + self.expired = False + + self.expired_attributes.difference_update( + set(keys).intersection(dict_)) + + # the per-keys commit removes object-level callables, + # while that of commit_all does not. it's not clear + # if this behavior has a clear rationale, however tests do + # ensure this is what it does. + if self.callables: + for key in set(self.callables).\ + intersection(keys).\ + intersection(dict_): + del self.callables[key] + + def _commit_all(self, dict_, instance_dict=None): + """commit all attributes unconditionally. + + This is used after a flush() or a full load/refresh + to remove all pending state from the instance. + + - all attributes are marked as "committed" + - the "strong dirty reference" is removed + - the "modified" flag is set to False + - any "expired" markers for scalar attributes loaded are removed. 
+ - lazy load callables for objects / collections *stay* + + Attributes marked as "expired" can potentially remain + "expired" after this step if a value was not populated in state.dict. + + """ + self._commit_all_states([(self, dict_)], instance_dict) + + @classmethod + def _commit_all_states(self, iter, instance_dict=None): + """Mass / highly inlined version of commit_all().""" + + for state, dict_ in iter: + state_dict = state.__dict__ + + state.committed_state.clear() + + if '_pending_mutations' in state_dict: + del state_dict['_pending_mutations'] + + state.expired_attributes.difference_update(dict_) + + if instance_dict and state.modified: + instance_dict._modified.discard(state) + + state.modified = state.expired = False + state._strong_obj = None + + +class AttributeState(object): + """Provide an inspection interface corresponding + to a particular attribute on a particular mapped object. + + The :class:`.AttributeState` object is accessed + via the :attr:`.InstanceState.attrs` collection + of a particular :class:`.InstanceState`:: + + from sqlalchemy import inspect + + insp = inspect(some_mapped_object) + attr_state = insp.attrs.some_attribute + + """ + + def __init__(self, state, key): + self.state = state + self.key = key + + @property + def loaded_value(self): + """The current value of this attribute as loaded from the database. + + If the value has not been loaded, or is otherwise not present + in the object's dictionary, returns NO_VALUE. + + """ + return self.state.dict.get(self.key, NO_VALUE) + + @property + def value(self): + """Return the value of this attribute. + + This operation is equivalent to accessing the object's + attribute directly or via ``getattr()``, and will fire + off any pending loader callables if needed. + + """ + return self.state.manager[self.key].__get__( + self.state.obj(), self.state.class_) + + @property + def history(self): + """Return the current pre-flush change history for + this attribute, via the :class:`.History` interface. + + This method will **not** emit loader callables if the value of the + attribute is unloaded. + + .. seealso:: + + :meth:`.AttributeState.load_history` - retrieve history + using loader callables if the value is not locally present. + + :func:`.attributes.get_history` - underlying function + + """ + return self.state.get_history(self.key, + PASSIVE_NO_INITIALIZE) + + def load_history(self): + """Return the current pre-flush change history for + this attribute, via the :class:`.History` interface. + + This method **will** emit loader callables if the value of the + attribute is unloaded. + + .. seealso:: + + :attr:`.AttributeState.history` + + :func:`.attributes.get_history` - underlying function + + .. versionadded:: 0.9.0 + + """ + return self.state.get_history(self.key, + PASSIVE_OFF ^ INIT_OK) + + +class PendingCollection(object): + """A writable placeholder for an unloaded collection. + + Stores items appended to and removed from a collection that has not yet + been loaded. When the collection is loaded, the changes stored in + PendingCollection are applied to it to produce the final result. 
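+
+ A minimal sketch of the bookkeeping (illustrative only)::
+
+ pc = PendingCollection()
+ pc.append(item) # queued as an added item
+ pc.remove(item) # cancels the pending add
+ pc.remove(other) # queued as a deleted item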
+
+ """
+
+ def __init__(self):
+ self.deleted_items = util.IdentitySet()
+ self.added_items = util.OrderedIdentitySet()
+
+ def append(self, value):
+ if value in self.deleted_items:
+ self.deleted_items.remove(value)
+ else:
+ self.added_items.add(value)
+
+ def remove(self, value):
+ if value in self.added_items:
+ self.added_items.remove(value)
+ else:
+ self.deleted_items.add(value)
diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/strategies.py b/lib/python3.4/site-packages/sqlalchemy/orm/strategies.py
new file mode 100644
index 0000000..7942b14
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/orm/strategies.py
@@ -0,0 +1,1618 @@
+# orm/strategies.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""sqlalchemy.orm.interfaces.LoaderStrategy
+ implementations, and related MapperOptions."""
+
+from .. import exc as sa_exc, inspect
+from .. import util, log, event
+from ..sql import util as sql_util, visitors
+from .. import sql
+from . import (
+ attributes, interfaces, exc as orm_exc, loading,
+ unitofwork, util as orm_util
+)
+from .state import InstanceState
+from .util import _none_set
+from . import properties
+from .interfaces import (
+ LoaderStrategy, StrategizedProperty
+)
+from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE
+from .session import _state_session
+import itertools
+
+
+def _register_attribute(
+ strategy, mapper, useobject,
+ compare_function=None,
+ typecallable=None,
+ uselist=False,
+ callable_=None,
+ proxy_property=None,
+ active_history=False,
+ impl_class=None,
+ **kw
+):
+
+ prop = strategy.parent_property
+
+ attribute_ext = list(util.to_list(prop.extension, default=[]))
+
+ listen_hooks = []
+
+ if useobject and prop.single_parent:
+ listen_hooks.append(single_parent_validator)
+
+ if prop.key in prop.parent.validators:
+ fn, opts = prop.parent.validators[prop.key]
+ listen_hooks.append(
+ lambda desc, prop: orm_util._validator_events(
+ desc,
+ prop.key, fn, **opts)
+ )
+
+ if useobject:
+ listen_hooks.append(unitofwork.track_cascade_events)
+
+ # need to assemble backref listeners
+ # after the singleparentvalidator, mapper validator
+ backref = kw.pop('backref', None)
+ if backref:
+ listen_hooks.append(
+ lambda desc, prop: attributes.backref_listeners(
+ desc,
+ backref,
+ uselist
+ )
+ )
+
+ for m in mapper.self_and_descendants:
+ if prop is m._props.get(prop.key):
+
+ desc = attributes.register_attribute_impl(
+ m.class_,
+ prop.key,
+ parent_token=prop,
+ uselist=uselist,
+ compare_function=compare_function,
+ useobject=useobject,
+ extension=attribute_ext,
+ trackparent=useobject and (
+ prop.single_parent
+ or prop.direction is interfaces.ONETOMANY),
+ typecallable=typecallable,
+ callable_=callable_,
+ active_history=active_history,
+ impl_class=impl_class,
+ send_modified_events=not useobject or not prop.viewonly,
+ doc=prop.doc,
+ **kw
+ )
+
+ for hook in listen_hooks:
+ hook(desc, prop)
+
+
+@properties.ColumnProperty.strategy_for(instrument=False, deferred=False)
+class UninstrumentedColumnLoader(LoaderStrategy):
+ """Represent a non-instrumented MapperProperty.
+
+ The polymorphic_on argument of mapper() often results in this,
+ if the argument is against the with_polymorphic selectable.
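+
+ E.g., an illustrative sketch (``pjoin`` is an assumed
+ polymorphic_union() selectable)::
+
+ mapper(Employee, pjoin, with_polymorphic=('*', pjoin),
+ polymorphic_on=pjoin.c.type)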
+ + """ + __slots__ = 'columns', + + def __init__(self, parent): + super(UninstrumentedColumnLoader, self).__init__(parent) + self.columns = self.parent_property.columns + + def setup_query( + self, context, entity, path, loadopt, adapter, + column_collection=None, **kwargs): + for c in self.columns: + if adapter: + c = adapter.columns[c] + column_collection.append(c) + + def create_row_processor( + self, context, path, loadopt, + mapper, result, adapter, populators): + pass + + +@log.class_logger +@properties.ColumnProperty.strategy_for(instrument=True, deferred=False) +class ColumnLoader(LoaderStrategy): + """Provide loading behavior for a :class:`.ColumnProperty`.""" + + __slots__ = 'columns', 'is_composite' + + def __init__(self, parent): + super(ColumnLoader, self).__init__(parent) + self.columns = self.parent_property.columns + self.is_composite = hasattr(self.parent_property, 'composite_class') + + def setup_query( + self, context, entity, path, loadopt, + adapter, column_collection, memoized_populators, **kwargs): + + for c in self.columns: + if adapter: + c = adapter.columns[c] + column_collection.append(c) + + fetch = self.columns[0] + if adapter: + fetch = adapter.columns[fetch] + memoized_populators[self.parent_property] = fetch + + def init_class_attribute(self, mapper): + self.is_class_level = True + coltype = self.columns[0].type + # TODO: check all columns ? check for foreign key as well? + active_history = self.parent_property.active_history or \ + self.columns[0].primary_key or \ + mapper.version_id_col in set(self.columns) + + _register_attribute( + self, mapper, useobject=False, + compare_function=coltype.compare_values, + active_history=active_history + ) + + def create_row_processor( + self, context, path, + loadopt, mapper, result, adapter, populators): + # look through list of columns represented here + # to see which, if any, is present in the row. + for col in self.columns: + if adapter: + col = adapter.columns[col] + getter = result._getter(col) + if getter: + populators["quick"].append((self.key, getter)) + break + else: + populators["expire"].append((self.key, True)) + + +@log.class_logger +@properties.ColumnProperty.strategy_for(deferred=True, instrument=True) +class DeferredColumnLoader(LoaderStrategy): + """Provide loading behavior for a deferred :class:`.ColumnProperty`.""" + + __slots__ = 'columns', 'group' + + def __init__(self, parent): + super(DeferredColumnLoader, self).__init__(parent) + if hasattr(self.parent_property, 'composite_class'): + raise NotImplementedError("Deferred loading for composite " + "types not implemented yet") + self.columns = self.parent_property.columns + self.group = self.parent_property.group + + def create_row_processor( + self, context, path, loadopt, + mapper, result, adapter, populators): + + # this path currently does not check the result + # for the column; this is because in most cases we are + # working just with the setup_query() directive which does + # not support this, and the behavior here should be consistent. 
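+ # (illustrative) a mapping such as:
+ # photo = deferred(Column(LargeBinary))
+ # arrives here; either a per-instance deferred loader is
+ # installed ("new"), or the attribute is marked expired at
+ # the class level ("expire").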
+        if not self.is_class_level:
+            set_deferred_for_local_state = \
+                self.parent_property._deferred_column_loader
+            populators["new"].append((self.key, set_deferred_for_local_state))
+        else:
+            populators["expire"].append((self.key, False))
+
+    def init_class_attribute(self, mapper):
+        self.is_class_level = True
+
+        _register_attribute(
+            self, mapper, useobject=False,
+            compare_function=self.columns[0].type.compare_values,
+            callable_=self._load_for_state,
+            expire_missing=False
+        )
+
+    def setup_query(
+            self, context, entity, path, loadopt,
+            adapter, column_collection, memoized_populators,
+            only_load_props=None, **kw):
+
+        if (
+            (
+                loadopt and
+                'undefer_pks' in loadopt.local_opts and
+                set(self.columns).intersection(
+                    self.parent._should_undefer_in_wildcard)
+            )
+            or
+            (
+                loadopt and
+                self.group and
+                loadopt.local_opts.get('undefer_group_%s' % self.group, False)
+            )
+            or
+            (
+                only_load_props and self.key in only_load_props
+            )
+        ):
+            self.parent_property._get_strategy_by_cls(ColumnLoader).\
+                setup_query(context, entity,
+                            path, loadopt, adapter,
+                            column_collection, memoized_populators, **kw)
+        elif self.is_class_level:
+            memoized_populators[self.parent_property] = _SET_DEFERRED_EXPIRED
+        else:
+            memoized_populators[self.parent_property] = _DEFER_FOR_STATE
+
+    def _load_for_state(self, state, passive):
+        if not state.key:
+            return attributes.ATTR_EMPTY
+
+        if not passive & attributes.SQL_OK:
+            return attributes.PASSIVE_NO_RESULT
+
+        localparent = state.manager.mapper
+
+        if self.group:
+            toload = [
+                p.key for p in
+                localparent.iterate_properties
+                if isinstance(p, StrategizedProperty) and
+                isinstance(p.strategy, DeferredColumnLoader) and
+                p.group == self.group
+            ]
+        else:
+            toload = [self.key]
+
+        # narrow the keys down to just those which have no history
+        group = [k for k in toload if k in state.unmodified]
+
+        session = _state_session(state)
+        if session is None:
+            raise orm_exc.DetachedInstanceError(
+                "Parent instance %s is not bound to a Session; "
+                "deferred load operation of attribute '%s' cannot proceed" %
+                (orm_util.state_str(state), self.key)
+            )
+
+        query = session.query(localparent)
+        if loading.load_on_ident(
+                query, state.key,
+                only_load_props=group, refresh_state=state) is None:
+            raise orm_exc.ObjectDeletedError(state)
+
+        return attributes.ATTR_WAS_SET
+
+
+class LoadDeferredColumns(object):
+    """serializable loader object used by DeferredColumnLoader"""
+
+    def __init__(self, key):
+        self.key = key
+
+    def __call__(self, state, passive=attributes.PASSIVE_OFF):
+        key = self.key
+
+        localparent = state.manager.mapper
+        prop = localparent._props[key]
+        strategy = prop._strategies[DeferredColumnLoader]
+        return strategy._load_for_state(state, passive)
+
+
+class AbstractRelationshipLoader(LoaderStrategy):
+    """LoaderStrategies which deal with related objects."""
+
+    __slots__ = 'mapper', 'target', 'uselist'
+
+    def __init__(self, parent):
+        super(AbstractRelationshipLoader, self).__init__(parent)
+        self.mapper = self.parent_property.mapper
+        self.target = self.parent_property.target
+        self.uselist = self.parent_property.uselist
+
+
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="noload")
+@properties.RelationshipProperty.strategy_for(lazy=None)
+class NoLoader(AbstractRelationshipLoader):
+    """Provide loading behavior for a :class:`.RelationshipProperty`
+    with "lazy=None".
+ + """ + + __slots__ = () + + def init_class_attribute(self, mapper): + self.is_class_level = True + + _register_attribute( + self, mapper, + useobject=True, + uselist=self.parent_property.uselist, + typecallable=self.parent_property.collection_class, + ) + + def create_row_processor( + self, context, path, loadopt, mapper, + result, adapter, populators): + def invoke_no_load(state, dict_, row): + if self.uselist: + state.manager.get_impl(self.key).initialize(state, dict_) + else: + dict_[self.key] = None + populators["new"].append((self.key, invoke_no_load)) + + +@log.class_logger +@properties.RelationshipProperty.strategy_for(lazy=True) +@properties.RelationshipProperty.strategy_for(lazy="select") +class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots): + """Provide loading behavior for a :class:`.RelationshipProperty` + with "lazy=True", that is loads when first accessed. + + """ + + __slots__ = ( + '_lazywhere', '_rev_lazywhere', 'use_get', '_bind_to_col', + '_equated_columns', '_rev_bind_to_col', '_rev_equated_columns', + '_simple_lazy_clause') + + def __init__(self, parent): + super(LazyLoader, self).__init__(parent) + join_condition = self.parent_property._join_condition + self._lazywhere, \ + self._bind_to_col, \ + self._equated_columns = join_condition.create_lazy_clause() + + self._rev_lazywhere, \ + self._rev_bind_to_col, \ + self._rev_equated_columns = join_condition.create_lazy_clause( + reverse_direction=True) + + self.logger.info("%s lazy loading clause %s", self, self._lazywhere) + + # determine if our "lazywhere" clause is the same as the mapper's + # get() clause. then we can just use mapper.get() + self.use_get = not self.uselist and \ + self.mapper._get_clause[0].compare( + self._lazywhere, + use_proxies=True, + equivalents=self.mapper._equivalent_columns + ) + + if self.use_get: + for col in list(self._equated_columns): + if col in self.mapper._equivalent_columns: + for c in self.mapper._equivalent_columns[col]: + self._equated_columns[c] = self._equated_columns[col] + + self.logger.info("%s will use query.get() to " + "optimize instance loads", self) + + def init_class_attribute(self, mapper): + self.is_class_level = True + + active_history = ( + self.parent_property.active_history or + self.parent_property.direction is not interfaces.MANYTOONE or + not self.use_get + ) + + # MANYTOONE currently only needs the + # "old" value for delete-orphan + # cascades. the required _SingleParentValidator + # will enable active_history + # in that case. otherwise we don't need the + # "old" value during backref operations. 
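# --- illustrative sketch (editorial, not part of the vendored source) ---
# The "use_get" optimization logged above, seen from the user's side: a
# many-to-one lazy load whose lazy clause matches the mapper's get() clause
# is satisfied from the identity map, with no SQL, when the target is
# already present.  The User/Address models here are hypothetical and are
# reused by the later sketches in this section.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, relationship

Base = declarative_base()


class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))


class Address(Base):
    __tablename__ = 'address'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    user = relationship(User, lazy='select', backref='addresses')  # LazyLoader
    # lazy=None / lazy='noload' would select NoLoader instead: the attribute
    # is initialized empty and never queried.


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = Session(engine)
session.add(Address(id=1, user=User(id=7, name='u')))
session.commit()

user = session.query(User).get(7)       # User(7) is now in the identity map
address = session.query(Address).get(1)
assert address.user is user             # use_get path: resolved from the
                                        # identity map, no extra SELECT
# --- end sketch ---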
+ _register_attribute( + self, + mapper, + useobject=True, + callable_=self._load_for_state, + uselist=self.parent_property.uselist, + backref=self.parent_property.back_populates, + typecallable=self.parent_property.collection_class, + active_history=active_history + ) + + def _memoized_attr__simple_lazy_clause(self): + criterion, bind_to_col = ( + self._lazywhere, + self._bind_to_col + ) + + params = [] + + def visit_bindparam(bindparam): + bindparam.unique = False + if bindparam._identifying_key in bind_to_col: + params.append(( + bindparam.key, bind_to_col[bindparam._identifying_key], + None)) + else: + params.append((bindparam.key, None, bindparam.value)) + + criterion = visitors.cloned_traverse( + criterion, {}, {'bindparam': visit_bindparam} + ) + + return criterion, params + + def _generate_lazy_clause(self, state, passive): + criterion, param_keys = self._simple_lazy_clause + + if state is None: + return sql_util.adapt_criterion_to_null( + criterion, [key for key, ident, value in param_keys]) + + mapper = self.parent_property.parent + + o = state.obj() # strong ref + dict_ = attributes.instance_dict(o) + + if passive & attributes.INIT_OK: + passive ^= attributes.INIT_OK + + params = {} + for key, ident, value in param_keys: + if ident is not None: + if passive and passive & attributes.LOAD_AGAINST_COMMITTED: + value = mapper._get_committed_state_attr_by_column( + state, dict_, ident, passive) + else: + value = mapper._get_state_attr_by_column( + state, dict_, ident, passive) + + params[key] = value + + return criterion, params + + def _load_for_state(self, state, passive): + if not state.key and ( + ( + not self.parent_property.load_on_pending + and not state._load_pending + ) + or not state.session_id + ): + return attributes.ATTR_EMPTY + + pending = not state.key + ident_key = None + + if ( + (not passive & attributes.SQL_OK and not self.use_get) + or + (not passive & attributes.NON_PERSISTENT_OK and pending) + ): + return attributes.PASSIVE_NO_RESULT + + session = _state_session(state) + if not session: + raise orm_exc.DetachedInstanceError( + "Parent instance %s is not bound to a Session; " + "lazy load operation of attribute '%s' cannot proceed" % + (orm_util.state_str(state), self.key) + ) + + # if we have a simple primary key load, check the + # identity map without generating a Query at all + if self.use_get: + ident = self._get_ident_for_use_get( + session, + state, + passive + ) + if attributes.PASSIVE_NO_RESULT in ident: + return attributes.PASSIVE_NO_RESULT + elif attributes.NEVER_SET in ident: + return attributes.NEVER_SET + + if _none_set.issuperset(ident): + return None + + ident_key = self.mapper.identity_key_from_primary_key(ident) + instance = loading.get_from_identity(session, ident_key, passive) + if instance is not None: + return instance + elif not passive & attributes.SQL_OK or \ + not passive & attributes.RELATED_OBJECT_OK: + return attributes.PASSIVE_NO_RESULT + + return self._emit_lazyload(session, state, ident_key, passive) + + def _get_ident_for_use_get(self, session, state, passive): + instance_mapper = state.manager.mapper + + if passive & attributes.LOAD_AGAINST_COMMITTED: + get_attr = instance_mapper._get_committed_state_attr_by_column + else: + get_attr = instance_mapper._get_state_attr_by_column + + dict_ = state.dict + + return [ + get_attr( + state, + dict_, + self._equated_columns[pk], + passive=passive) + for pk in self.mapper.primary_key + ] + + @util.dependencies("sqlalchemy.orm.strategy_options") + def _emit_lazyload( + self, 
strategy_options, session, state, ident_key, passive): + + q = session.query(self.mapper)._adapt_all_clauses() + if self.parent_property.secondary is not None: + q = q.select_from(self.mapper, self.parent_property.secondary) + + q = q._with_invoke_all_eagers(False) + + pending = not state.key + + # don't autoflush on pending + if pending or passive & attributes.NO_AUTOFLUSH: + q = q.autoflush(False) + + if state.load_path: + q = q._with_current_path(state.load_path[self.parent_property]) + + if state.load_options: + q = q._conditional_options(*state.load_options) + + if self.use_get: + return loading.load_on_ident(q, ident_key) + + if self.parent_property.order_by: + q = q.order_by(*util.to_list(self.parent_property.order_by)) + + for rev in self.parent_property._reverse_property: + # reverse props that are MANYTOONE are loading *this* + # object from get(), so don't need to eager out to those. + if rev.direction is interfaces.MANYTOONE and \ + rev._use_get and \ + not isinstance(rev.strategy, LazyLoader): + q = q.options( + strategy_options.Load(rev.parent).lazyload(rev.key)) + + lazy_clause, params = self._generate_lazy_clause( + state, passive=passive) + + if pending: + if util.has_intersection( + orm_util._none_set, params.values()): + return None + elif util.has_intersection(orm_util._never_set, params.values()): + return None + + q = q.filter(lazy_clause).params(params) + + result = q.all() + if self.uselist: + return result + else: + l = len(result) + if l: + if l > 1: + util.warn( + "Multiple rows returned with " + "uselist=False for lazily-loaded attribute '%s' " + % self.parent_property) + + return result[0] + else: + return None + + def create_row_processor( + self, context, path, loadopt, + mapper, result, adapter, populators): + key = self.key + if not self.is_class_level: + # we are not the primary manager for this attribute + # on this class - set up a + # per-instance lazyloader, which will override the + # class-level behavior. + # this currently only happens when using a + # "lazyload" option on a "no load" + # attribute - "eager" attributes always have a + # class-level lazyloader installed. + set_lazy_callable = InstanceState._instance_level_callable_processor( + mapper.class_manager, + LoadLazyAttribute(key, self._strategy_keys[0]), key) + + populators["new"].append((self.key, set_lazy_callable)) + elif context.populate_existing or mapper.always_refresh: + def reset_for_lazy_callable(state, dict_, row): + # we are the primary manager for this attribute on + # this class - reset its + # per-instance attribute state, so that the class-level + # lazy loader is + # executed when next referenced on this instance. + # this is needed in + # populate_existing() types of scenarios to reset + # any existing state. 
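# --- illustrative sketch (editorial, not part of the vendored source) ---
# The populate_existing/always_refresh branch above, in user terms: the
# query resets per-instance attribute state, so the class-level lazy loader
# fires again on the next access.  Reuses the hypothetical User/Address
# mapping from the earlier sketch.
address = session.query(Address).first()
address.user                                      # lazy load fires once
session.query(Address).populate_existing().all()  # resets loaded state
address.user                                      # class-level loader fires again
# --- end sketch ---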
+ state._reset(dict_, key) + + populators["new"].append((self.key, reset_for_lazy_callable)) + + +class LoadLazyAttribute(object): + """serializable loader object used by LazyLoader""" + + def __init__(self, key, strategy_key=(('lazy', 'select'),)): + self.key = key + self.strategy_key = strategy_key + + def __call__(self, state, passive=attributes.PASSIVE_OFF): + key = self.key + instance_mapper = state.manager.mapper + prop = instance_mapper._props[key] + strategy = prop._strategies[self.strategy_key] + + return strategy._load_for_state(state, passive) + + +@properties.RelationshipProperty.strategy_for(lazy="immediate") +class ImmediateLoader(AbstractRelationshipLoader): + __slots__ = () + + def init_class_attribute(self, mapper): + self.parent_property.\ + _get_strategy_by_cls(LazyLoader).\ + init_class_attribute(mapper) + + def setup_query( + self, context, entity, + path, loadopt, adapter, column_collection=None, + parentmapper=None, **kwargs): + pass + + def create_row_processor( + self, context, path, loadopt, + mapper, result, adapter, populators): + def load_immediate(state, dict_, row): + state.get_impl(self.key).get(state, dict_) + + populators["delayed"].append((self.key, load_immediate)) + + +@log.class_logger +@properties.RelationshipProperty.strategy_for(lazy="subquery") +class SubqueryLoader(AbstractRelationshipLoader): + __slots__ = 'join_depth', + + def __init__(self, parent): + super(SubqueryLoader, self).__init__(parent) + self.join_depth = self.parent_property.join_depth + + def init_class_attribute(self, mapper): + self.parent_property.\ + _get_strategy_by_cls(LazyLoader).\ + init_class_attribute(mapper) + + def setup_query( + self, context, entity, + path, loadopt, adapter, + column_collection=None, + parentmapper=None, **kwargs): + + if not context.query._enable_eagerloads: + return + elif context.query._yield_per: + context.query._no_yield_per("subquery") + + path = path[self.parent_property] + + # build up a path indicating the path from the leftmost + # entity to the thing we're subquery loading. + with_poly_info = path.get( + context.attributes, + "path_with_polymorphic", None) + if with_poly_info is not None: + effective_entity = with_poly_info.entity + else: + effective_entity = self.mapper + + subq_path = context.attributes.get( + ('subquery_path', None), + orm_util.PathRegistry.root) + + subq_path = subq_path + path + + # if not via query option, check for + # a cycle + if not path.contains(context.attributes, "loader"): + if self.join_depth: + if path.length / 2 > self.join_depth: + return + elif subq_path.contains_mapper(self.mapper): + return + + leftmost_mapper, leftmost_attr, leftmost_relationship = \ + self._get_leftmost(subq_path) + + orig_query = context.attributes.get( + ("orig_query", SubqueryLoader), + context.query) + + # generate a new Query from the original, then + # produce a subquery from it. + left_alias = self._generate_from_original_query( + orig_query, leftmost_mapper, + leftmost_attr, leftmost_relationship, + entity.entity_zero + ) + + # generate another Query that will join the + # left alias to the target relationships. + # basically doing a longhand + # "from_self()". 
(from_self() itself not quite industrial + # strength enough for all contingencies...but very close) + q = orig_query.session.query(effective_entity) + q._attributes = { + ("orig_query", SubqueryLoader): orig_query, + ('subquery_path', None): subq_path + } + + q = q._set_enable_single_crit(False) + to_join, local_attr, parent_alias = \ + self._prep_for_joins(left_alias, subq_path) + q = q.order_by(*local_attr) + q = q.add_columns(*local_attr) + q = self._apply_joins( + q, to_join, left_alias, + parent_alias, effective_entity) + + q = self._setup_options(q, subq_path, orig_query, effective_entity) + q = self._setup_outermost_orderby(q) + + # add new query to attributes to be picked up + # by create_row_processor + path.set(context.attributes, "subquery", q) + + def _get_leftmost(self, subq_path): + subq_path = subq_path.path + subq_mapper = orm_util._class_to_mapper(subq_path[0]) + + # determine attributes of the leftmost mapper + if self.parent.isa(subq_mapper) and \ + self.parent_property is subq_path[1]: + leftmost_mapper, leftmost_prop = \ + self.parent, self.parent_property + else: + leftmost_mapper, leftmost_prop = \ + subq_mapper, \ + subq_path[1] + + leftmost_cols = leftmost_prop.local_columns + + leftmost_attr = [ + getattr( + subq_path[0].entity, + leftmost_mapper._columntoproperty[c].key) + for c in leftmost_cols + ] + + return leftmost_mapper, leftmost_attr, leftmost_prop + + def _generate_from_original_query( + self, + orig_query, leftmost_mapper, + leftmost_attr, leftmost_relationship, orig_entity + ): + # reformat the original query + # to look only for significant columns + q = orig_query._clone().correlate(None) + + # set a real "from" if not present, as this is more + # accurate than just going off of the column expression + if not q._from_obj and orig_entity.mapper.isa(leftmost_mapper): + q._set_select_from([orig_entity], False) + target_cols = q._adapt_col_list(leftmost_attr) + + # select from the identity columns of the outer + q._set_entities(target_cols) + + distinct_target_key = leftmost_relationship.distinct_target_key + + if distinct_target_key is True: + q._distinct = True + elif distinct_target_key is None: + # if target_cols refer to a non-primary key or only + # part of a composite primary key, set the q as distinct + for t in set(c.table for c in target_cols): + if not set(target_cols).issuperset(t.primary_key): + q._distinct = True + break + + if q._order_by is False: + q._order_by = leftmost_mapper.order_by + + # don't need ORDER BY if no limit/offset + if q._limit is None and q._offset is None: + q._order_by = None + + # the original query now becomes a subquery + # which we'll join onto. + + embed_q = q.with_labels().subquery() + left_alias = orm_util.AliasedClass( + leftmost_mapper, embed_q, + use_mapper_path=True) + return left_alias + + def _prep_for_joins(self, left_alias, subq_path): + # figure out what's being joined. a.k.a. the fun part + to_join = [] + pairs = list(subq_path.pairs()) + + for i, (mapper, prop) in enumerate(pairs): + if i > 0: + # look at the previous mapper in the chain - + # if it is as or more specific than this prop's + # mapper, use that instead. 
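# --- illustrative sketch (editorial, not part of the vendored source) ---
# What the SubqueryLoader machinery above produces for user code: one SELECT
# for the parent rows, plus one SELECT that joins the original query,
# restated as a subquery, to the related rows.  Reuses the hypothetical
# User/Address mapping from the earlier sketch.
from sqlalchemy.orm import subqueryload

users = session.query(User).options(subqueryload(User.addresses)).all()
# emits roughly:
#   SELECT ... FROM "user"
#   SELECT ... FROM (SELECT "user".id AS user_id FROM "user") AS anon_1
#       JOIN address ON anon_1.user_id = address.user_id
#       ORDER BY anon_1.user_id
print(users[0].addresses)   # already loaded; no further SQL
# --- end sketch ---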
+ # note we have an assumption here that + # the non-first element is always going to be a mapper, + # not an AliasedClass + + prev_mapper = pairs[i - 1][1].mapper + to_append = prev_mapper if prev_mapper.isa(mapper) else mapper + else: + to_append = mapper + + to_join.append((to_append, prop.key)) + + # determine the immediate parent class we are joining from, + # which needs to be aliased. + if len(to_join) > 1: + info = inspect(to_join[-1][0]) + + if len(to_join) < 2: + # in the case of a one level eager load, this is the + # leftmost "left_alias". + parent_alias = left_alias + elif info.mapper.isa(self.parent): + # In the case of multiple levels, retrieve + # it from subq_path[-2]. This is the same as self.parent + # in the vast majority of cases, and [ticket:2014] + # illustrates a case where sub_path[-2] is a subclass + # of self.parent + parent_alias = orm_util.AliasedClass( + to_join[-1][0], + use_mapper_path=True) + else: + # if of_type() were used leading to this relationship, + # self.parent is more specific than subq_path[-2] + parent_alias = orm_util.AliasedClass( + self.parent, + use_mapper_path=True) + + local_cols = self.parent_property.local_columns + + local_attr = [ + getattr(parent_alias, self.parent._columntoproperty[c].key) + for c in local_cols + ] + return to_join, local_attr, parent_alias + + def _apply_joins( + self, q, to_join, left_alias, parent_alias, + effective_entity): + for i, (mapper, key) in enumerate(to_join): + + # we need to use query.join() as opposed to + # orm.join() here because of the + # rich behavior it brings when dealing with + # "with_polymorphic" mappers. "aliased" + # and "from_joinpoint" take care of most of + # the chaining and aliasing for us. + + first = i == 0 + middle = i < len(to_join) - 1 + second_to_last = i == len(to_join) - 2 + last = i == len(to_join) - 1 + + if first: + attr = getattr(left_alias, key) + if last and effective_entity is not self.mapper: + attr = attr.of_type(effective_entity) + else: + if last and effective_entity is not self.mapper: + attr = getattr(parent_alias, key).\ + of_type(effective_entity) + else: + attr = getattr(mapper.entity, key) + + if second_to_last: + q = q.join(parent_alias, attr, from_joinpoint=True) + else: + q = q.join(attr, aliased=middle, from_joinpoint=True) + return q + + def _setup_options(self, q, subq_path, orig_query, effective_entity): + # propagate loader options etc. to the new query. + # these will fire relative to subq_path. + q = q._with_current_path(subq_path) + q = q._conditional_options(*orig_query._with_options) + if orig_query._populate_existing: + q._populate_existing = orig_query._populate_existing + + return q + + def _setup_outermost_orderby(self, q): + if self.parent_property.order_by: + # if there's an ORDER BY, alias it the same + # way joinedloader does, but we have to pull out + # the "eagerjoin" from the query. + # this really only picks up the "secondary" table + # right now. + eagerjoin = q._from_obj[0] + eager_order_by = \ + eagerjoin._target_adapter.\ + copy_and_process( + util.to_list( + self.parent_property.order_by + ) + ) + q = q.order_by(*eager_order_by) + return q + + class _SubqCollections(object): + """Given a :class:`.Query` used to emit the "subquery load", + provide a load interface that executes the query at the + first moment a value is needed. 
+ + """ + _data = None + + def __init__(self, subq): + self.subq = subq + + def get(self, key, default): + if self._data is None: + self._load() + return self._data.get(key, default) + + def _load(self): + self._data = dict( + (k, [vv[0] for vv in v]) + for k, v in itertools.groupby( + self.subq, + lambda x: x[1:] + ) + ) + + def loader(self, state, dict_, row): + if self._data is None: + self._load() + + def create_row_processor( + self, context, path, loadopt, + mapper, result, adapter, populators): + if not self.parent.class_manager[self.key].impl.supports_population: + raise sa_exc.InvalidRequestError( + "'%s' does not support object " + "population - eager loading cannot be applied." % + self) + + path = path[self.parent_property] + + subq = path.get(context.attributes, 'subquery') + + if subq is None: + return + + assert subq.session is context.session, ( + "Subquery session doesn't refer to that of " + "our context. Are there broken context caching " + "schemes being used?" + ) + + local_cols = self.parent_property.local_columns + + # cache the loaded collections in the context + # so that inheriting mappers don't re-load when they + # call upon create_row_processor again + collections = path.get(context.attributes, "collections") + if collections is None: + collections = self._SubqCollections(subq) + path.set(context.attributes, 'collections', collections) + + if adapter: + local_cols = [adapter.columns[c] for c in local_cols] + + if self.uselist: + self._create_collection_loader( + context, collections, local_cols, populators) + else: + self._create_scalar_loader( + context, collections, local_cols, populators) + + def _create_collection_loader( + self, context, collections, local_cols, populators): + def load_collection_from_subq(state, dict_, row): + collection = collections.get( + tuple([row[col] for col in local_cols]), + () + ) + state.get_impl(self.key).\ + set_committed_value(state, dict_, collection) + + populators["new"].append((self.key, load_collection_from_subq)) + if context.invoke_all_eagers: + populators["eager"].append((self.key, collections.loader)) + + def _create_scalar_loader( + self, context, collections, local_cols, populators): + def load_scalar_from_subq(state, dict_, row): + collection = collections.get( + tuple([row[col] for col in local_cols]), + (None,) + ) + if len(collection) > 1: + util.warn( + "Multiple rows returned with " + "uselist=False for eagerly-loaded attribute '%s' " + % self) + + scalar = collection[0] + state.get_impl(self.key).\ + set_committed_value(state, dict_, scalar) + + populators["new"].append((self.key, load_scalar_from_subq)) + if context.invoke_all_eagers: + populators["eager"].append((self.key, collections.loader)) + + +@log.class_logger +@properties.RelationshipProperty.strategy_for(lazy="joined") +@properties.RelationshipProperty.strategy_for(lazy=False) +class JoinedLoader(AbstractRelationshipLoader): + """Provide loading behavior for a :class:`.RelationshipProperty` + using joined eager loading. 
+ + """ + + __slots__ = 'join_depth', + + def __init__(self, parent): + super(JoinedLoader, self).__init__(parent) + self.join_depth = self.parent_property.join_depth + + def init_class_attribute(self, mapper): + self.parent_property.\ + _get_strategy_by_cls(LazyLoader).init_class_attribute(mapper) + + def setup_query( + self, context, entity, path, loadopt, adapter, + column_collection=None, parentmapper=None, + chained_from_outerjoin=False, + **kwargs): + """Add a left outer join to the statement that's being constructed.""" + + if not context.query._enable_eagerloads: + return + elif context.query._yield_per and self.uselist: + context.query._no_yield_per("joined collection") + + path = path[self.parent_property] + + with_polymorphic = None + + user_defined_adapter = self._init_user_defined_eager_proc( + loadopt, context) if loadopt else False + + if user_defined_adapter is not False: + clauses, adapter, add_to_collection = \ + self._setup_query_on_user_defined_adapter( + context, entity, path, adapter, + user_defined_adapter + ) + else: + # if not via query option, check for + # a cycle + if not path.contains(context.attributes, "loader"): + if self.join_depth: + if path.length / 2 > self.join_depth: + return + elif path.contains_mapper(self.mapper): + return + + clauses, adapter, add_to_collection, chained_from_outerjoin = \ + self._generate_row_adapter( + context, entity, path, loadopt, adapter, + column_collection, parentmapper, chained_from_outerjoin + ) + + with_poly_info = path.get( + context.attributes, + "path_with_polymorphic", + None + ) + if with_poly_info is not None: + with_polymorphic = with_poly_info.with_polymorphic_mappers + else: + with_polymorphic = None + + path = path[self.mapper] + + loading._setup_entity_query( + context, self.mapper, entity, + path, clauses, add_to_collection, + with_polymorphic=with_polymorphic, + parentmapper=self.mapper, + chained_from_outerjoin=chained_from_outerjoin) + + if with_poly_info is not None and \ + None in set(context.secondary_columns): + raise sa_exc.InvalidRequestError( + "Detected unaliased columns when generating joined " + "load. Make sure to use aliased=True or flat=True " + "when using joined loading with with_polymorphic()." + ) + + def _init_user_defined_eager_proc(self, loadopt, context): + + # check if the opt applies at all + if "eager_from_alias" not in loadopt.local_opts: + # nope + return False + + path = loadopt.path.parent + + # the option applies. check if the "user_defined_eager_row_processor" + # has been built up. + adapter = path.get( + context.attributes, + "user_defined_eager_row_processor", False) + if adapter is not False: + # just return it + return adapter + + # otherwise figure it out. 
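# --- illustrative sketch (editorial, not part of the vendored source) ---
# The "eager_from_alias" option tested above is populated by
# contains_eager(..., alias=...): the user supplies the JOIN, and the loader
# builds its row adapter against that alias instead of generating a join of
# its own.  Reuses the hypothetical User/Address mapping from the earlier
# sketch.
from sqlalchemy.orm import aliased, contains_eager

addr_alias = aliased(Address)
users = (
    session.query(User)
    .join(addr_alias, User.addresses)
    .options(contains_eager(User.addresses, alias=addr_alias))
    .all()
)
# User.addresses is populated from the aliased columns already present in
# the statement; no additional eager JOIN is rendered.
# --- end sketch ---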
+ alias = loadopt.local_opts["eager_from_alias"] + + root_mapper, prop = path[-2:] + + #from .mapper import Mapper + #from .interfaces import MapperProperty + #assert isinstance(root_mapper, Mapper) + #assert isinstance(prop, MapperProperty) + + if alias is not None: + if isinstance(alias, str): + alias = prop.target.alias(alias) + adapter = sql_util.ColumnAdapter( + alias, + equivalents=prop.mapper._equivalent_columns) + else: + if path.contains(context.attributes, "path_with_polymorphic"): + with_poly_info = path.get( + context.attributes, + "path_with_polymorphic") + adapter = orm_util.ORMAdapter( + with_poly_info.entity, + equivalents=prop.mapper._equivalent_columns) + else: + adapter = context.query._polymorphic_adapters.get( + prop.mapper, None) + path.set( + context.attributes, + "user_defined_eager_row_processor", + adapter) + + return adapter + + def _setup_query_on_user_defined_adapter( + self, context, entity, + path, adapter, user_defined_adapter): + + # apply some more wrapping to the "user defined adapter" + # if we are setting up the query for SQL render. + adapter = entity._get_entity_clauses(context.query, context) + + if adapter and user_defined_adapter: + user_defined_adapter = user_defined_adapter.wrap(adapter) + path.set( + context.attributes, "user_defined_eager_row_processor", + user_defined_adapter) + elif adapter: + user_defined_adapter = adapter + path.set( + context.attributes, "user_defined_eager_row_processor", + user_defined_adapter) + + add_to_collection = context.primary_columns + return user_defined_adapter, adapter, add_to_collection + + def _generate_row_adapter( + self, + context, entity, path, loadopt, adapter, + column_collection, parentmapper, chained_from_outerjoin): + with_poly_info = path.get( + context.attributes, + "path_with_polymorphic", + None + ) + if with_poly_info: + to_adapt = with_poly_info.entity + else: + to_adapt = orm_util.AliasedClass( + self.mapper, + flat=True, + use_mapper_path=True) + clauses = orm_util.ORMAdapter( + to_adapt, + equivalents=self.mapper._equivalent_columns, + adapt_required=True, allow_label_resolve=False, + anonymize_labels=True) + assert clauses.aliased_class is not None + + if self.parent_property.uselist: + context.multi_row_eager_loaders = True + + innerjoin = ( + loadopt.local_opts.get( + 'innerjoin', self.parent_property.innerjoin) + if loadopt is not None + else self.parent_property.innerjoin + ) + + if not innerjoin: + # if this is an outer join, all non-nested eager joins from + # this path must also be outer joins + chained_from_outerjoin = True + + context.create_eager_joins.append( + ( + self._create_eager_join, context, + entity, path, adapter, + parentmapper, clauses, innerjoin, chained_from_outerjoin + ) + ) + + add_to_collection = context.secondary_columns + path.set(context.attributes, "eager_row_processor", clauses) + + return clauses, adapter, add_to_collection, chained_from_outerjoin + + def _create_eager_join( + self, context, entity, + path, adapter, parentmapper, + clauses, innerjoin, chained_from_outerjoin): + + if parentmapper is None: + localparent = entity.mapper + else: + localparent = parentmapper + + # whether or not the Query will wrap the selectable in a subquery, + # and then attach eager load joins to that (i.e., in the case of + # LIMIT/OFFSET etc.) 
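# --- illustrative sketch (editorial, not part of the vendored source) ---
# The should_nest_selectable flag computed just below, in user terms: a
# collection joinedload combined with LIMIT/OFFSET makes the Query wrap the
# parent rows in a subquery and attach the eager JOIN to that, so the limit
# counts parents rather than joined rows.  Reuses the hypothetical
# User/Address mapping from the earlier sketch.
from sqlalchemy.orm import joinedload

q = session.query(User).options(joinedload(User.addresses)).limit(2)
# renders roughly:
#   SELECT anon_1.user_id, ..., address.id, ... FROM
#       (SELECT "user".id AS user_id FROM "user" LIMIT 2) AS anon_1
#   LEFT OUTER JOIN address ON anon_1.user_id = address.user_id
print(q.all())
# --- end sketch ---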
+ should_nest_selectable = context.multi_row_eager_loaders and \ + context.query._should_nest_selectable + + entity_key = None + + if entity not in context.eager_joins and \ + not should_nest_selectable and \ + context.from_clause: + index, clause = sql_util.find_join_source( + context.from_clause, entity.selectable) + if clause is not None: + # join to an existing FROM clause on the query. + # key it to its list index in the eager_joins dict. + # Query._compile_context will adapt as needed and + # append to the FROM clause of the select(). + entity_key, default_towrap = index, clause + + if entity_key is None: + entity_key, default_towrap = entity, entity.selectable + + towrap = context.eager_joins.setdefault(entity_key, default_towrap) + + if adapter: + if getattr(adapter, 'aliased_class', None): + # joining from an adapted entity. The adapted entity + # might be a "with_polymorphic", so resolve that to our + # specific mapper's entity before looking for our attribute + # name on it. + efm = inspect(adapter.aliased_class).\ + _entity_for_mapper( + parentmapper + if parentmapper.isa(self.parent) else self.parent) + + # look for our attribute on the adapted entity, else fall back + # to our straight property + onclause = getattr( + efm.entity, self.key, + self.parent_property) + else: + onclause = getattr( + orm_util.AliasedClass( + self.parent, + adapter.selectable, + use_mapper_path=True + ), + self.key, self.parent_property + ) + + else: + onclause = self.parent_property + + assert clauses.aliased_class is not None + + attach_on_outside = ( + not chained_from_outerjoin or + not innerjoin or innerjoin == 'unnested') + + if attach_on_outside: + # this is the "classic" eager join case. + eagerjoin = orm_util._ORMJoin( + towrap, + clauses.aliased_class, + onclause, + isouter=not innerjoin or ( + chained_from_outerjoin and isinstance(towrap, sql.Join) + ), _left_memo=self.parent, _right_memo=self.mapper + ) + else: + # all other cases are innerjoin=='nested' approach + eagerjoin = self._splice_nested_inner_join( + path, towrap, clauses, onclause) + + context.eager_joins[entity_key] = eagerjoin + + # send a hint to the Query as to where it may "splice" this join + eagerjoin.stop_on = entity.selectable + + if not parentmapper: + # for parentclause that is the non-eager end of the join, + # ensure all the parent cols in the primaryjoin are actually + # in the + # columns clause (i.e. are not deferred), so that aliasing applied + # by the Query propagates those columns outward. + # This has the effect + # of "undefering" those columns. 
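# --- illustrative sketch (editorial, not part of the vendored source) ---
# The attach_on_outside / right-nesting decision earlier in
# _create_eager_join, seen from user code: chaining an inner joinedload from
# an outer one right-nests the INNER JOIN so that outer rows are not
# incorrectly filtered away.  The A/B/C models are hypothetical.
from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, joinedload, relationship

BaseABC = declarative_base()


class A(BaseABC):
    __tablename__ = 'a'
    id = Column(Integer, primary_key=True)
    bs = relationship('B')


class B(BaseABC):
    __tablename__ = 'b'
    id = Column(Integer, primary_key=True)
    a_id = Column(Integer, ForeignKey('a.id'))
    cs = relationship('C')


class C(BaseABC):
    __tablename__ = 'c'
    id = Column(Integer, primary_key=True)
    b_id = Column(Integer, ForeignKey('b.id'))


abc_session = Session(create_engine('sqlite://'))
q = abc_session.query(A).options(
    joinedload(A.bs, innerjoin=False).joinedload(B.cs, innerjoin=True))
print(q)  # a LEFT OUTER JOIN (b JOIN c ON ...) ON ... -- right-nested;
          # with innerjoin="unnested" it degrades to LEFT OUTER JOIN instead
# --- end sketch ---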
+ for col in sql_util._find_columns( + self.parent_property.primaryjoin): + if localparent.mapped_table.c.contains_column(col): + if adapter: + col = adapter.columns[col] + context.primary_columns.append(col) + + if self.parent_property.order_by: + context.eager_order_by += eagerjoin._target_adapter.\ + copy_and_process( + util.to_list( + self.parent_property.order_by + ) + ) + + def _splice_nested_inner_join( + self, path, join_obj, clauses, onclause, splicing=False): + + if splicing is False: + # first call is always handed a join object + # from the outside + assert isinstance(join_obj, orm_util._ORMJoin) + elif isinstance(join_obj, sql.selectable.FromGrouping): + return self._splice_nested_inner_join( + path, join_obj.element, clauses, onclause, splicing + ) + elif not isinstance(join_obj, orm_util._ORMJoin): + if path[-2] is splicing: + return orm_util._ORMJoin( + join_obj, clauses.aliased_class, + onclause, isouter=False, + _left_memo=splicing, + _right_memo=path[-1].mapper + ) + else: + # only here if splicing == True + return None + + target_join = self._splice_nested_inner_join( + path, join_obj.right, clauses, + onclause, join_obj._right_memo) + if target_join is None: + right_splice = False + target_join = self._splice_nested_inner_join( + path, join_obj.left, clauses, + onclause, join_obj._left_memo) + if target_join is None: + # should only return None when recursively called, + # e.g. splicing==True + assert splicing is not False, \ + "assertion failed attempting to produce joined eager loads" + return None + else: + right_splice = True + + if right_splice: + # for a right splice, attempt to flatten out + # a JOIN b JOIN c JOIN .. to avoid needless + # parenthesis nesting + if not join_obj.isouter and not target_join.isouter: + eagerjoin = join_obj._splice_into_center(target_join) + else: + eagerjoin = orm_util._ORMJoin( + join_obj.left, target_join, + join_obj.onclause, isouter=join_obj.isouter, + _left_memo=join_obj._left_memo) + else: + eagerjoin = orm_util._ORMJoin( + target_join, join_obj.right, + join_obj.onclause, isouter=join_obj.isouter, + _right_memo=join_obj._right_memo) + + eagerjoin._target_adapter = target_join._target_adapter + return eagerjoin + + def _create_eager_adapter(self, context, result, adapter, path, loadopt): + user_defined_adapter = self._init_user_defined_eager_proc( + loadopt, context) if loadopt else False + + if user_defined_adapter is not False: + decorator = user_defined_adapter + # user defined eagerloads are part of the "primary" + # portion of the load. + # the adapters applied to the Query should be honored. + if context.adapter and decorator: + decorator = decorator.wrap(context.adapter) + elif context.adapter: + decorator = context.adapter + else: + decorator = path.get(context.attributes, "eager_row_processor") + if decorator is None: + return False + + if self.mapper._result_has_identity_key(result, decorator): + return decorator + else: + # no identity key - don't return a row + # processor, will cause a degrade to lazy + return False + + def create_row_processor( + self, context, path, loadopt, mapper, + result, adapter, populators): + if not self.parent.class_manager[self.key].impl.supports_population: + raise sa_exc.InvalidRequestError( + "'%s' does not support object " + "population - eager loading cannot be applied." 
% + self + ) + + our_path = path[self.parent_property] + + eager_adapter = self._create_eager_adapter( + context, + result, + adapter, our_path, loadopt) + + if eager_adapter is not False: + key = self.key + + _instance = loading._instance_processor( + self.mapper, + context, + result, + our_path[self.mapper], + eager_adapter) + + if not self.uselist: + self._create_scalar_loader(context, key, _instance, populators) + else: + self._create_collection_loader( + context, key, _instance, populators) + else: + self.parent_property._get_strategy_by_cls(LazyLoader).\ + create_row_processor( + context, path, loadopt, + mapper, result, adapter, populators) + + def _create_collection_loader(self, context, key, _instance, populators): + def load_collection_from_joined_new_row(state, dict_, row): + collection = attributes.init_state_collection( + state, dict_, key) + result_list = util.UniqueAppender(collection, + 'append_without_event') + context.attributes[(state, key)] = result_list + inst = _instance(row) + if inst is not None: + result_list.append(inst) + + def load_collection_from_joined_existing_row(state, dict_, row): + if (state, key) in context.attributes: + result_list = context.attributes[(state, key)] + else: + # appender_key can be absent from context.attributes + # with isnew=False when self-referential eager loading + # is used; the same instance may be present in two + # distinct sets of result columns + collection = attributes.init_state_collection( + state, dict_, key) + result_list = util.UniqueAppender( + collection, + 'append_without_event') + context.attributes[(state, key)] = result_list + inst = _instance(row) + if inst is not None: + result_list.append(inst) + + def load_collection_from_joined_exec(state, dict_, row): + _instance(row) + + populators["new"].append((self.key, load_collection_from_joined_new_row)) + populators["existing"].append( + (self.key, load_collection_from_joined_existing_row)) + if context.invoke_all_eagers: + populators["eager"].append( + (self.key, load_collection_from_joined_exec)) + + def _create_scalar_loader(self, context, key, _instance, populators): + def load_scalar_from_joined_new_row(state, dict_, row): + # set a scalar object instance directly on the parent + # object, bypassing InstrumentedAttribute event handlers. + dict_[key] = _instance(row) + + def load_scalar_from_joined_existing_row(state, dict_, row): + # call _instance on the row, even though the object has + # been created, so that we further descend into properties + existing = _instance(row) + if existing is not None \ + and key in dict_ \ + and existing is not dict_[key]: + util.warn( + "Multiple rows returned with " + "uselist=False for eagerly-loaded attribute '%s' " + % self) + + def load_scalar_from_joined_exec(state, dict_, row): + _instance(row) + + populators["new"].append((self.key, load_scalar_from_joined_new_row)) + populators["existing"].append( + (self.key, load_scalar_from_joined_existing_row)) + if context.invoke_all_eagers: + populators["eager"].append((self.key, load_scalar_from_joined_exec)) + + +def single_parent_validator(desc, prop): + def _do_check(state, value, oldvalue, initiator): + if value is not None and initiator.key == prop.key: + hasparent = initiator.hasparent(attributes.instance_state(value)) + if hasparent and oldvalue is not value: + raise sa_exc.InvalidRequestError( + "Instance %s is already associated with an instance " + "of %s via its %s attribute, and is only allowed a " + "single parent." 
% + (orm_util.instance_str(value), state.class_, prop) + ) + return value + + def append(state, value, initiator): + return _do_check(state, value, None, initiator) + + def set_(state, value, oldvalue, initiator): + return _do_check(state, value, oldvalue, initiator) + + event.listen( + desc, 'append', append, raw=True, retval=True, + active_history=True) + event.listen( + desc, 'set', set_, raw=True, retval=True, + active_history=True) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/strategy_options.py b/lib/python3.4/site-packages/sqlalchemy/orm/strategy_options.py new file mode 100644 index 0000000..141f867 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/strategy_options.py @@ -0,0 +1,1056 @@ +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" + +""" + +from .interfaces import MapperOption, PropComparator +from .. import util +from ..sql.base import _generative, Generative +from .. import exc as sa_exc, inspect +from .base import _is_aliased_class, _class_to_mapper +from . import util as orm_util +from .path_registry import PathRegistry, TokenRegistry, \ + _WILDCARD_TOKEN, _DEFAULT_TOKEN + + +class Load(Generative, MapperOption): + """Represents loader options which modify the state of a + :class:`.Query` in order to affect how various mapped attributes are + loaded. + + .. versionadded:: 0.9.0 The :meth:`.Load` system is a new foundation for + the existing system of loader options, including options such as + :func:`.orm.joinedload`, :func:`.orm.defer`, and others. In + particular, it introduces a new method-chained system that replaces the + need for dot-separated paths as well as "_all()" options such as + :func:`.orm.joinedload_all`. + + A :class:`.Load` object can be used directly or indirectly. To use one + directly, instantiate given the parent class. This style of usage is + useful when dealing with a :class:`.Query` that has multiple entities, + or when producing a loader option that can be applied generically to + any style of query:: + + myopt = Load(MyClass).joinedload("widgets") + + The above ``myopt`` can now be used with :meth:`.Query.options`:: + + session.query(MyClass).options(myopt) + + The :class:`.Load` construct is invoked indirectly whenever one makes use + of the various loader options that are present in ``sqlalchemy.orm``, + including options such as :func:`.orm.joinedload`, :func:`.orm.defer`, + :func:`.orm.subqueryload`, and all the rest. These constructs produce an + "anonymous" form of the :class:`.Load` object which tracks attributes and + options, but is not linked to a parent class until it is associated with a + parent :class:`.Query`:: + + # produce "unbound" Load object + myopt = joinedload("widgets") + + # when applied using options(), the option is "bound" to the + # class observed in the given query, e.g. MyClass + session.query(MyClass).options(myopt) + + Whether the direct or indirect style is used, the :class:`.Load` object + returned now represents a specific "path" along the entities of a + :class:`.Query`. This path can be traversed using a standard + method-chaining approach. 
Supposing a class hierarchy such as ``User``, + ``User.addresses -> Address``, ``User.orders -> Order`` and + ``Order.items -> Item``, we can specify a variety of loader options along + each element in the "path":: + + session.query(User).options( + joinedload("addresses"), + subqueryload("orders").joinedload("items") + ) + + Where above, the ``addresses`` collection will be joined-loaded, the + ``orders`` collection will be subquery-loaded, and within that subquery + load the ``items`` collection will be joined-loaded. + + + """ + + def __init__(self, entity): + insp = inspect(entity) + self.path = insp._path_registry + self.context = {} + self.local_opts = {} + + def _generate(self): + cloned = super(Load, self)._generate() + cloned.local_opts = {} + return cloned + + _merge_into_path = False + strategy = None + propagate_to_loaders = False + + def process_query(self, query): + self._process(query, True) + + def process_query_conditionally(self, query): + self._process(query, False) + + def _process(self, query, raiseerr): + current_path = query._current_path + if current_path: + for (token, start_path), loader in self.context.items(): + chopped_start_path = self._chop_path(start_path, current_path) + if chopped_start_path is not None: + query._attributes[(token, chopped_start_path)] = loader + else: + query._attributes.update(self.context) + + def _generate_path(self, path, attr, wildcard_key, raiseerr=True): + if raiseerr and not path.has_entity: + if isinstance(path, TokenRegistry): + raise sa_exc.ArgumentError( + "Wildcard token cannot be followed by another entity") + else: + raise sa_exc.ArgumentError( + "Attribute '%s' of entity '%s' does not " + "refer to a mapped entity" % + (path.prop.key, path.parent.entity) + ) + + if isinstance(attr, util.string_types): + default_token = attr.endswith(_DEFAULT_TOKEN) + if attr.endswith(_WILDCARD_TOKEN) or default_token: + if default_token: + self.propagate_to_loaders = False + if wildcard_key: + attr = "%s:%s" % (wildcard_key, attr) + return path.token(attr) + + try: + # use getattr on the class to work around + # synonyms, hybrids, etc. + attr = getattr(path.entity.class_, attr) + except AttributeError: + if raiseerr: + raise sa_exc.ArgumentError( + "Can't find property named '%s' on the " + "mapped entity %s in this Query. 
" % ( + attr, path.entity) + ) + else: + return None + else: + attr = attr.property + + path = path[attr] + else: + prop = attr.property + + if not prop.parent.common_parent(path.mapper): + if raiseerr: + raise sa_exc.ArgumentError( + "Attribute '%s' does not " + "link from element '%s'" % (attr, path.entity)) + else: + return None + + if getattr(attr, '_of_type', None): + ac = attr._of_type + ext_info = inspect(ac) + + path_element = ext_info.mapper + existing = path.entity_path[prop].get( + self.context, "path_with_polymorphic") + if not ext_info.is_aliased_class: + ac = orm_util.with_polymorphic( + ext_info.mapper.base_mapper, + ext_info.mapper, aliased=True, + _use_mapper_path=True, + _existing_alias=existing) + path.entity_path[prop].set( + self.context, "path_with_polymorphic", inspect(ac)) + path = path[prop][path_element] + else: + path = path[prop] + + if path.has_entity: + path = path.entity_path + return path + + def __str__(self): + return "Load(strategy=%r)" % (self.strategy, ) + + def _coerce_strat(self, strategy): + if strategy is not None: + strategy = tuple(sorted(strategy.items())) + return strategy + + @_generative + def set_relationship_strategy( + self, attr, strategy, propagate_to_loaders=True): + strategy = self._coerce_strat(strategy) + + self.propagate_to_loaders = propagate_to_loaders + # if the path is a wildcard, this will set propagate_to_loaders=False + self.path = self._generate_path(self.path, attr, "relationship") + self.strategy = strategy + if strategy is not None: + self._set_path_strategy() + + @_generative + def set_column_strategy(self, attrs, strategy, opts=None): + strategy = self._coerce_strat(strategy) + + for attr in attrs: + path = self._generate_path(self.path, attr, "column") + cloned = self._generate() + cloned.strategy = strategy + cloned.path = path + cloned.propagate_to_loaders = True + if opts: + cloned.local_opts.update(opts) + cloned._set_path_strategy() + + def _set_path_strategy(self): + if self._merge_into_path: + # special helper for undefer_group + existing = self.path.get(self.context, "loader") + if existing: + existing.local_opts.update(self.local_opts) + else: + self.path.set(self.context, "loader", self) + + elif self.path.has_entity: + self.path.parent.set(self.context, "loader", self) + else: + self.path.set(self.context, "loader", self) + + def __getstate__(self): + d = self.__dict__.copy() + d["path"] = self.path.serialize() + return d + + def __setstate__(self, state): + self.__dict__.update(state) + self.path = PathRegistry.deserialize(self.path) + + def _chop_path(self, to_chop, path): + i = -1 + + for i, (c_token, p_token) in enumerate(zip(to_chop, path.path)): + if isinstance(c_token, util.string_types): + # TODO: this is approximated from the _UnboundLoad + # version and probably has issues, not fully covered. + + if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN): + return to_chop + elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and \ + c_token != p_token.key: + return None + + if c_token is p_token: + continue + else: + return None + return to_chop[i + 1:] + + +class _UnboundLoad(Load): + """Represent a loader option that isn't tied to a root entity. + + The loader option will produce an entity-linked :class:`.Load` + object when it is passed :meth:`.Query.options`. + + This provides compatibility with the traditional system + of freestanding options, e.g. ``joinedload('x.y.z')``. 
+ + """ + + def __init__(self): + self.path = () + self._to_bind = set() + self.local_opts = {} + + _is_chain_link = False + + def _set_path_strategy(self): + self._to_bind.add(self) + + def _generate_path(self, path, attr, wildcard_key): + if wildcard_key and isinstance(attr, util.string_types) and \ + attr in (_WILDCARD_TOKEN, _DEFAULT_TOKEN): + if attr == _DEFAULT_TOKEN: + self.propagate_to_loaders = False + attr = "%s:%s" % (wildcard_key, attr) + + return path + (attr, ) + + def __getstate__(self): + d = self.__dict__.copy() + d['path'] = ret = [] + for token in util.to_list(self.path): + if isinstance(token, PropComparator): + ret.append((token._parentmapper.class_, token.key)) + else: + ret.append(token) + return d + + def __setstate__(self, state): + ret = [] + for key in state['path']: + if isinstance(key, tuple): + cls, propkey = key + ret.append(getattr(cls, propkey)) + else: + ret.append(key) + state['path'] = tuple(ret) + self.__dict__ = state + + def _process(self, query, raiseerr): + for val in self._to_bind: + val._bind_loader(query, query._attributes, raiseerr) + + @classmethod + def _from_keys(self, meth, keys, chained, kw): + opt = _UnboundLoad() + + def _split_key(key): + if isinstance(key, util.string_types): + # coerce fooload('*') into "default loader strategy" + if key == _WILDCARD_TOKEN: + return (_DEFAULT_TOKEN, ) + # coerce fooload(".*") into "wildcard on default entity" + elif key.startswith("." + _WILDCARD_TOKEN): + key = key[1:] + return key.split(".") + else: + return (key,) + all_tokens = [token for key in keys for token in _split_key(key)] + + for token in all_tokens[0:-1]: + if chained: + opt = meth(opt, token, **kw) + else: + opt = opt.defaultload(token) + opt._is_chain_link = True + + opt = meth(opt, all_tokens[-1], **kw) + opt._is_chain_link = False + + return opt + + def _chop_path(self, to_chop, path): + i = -1 + for i, (c_token, (p_mapper, p_prop)) in enumerate( + zip(to_chop, path.pairs())): + if isinstance(c_token, util.string_types): + if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN): + return to_chop + elif c_token != 'relationship:%s' % ( + _WILDCARD_TOKEN,) and c_token != p_prop.key: + return None + elif isinstance(c_token, PropComparator): + if c_token.property is not p_prop: + return None + else: + i += 1 + + return to_chop[i:] + + def _bind_loader(self, query, context, raiseerr): + start_path = self.path + # _current_path implies we're in a + # secondary load with an existing path + + current_path = query._current_path + if current_path: + start_path = self._chop_path(start_path, current_path) + + if not start_path: + return None + + token = start_path[0] + + if isinstance(token, util.string_types): + entity = self._find_entity_basestring(query, token, raiseerr) + elif isinstance(token, PropComparator): + prop = token.property + entity = self._find_entity_prop_comparator( + query, + prop.key, + token._parententity, + raiseerr) + + else: + raise sa_exc.ArgumentError( + "mapper option expects " + "string key or list of attributes") + + if not entity: + return + + path_element = entity.entity_zero + + # transfer our entity-less state into a Load() object + # with a real entity path. 
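# --- illustrative sketch (editorial, not part of the vendored source) ---
# The unbound->bound transfer performed just below, from the caller's side:
# joinedload("addresses") builds an _UnboundLoad, and Query.options() binds
# it to the query's lead entity, producing the Load(User) form.  Reuses the
# hypothetical User/Address mapping from the earlier sketch.
from sqlalchemy.orm import Load, joinedload

session.query(User).options(joinedload("addresses")).all()
# the equivalent, explicitly bound spelling:
session.query(User).options(Load(User).joinedload("addresses")).all()
# --- end sketch ---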
+ loader = Load(path_element) + loader.context = context + loader.strategy = self.strategy + + path = loader.path + for token in start_path: + loader.path = path = loader._generate_path( + loader.path, token, None, raiseerr) + if path is None: + return + + loader.local_opts.update(self.local_opts) + + if loader.path.has_entity: + effective_path = loader.path.parent + else: + effective_path = loader.path + + # prioritize "first class" options over those + # that were "links in the chain", e.g. "x" and "y" in + # someload("x.y.z") versus someload("x") / someload("x.y") + + if effective_path.is_token: + for path in effective_path.generate_for_superclasses(): + if self._merge_into_path: + # special helper for undefer_group + existing = path.get(context, "loader") + if existing: + existing.local_opts.update(self.local_opts) + else: + path.set(context, "loader", loader) + elif self._is_chain_link: + path.setdefault(context, "loader", loader) + else: + path.set(context, "loader", loader) + else: + # only supported for the undefer_group() wildcard opt + assert not self._merge_into_path + if self._is_chain_link: + effective_path.setdefault(context, "loader", loader) + else: + effective_path.set(context, "loader", loader) + + def _find_entity_prop_comparator(self, query, token, mapper, raiseerr): + if _is_aliased_class(mapper): + searchfor = mapper + else: + searchfor = _class_to_mapper(mapper) + for ent in query._mapper_entities: + if ent.corresponds_to(searchfor): + return ent + else: + if raiseerr: + if not list(query._mapper_entities): + raise sa_exc.ArgumentError( + "Query has only expression-based entities - " + "can't find property named '%s'." + % (token, ) + ) + else: + raise sa_exc.ArgumentError( + "Can't find property '%s' on any entity " + "specified in this Query. Note the full path " + "from root (%s) to target entity must be specified." + % (token, ",".join(str(x) for + x in query._mapper_entities)) + ) + else: + return None + + def _find_entity_basestring(self, query, token, raiseerr): + if token.endswith(':' + _WILDCARD_TOKEN): + if len(list(query._mapper_entities)) != 1: + if raiseerr: + raise sa_exc.ArgumentError( + "Wildcard loader can only be used with exactly " + "one entity. Use Load(ent) to specify " + "specific entities.") + elif token.endswith(_DEFAULT_TOKEN): + raiseerr = False + + for ent in query._mapper_entities: + # return only the first _MapperEntity when searching + # based on string prop name. Ideally object + # attributes are used to specify more exactly. + return ent + else: + if raiseerr: + raise sa_exc.ArgumentError( + "Query has only expression-based entities - " + "can't find property named '%s'." + % (token, ) + ) + else: + return None + + +class loader_option(object): + def __init__(self): + pass + + def __call__(self, fn): + self.name = name = fn.__name__ + self.fn = fn + if hasattr(Load, name): + raise TypeError("Load class already has a %s method." % (name)) + setattr(Load, name, fn) + + return self + + def _add_unbound_fn(self, fn): + self._unbound_fn = fn + fn_doc = self.fn.__doc__ + self.fn.__doc__ = """Produce a new :class:`.Load` object with the +:func:`.orm.%(name)s` option applied. + +See :func:`.orm.%(name)s` for usage examples. + +""" % {"name": self.name} + + fn.__doc__ = fn_doc + return self + + def _add_unbound_all_fn(self, fn): + self._unbound_all_fn = fn + fn.__doc__ = """Produce a standalone "all" option for :func:`.orm.%(name)s`. + +.. 
deprecated:: 0.9.0
+
+    The "_all()" style is replaced by method chaining, e.g.::
+
+        session.query(MyClass).options(
+            %(name)s("someattribute").%(name)s("anotherattribute")
+        )
+
+""" % {"name": self.name}
+        return self
+
+
+@loader_option()
+def contains_eager(loadopt, attr, alias=None):
+    """Indicate that the given attribute should be eagerly loaded from
+    columns stated manually in the query.
+
+    This function is part of the :class:`.Load` interface and supports
+    both method-chained and standalone operation.
+
+    The option is used in conjunction with an explicit join that loads
+    the desired rows, i.e.::
+
+        sess.query(Order).\\
+            join(Order.user).\\
+            options(contains_eager(Order.user))
+
+    The above query would join from the ``Order`` entity to its related
+    ``User`` entity, and the returned ``Order`` objects would have the
+    ``Order.user`` attribute pre-populated.
+
+    :func:`contains_eager` also accepts an `alias` argument, which is the
+    string name of an alias, an :func:`~sqlalchemy.sql.expression.alias`
+    construct, or an :func:`~sqlalchemy.orm.aliased` construct. Use this when
+    the eagerly-loaded rows are to come from an aliased table::
+
+        user_alias = aliased(User)
+        sess.query(Order).\\
+            join((user_alias, Order.user)).\\
+            options(contains_eager(Order.user, alias=user_alias))
+
+    .. seealso::
+
+        :ref:`contains_eager`
+
+    """
+    if alias is not None:
+        if not isinstance(alias, str):
+            info = inspect(alias)
+            alias = info.selectable
+
+    cloned = loadopt.set_relationship_strategy(
+        attr,
+        {"lazy": "joined"},
+        propagate_to_loaders=False
+    )
+    cloned.local_opts['eager_from_alias'] = alias
+    return cloned
+
+
+@contains_eager._add_unbound_fn
+def contains_eager(*keys, **kw):
+    return _UnboundLoad()._from_keys(
+        _UnboundLoad.contains_eager, keys, True, kw)
+
+
+@loader_option()
+def load_only(loadopt, *attrs):
+    """Indicate that for a particular entity, only the given list
+    of column-based attribute names should be loaded; all others will be
+    deferred.
+
+    This function is part of the :class:`.Load` interface and supports
+    both method-chained and standalone operation.
+
+    Example - given a class ``User``, load only the ``name`` and ``fullname``
+    attributes::
+
+        session.query(User).options(load_only("name", "fullname"))
+
+    Example - given a relationship ``User.addresses -> Address``, specify
+    subquery loading for the ``User.addresses`` collection, but on each
+    ``Address`` object load only the ``email_address`` attribute::
+
+        session.query(User).options(
+            subqueryload("addresses").load_only("email_address")
+        )
+
+    For a :class:`.Query` that has multiple entities, the lead entity can be
+    specifically referred to using the :class:`.Load` constructor::
+
+        session.query(User, Address).join(User.addresses).options(
+            Load(User).load_only("name", "fullname"),
+            Load(Address).load_only("email_address")
+        )
+
+
+    .. versionadded:: 0.9.0
+
+    """
+    cloned = loadopt.set_column_strategy(
+        attrs,
+        {"deferred": False, "instrument": True}
+    )
+    cloned.set_column_strategy("*",
+                               {"deferred": True, "instrument": True},
+                               {"undefer_pks": True})
+    return cloned
+
+
+@load_only._add_unbound_fn
+def load_only(*attrs):
+    return _UnboundLoad().load_only(*attrs)
+
+
+@loader_option()
+def joinedload(loadopt, attr, innerjoin=None):
+    """Indicate that the given attribute should be loaded using joined
+    eager loading.
+
+    This function is part of the :class:`.Load` interface and supports
+    both method-chained and standalone operation.
+ + examples:: + + # joined-load the "orders" collection on "User" + query(User).options(joinedload(User.orders)) + + # joined-load Order.items and then Item.keywords + query(Order).options(joinedload(Order.items).joinedload(Item.keywords)) + + # lazily load Order.items, but when Items are loaded, + # joined-load the keywords collection + query(Order).options(lazyload(Order.items).joinedload(Item.keywords)) + + :param innerjoin: if ``True``, indicates that the joined eager load should + use an inner join instead of the default of left outer join:: + + query(Order).options(joinedload(Order.user, innerjoin=True)) + + In order to chain multiple eager joins together where some may be + OUTER and others INNER, right-nested joins are used to link them:: + + query(A).options( + joinedload(A.bs, innerjoin=False). + joinedload(B.cs, innerjoin=True) + ) + + The above query, linking A.bs via "outer" join and B.cs via "inner" join + would render the joins as "a LEFT OUTER JOIN (b JOIN c)". When using + SQLite, this form of JOIN is translated to use full subqueries as this + syntax is otherwise not directly supported. + + The ``innerjoin`` flag can also be stated with the term ``"unnested"``. + This will prevent joins from being right-nested, and will instead + link an "innerjoin" eagerload to an "outerjoin" eagerload by bypassing + the "inner" join. Using this form as follows:: + + query(A).options( + joinedload(A.bs, innerjoin=False). + joinedload(B.cs, innerjoin="unnested") + ) + + Joins will be rendered as "a LEFT OUTER JOIN b LEFT OUTER JOIN c", so that + all of "a" is matched rather than being incorrectly limited by a "b" that + does not contain a "c". + + .. note:: The "unnested" flag does **not** affect the JOIN rendered + from a many-to-many association table, e.g. a table configured + as :paramref:`.relationship.secondary`, to the target table; for + correctness of results, these joins are always INNER and are + therefore right-nested if linked to an OUTER join. + + .. versionadded:: 0.9.4 Added support for "nesting" of eager "inner" + joins. See :ref:`feature_2976`. + + .. versionchanged:: 1.0.0 ``innerjoin=True`` now implies + ``innerjoin="nested"``, whereas in 0.9 it implied + ``innerjoin="unnested"``. In order to achieve the pre-1.0 "unnested" + inner join behavior, use the value ``innerjoin="unnested"``. + See :ref:`migration_3008`. + + .. note:: + + The joins produced by :func:`.orm.joinedload` are **anonymously + aliased**. The criteria by which the join proceeds cannot be + modified, nor can the :class:`.Query` refer to these joins in any way, + including ordering. + + To produce a specific SQL JOIN which is explicitly available, use + :meth:`.Query.join`. To combine explicit JOINs with eager loading + of collections, use :func:`.orm.contains_eager`; see + :ref:`contains_eager`. + + .. seealso:: + + :ref:`loading_toplevel` + + :ref:`contains_eager` + + :func:`.orm.subqueryload` + + :func:`.orm.lazyload` + + :paramref:`.relationship.lazy` + + :paramref:`.relationship.innerjoin` - :func:`.relationship`-level + version of the :paramref:`.joinedload.innerjoin` option. 
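+
+    As a recap of the nesting discussion above (``A``, ``B`` and ``C``
+    standing in for hypothetical mapped classes with ``A.bs`` and ``B.cs``
+    relationships), the two ``innerjoin`` spellings compare as follows::
+
+        # right-nested: renders "a LEFT OUTER JOIN (b JOIN c)"
+        query(A).options(
+            joinedload(A.bs, innerjoin=False).
+            joinedload(B.cs, innerjoin=True))
+
+        # unnested: renders "a LEFT OUTER JOIN b LEFT OUTER JOIN c"
+        query(A).options(
+            joinedload(A.bs, innerjoin=False).
+            joinedload(B.cs, innerjoin="unnested"))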
+ + """ + loader = loadopt.set_relationship_strategy(attr, {"lazy": "joined"}) + if innerjoin is not None: + loader.local_opts['innerjoin'] = innerjoin + return loader + + +@joinedload._add_unbound_fn +def joinedload(*keys, **kw): + return _UnboundLoad._from_keys( + _UnboundLoad.joinedload, keys, False, kw) + + +@joinedload._add_unbound_all_fn +def joinedload_all(*keys, **kw): + return _UnboundLoad._from_keys( + _UnboundLoad.joinedload, keys, True, kw) + + +@loader_option() +def subqueryload(loadopt, attr): + """Indicate that the given attribute should be loaded using + subquery eager loading. + + This function is part of the :class:`.Load` interface and supports + both method-chained and standalone operation. + + examples:: + + # subquery-load the "orders" collection on "User" + query(User).options(subqueryload(User.orders)) + + # subquery-load Order.items and then Item.keywords + query(Order).options(subqueryload(Order.items).subqueryload(Item.keywords)) + + # lazily load Order.items, but when Items are loaded, + # subquery-load the keywords collection + query(Order).options(lazyload(Order.items).subqueryload(Item.keywords)) + + + .. seealso:: + + :ref:`loading_toplevel` + + :func:`.orm.joinedload` + + :func:`.orm.lazyload` + + :paramref:`.relationship.lazy` + + """ + return loadopt.set_relationship_strategy(attr, {"lazy": "subquery"}) + + +@subqueryload._add_unbound_fn +def subqueryload(*keys): + return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, False, {}) + + +@subqueryload._add_unbound_all_fn +def subqueryload_all(*keys): + return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, True, {}) + + +@loader_option() +def lazyload(loadopt, attr): + """Indicate that the given attribute should be loaded using "lazy" + loading. + + This function is part of the :class:`.Load` interface and supports + both method-chained and standalone operation. + + .. seealso:: + + :paramref:`.relationship.lazy` + + """ + return loadopt.set_relationship_strategy(attr, {"lazy": "select"}) + + +@lazyload._add_unbound_fn +def lazyload(*keys): + return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, False, {}) + + +@lazyload._add_unbound_all_fn +def lazyload_all(*keys): + return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, True, {}) + + +@loader_option() +def immediateload(loadopt, attr): + """Indicate that the given attribute should be loaded using + an immediate load with a per-attribute SELECT statement. + + This function is part of the :class:`.Load` interface and supports + both method-chained and standalone operation. + + .. seealso:: + + :ref:`loading_toplevel` + + :func:`.orm.joinedload` + + :func:`.orm.lazyload` + + :paramref:`.relationship.lazy` + + """ + loader = loadopt.set_relationship_strategy(attr, {"lazy": "immediate"}) + return loader + + +@immediateload._add_unbound_fn +def immediateload(*keys): + return _UnboundLoad._from_keys( + _UnboundLoad.immediateload, keys, False, {}) + + +@loader_option() +def noload(loadopt, attr): + """Indicate that the given relationship attribute should remain unloaded. + + This function is part of the :class:`.Load` interface and supports + both method-chained and standalone operation. + + :func:`.orm.noload` applies to :func:`.relationship` attributes; for + column-based attributes, see :func:`.orm.defer`. 
+ + """ + + return loadopt.set_relationship_strategy(attr, {"lazy": "noload"}) + + +@noload._add_unbound_fn +def noload(*keys): + return _UnboundLoad._from_keys(_UnboundLoad.noload, keys, False, {}) + + +@loader_option() +def defaultload(loadopt, attr): + """Indicate an attribute should load using its default loader style. + + This method is used to link to other loader options, such as + to set the :func:`.orm.defer` option on a class that is linked to + a relationship of the parent class being loaded, :func:`.orm.defaultload` + can be used to navigate this path without changing the loading style + of the relationship:: + + session.query(MyClass).options(defaultload("someattr").defer("some_column")) + + .. seealso:: + + :func:`.orm.defer` + + :func:`.orm.undefer` + + """ + return loadopt.set_relationship_strategy( + attr, + None + ) + + +@defaultload._add_unbound_fn +def defaultload(*keys): + return _UnboundLoad._from_keys(_UnboundLoad.defaultload, keys, False, {}) + + +@loader_option() +def defer(loadopt, key): + """Indicate that the given column-oriented attribute should be deferred, e.g. + not loaded until accessed. + + This function is part of the :class:`.Load` interface and supports + both method-chained and standalone operation. + + e.g.:: + + from sqlalchemy.orm import defer + + session.query(MyClass).options( + defer("attribute_one"), + defer("attribute_two")) + + session.query(MyClass).options( + defer(MyClass.attribute_one), + defer(MyClass.attribute_two)) + + To specify a deferred load of an attribute on a related class, + the path can be specified one token at a time, specifying the loading + style for each link along the chain. To leave the loading style + for a link unchanged, use :func:`.orm.defaultload`:: + + session.query(MyClass).options(defaultload("someattr").defer("some_column")) + + A :class:`.Load` object that is present on a certain path can have + :meth:`.Load.defer` called multiple times, each will operate on the same + parent entity:: + + + session.query(MyClass).options( + defaultload("someattr"). + defer("some_column"). + defer("some_other_column"). + defer("another_column") + ) + + :param key: Attribute to be deferred. + + :param \*addl_attrs: Deprecated; this option supports the old 0.8 style + of specifying a path as a series of attributes, which is now superseded + by the method-chained style. + + .. seealso:: + + :ref:`deferred` + + :func:`.orm.undefer` + + """ + return loadopt.set_column_strategy( + (key, ), + {"deferred": True, "instrument": True} + ) + + +@defer._add_unbound_fn +def defer(key, *addl_attrs): + return _UnboundLoad._from_keys( + _UnboundLoad.defer, (key, ) + addl_attrs, False, {}) + + +@loader_option() +def undefer(loadopt, key): + """Indicate that the given column-oriented attribute should be undeferred, + e.g. specified within the SELECT statement of the entity as a whole. + + The column being undeferred is typically set up on the mapping as a + :func:`.deferred` attribute. + + This function is part of the :class:`.Load` interface and supports + both method-chained and standalone operation. + + Examples:: + + # undefer two columns + session.query(MyClass).options(undefer("col1"), undefer("col2")) + + # undefer all columns specific to a single class using Load + * + session.query(MyClass, MyOtherClass).options( + Load(MyClass).undefer("*")) + + :param key: Attribute to be undeferred. 
+
+    :param \*addl_attrs: Deprecated; this option supports the old 0.8 style
+     of specifying a path as a series of attributes, which is now superseded
+     by the method-chained style.
+
+    .. seealso::
+
+        :ref:`deferred`
+
+        :func:`.orm.defer`
+
+        :func:`.orm.undefer_group`
+
+    """
+    return loadopt.set_column_strategy(
+        (key, ),
+        {"deferred": False, "instrument": True}
+    )
+
+
+@undefer._add_unbound_fn
+def undefer(key, *addl_attrs):
+    return _UnboundLoad._from_keys(
+        _UnboundLoad.undefer, (key, ) + addl_attrs, False, {})
+
+
+@loader_option()
+def undefer_group(loadopt, name):
+    """Indicate that columns within the given deferred group name should be
+    undeferred.
+
+    The columns being undeferred are set up on the mapping as
+    :func:`.deferred` attributes and include a "group" name.
+
+    E.g.::
+
+        session.query(MyClass).options(undefer_group("large_attrs"))
+
+    To undefer a group of attributes on a related entity, the path can be
+    spelled out using relationship loader options, such as
+    :func:`.orm.defaultload`::
+
+        session.query(MyClass).options(
+            defaultload("someattr").undefer_group("large_attrs"))
+
+    .. versionchanged:: 0.9.0 :func:`.orm.undefer_group` is now specific to a
+       particular entity load path.
+
+    .. seealso::
+
+        :ref:`deferred`
+
+        :func:`.orm.defer`
+
+        :func:`.orm.undefer`
+
+    """
+    loadopt._merge_into_path = True
+    return loadopt.set_column_strategy(
+        "*",
+        None,
+        {"undefer_group_%s" % name: True}
+    )
+
+
+@undefer_group._add_unbound_fn
+def undefer_group(name):
+    return _UnboundLoad().undefer_group(name)
diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/sync.py b/lib/python3.4/site-packages/sqlalchemy/orm/sync.py
new file mode 100644
index 0000000..ccca508
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/orm/sync.py
@@ -0,0 +1,140 @@
+# orm/sync.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""private module containing functions used for copying data
+between instances based on join conditions.
+
+"""
+
+from . import exc, util as orm_util, attributes
+
+
+def populate(source, source_mapper, dest, dest_mapper,
+             synchronize_pairs, uowcommit, flag_cascaded_pks):
+    source_dict = source.dict
+    dest_dict = dest.dict
+
+    for l, r in synchronize_pairs:
+        try:
+            # inline of source_mapper._get_state_attr_by_column
+            prop = source_mapper._columntoproperty[l]
+            value = source.manager[prop.key].impl.get(source, source_dict,
+                                                      attributes.PASSIVE_OFF)
+        except exc.UnmappedColumnError:
+            _raise_col_to_prop(False, source_mapper, l, dest_mapper, r)
+
+        try:
+            # inline of dest_mapper._set_state_attr_by_column
+            prop = dest_mapper._columntoproperty[r]
+            dest.manager[prop.key].impl.set(dest, dest_dict, value, None)
+        except exc.UnmappedColumnError:
+            _raise_col_to_prop(True, source_mapper, l, dest_mapper, r)
+
+        # technically the "r.primary_key" check isn't
+        # needed here, but we check for this condition to limit
+        # how often this logic is invoked for memory/performance
+        # reasons, since we only need this info for a primary key
+        # destination.
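+        # (e.g. for a typical one-to-many, "l" is the parent's primary
+        # key column and "r" the child's foreign key column, so a parent
+        # id generated at INSERT time cascades into the child row here)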
+ if flag_cascaded_pks and l.primary_key and \ + r.primary_key and \ + r.references(l): + uowcommit.attributes[("pk_cascaded", dest, r)] = True + + +def bulk_populate_inherit_keys( + source_dict, source_mapper, synchronize_pairs): + # a simplified version of populate() used by bulk insert mode + for l, r in synchronize_pairs: + try: + prop = source_mapper._columntoproperty[l] + value = source_dict[prop.key] + except exc.UnmappedColumnError: + _raise_col_to_prop(False, source_mapper, l, source_mapper, r) + + try: + prop = source_mapper._columntoproperty[r] + source_dict[prop.key] = value + except exc.UnmappedColumnError: + _raise_col_to_prop(True, source_mapper, l, source_mapper, r) + + +def clear(dest, dest_mapper, synchronize_pairs): + for l, r in synchronize_pairs: + if r.primary_key and \ + dest_mapper._get_state_attr_by_column( + dest, dest.dict, r) not in orm_util._none_set: + + raise AssertionError( + "Dependency rule tried to blank-out primary key " + "column '%s' on instance '%s'" % + (r, orm_util.state_str(dest)) + ) + try: + dest_mapper._set_state_attr_by_column(dest, dest.dict, r, None) + except exc.UnmappedColumnError: + _raise_col_to_prop(True, None, l, dest_mapper, r) + + +def update(source, source_mapper, dest, old_prefix, synchronize_pairs): + for l, r in synchronize_pairs: + try: + oldvalue = source_mapper._get_committed_attr_by_column( + source.obj(), l) + value = source_mapper._get_state_attr_by_column( + source, source.dict, l, passive=attributes.PASSIVE_OFF) + except exc.UnmappedColumnError: + _raise_col_to_prop(False, source_mapper, l, None, r) + dest[r.key] = value + dest[old_prefix + r.key] = oldvalue + + +def populate_dict(source, source_mapper, dict_, synchronize_pairs): + for l, r in synchronize_pairs: + try: + value = source_mapper._get_state_attr_by_column( + source, source.dict, l, passive=attributes.PASSIVE_OFF) + except exc.UnmappedColumnError: + _raise_col_to_prop(False, source_mapper, l, None, r) + + dict_[r.key] = value + + +def source_modified(uowcommit, source, source_mapper, synchronize_pairs): + """return true if the source object has changes from an old to a + new value on the given synchronize pairs + + """ + for l, r in synchronize_pairs: + try: + prop = source_mapper._columntoproperty[l] + except exc.UnmappedColumnError: + _raise_col_to_prop(False, source_mapper, l, None, r) + history = uowcommit.get_attribute_history( + source, prop.key, attributes.PASSIVE_NO_INITIALIZE) + if bool(history.deleted): + return True + else: + return False + + +def _raise_col_to_prop(isdest, source_mapper, source_column, + dest_mapper, dest_column): + if isdest: + raise exc.UnmappedColumnError( + "Can't execute sync rule for " + "destination column '%s'; mapper '%s' does not map " + "this column. Try using an explicit `foreign_keys` " + "collection which does not include this column (or use " + "a viewonly=True relation)." % (dest_column, dest_mapper)) + else: + raise exc.UnmappedColumnError( + "Can't execute sync rule for " + "source column '%s'; mapper '%s' does not map this " + "column. Try using an explicit `foreign_keys` " + "collection which does not include destination column " + "'%s' (or use a viewonly=True relation)." 
% + (source_column, source_mapper, dest_column)) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/unitofwork.py b/lib/python3.4/site-packages/sqlalchemy/orm/unitofwork.py new file mode 100644 index 0000000..8b4ae64 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/unitofwork.py @@ -0,0 +1,656 @@ +# orm/unitofwork.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""The internals for the unit of work system. + +The session's flush() process passes objects to a contextual object +here, which assembles flush tasks based on mappers and their properties, +organizes them in order of dependency, and executes. + +""" + +from .. import util, event +from ..util import topological +from . import attributes, persistence, util as orm_util +import itertools + + +def track_cascade_events(descriptor, prop): + """Establish event listeners on object attributes which handle + cascade-on-set/append. + + """ + key = prop.key + + def append(state, item, initiator): + # process "save_update" cascade rules for when + # an instance is appended to the list of another instance + + if item is None: + return + + sess = state.session + if sess: + if sess._warn_on_events: + sess._flush_warning("collection append") + + prop = state.manager.mapper._props[key] + item_state = attributes.instance_state(item) + if prop._cascade.save_update and \ + (prop.cascade_backrefs or key == initiator.key) and \ + not sess._contains_state(item_state): + sess._save_or_update_state(item_state) + return item + + def remove(state, item, initiator): + if item is None: + return + + sess = state.session + if sess: + + prop = state.manager.mapper._props[key] + + if sess._warn_on_events: + sess._flush_warning( + "collection remove" + if prop.uselist + else "related attribute delete") + + # expunge pending orphans + item_state = attributes.instance_state(item) + if prop._cascade.delete_orphan and \ + item_state in sess._new and \ + prop.mapper._is_orphan(item_state): + sess.expunge(item) + + def set_(state, newvalue, oldvalue, initiator): + # process "save_update" cascade rules for when an instance + # is attached to another instance + if oldvalue is newvalue: + return newvalue + + sess = state.session + if sess: + + if sess._warn_on_events: + sess._flush_warning("related attribute set") + + prop = state.manager.mapper._props[key] + if newvalue is not None: + newvalue_state = attributes.instance_state(newvalue) + if prop._cascade.save_update and \ + (prop.cascade_backrefs or key == initiator.key) and \ + not sess._contains_state(newvalue_state): + sess._save_or_update_state(newvalue_state) + + if oldvalue is not None and \ + oldvalue is not attributes.NEVER_SET and \ + oldvalue is not attributes.PASSIVE_NO_RESULT and \ + prop._cascade.delete_orphan: + # possible to reach here with attributes.NEVER_SET ? + oldvalue_state = attributes.instance_state(oldvalue) + + if oldvalue_state in sess._new and \ + prop.mapper._is_orphan(oldvalue_state): + sess.expunge(oldvalue) + return newvalue + + event.listen(descriptor, 'append', append, raw=True, retval=True) + event.listen(descriptor, 'remove', remove, raw=True, retval=True) + event.listen(descriptor, 'set', set_, raw=True, retval=True) + + +class UOWTransaction(object): + def __init__(self, session): + self.session = session + + # dictionary used by external actors to + # store arbitrary state information. 
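+        # (for example, get_attribute_history() below caches history
+        # entries here under ("history", state, key) tuples)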
+ self.attributes = {} + + # dictionary of mappers to sets of + # DependencyProcessors, which are also + # set to be part of the sorted flush actions, + # which have that mapper as a parent. + self.deps = util.defaultdict(set) + + # dictionary of mappers to sets of InstanceState + # items pending for flush which have that mapper + # as a parent. + self.mappers = util.defaultdict(set) + + # a dictionary of Preprocess objects, which gather + # additional states impacted by the flush + # and determine if a flush action is needed + self.presort_actions = {} + + # dictionary of PostSortRec objects, each + # one issues work during the flush within + # a certain ordering. + self.postsort_actions = {} + + # a set of 2-tuples, each containing two + # PostSortRec objects where the second + # is dependent on the first being executed + # first + self.dependencies = set() + + # dictionary of InstanceState-> (isdelete, listonly) + # tuples, indicating if this state is to be deleted + # or insert/updated, or just refreshed + self.states = {} + + # tracks InstanceStates which will be receiving + # a "post update" call. Keys are mappers, + # values are a set of states and a set of the + # columns which should be included in the update. + self.post_update_states = util.defaultdict(lambda: (set(), set())) + + @property + def has_work(self): + return bool(self.states) + + def is_deleted(self, state): + """return true if the given state is marked as deleted + within this uowtransaction.""" + + return state in self.states and self.states[state][0] + + def memo(self, key, callable_): + if key in self.attributes: + return self.attributes[key] + else: + self.attributes[key] = ret = callable_() + return ret + + def remove_state_actions(self, state): + """remove pending actions for a state from the uowtransaction.""" + + isdelete = self.states[state][0] + + self.states[state] = (isdelete, True) + + def get_attribute_history(self, state, key, + passive=attributes.PASSIVE_NO_INITIALIZE): + """facade to attributes.get_state_history(), including + caching of results.""" + + hashkey = ("history", state, key) + + # cache the objects, not the states; the strong reference here + # prevents newly loaded objects from being dereferenced during the + # flush process + + if hashkey in self.attributes: + history, state_history, cached_passive = self.attributes[hashkey] + # if the cached lookup was "passive" and now + # we want non-passive, do a non-passive lookup and re-cache + + if not cached_passive & attributes.SQL_OK \ + and passive & attributes.SQL_OK: + impl = state.manager[key].impl + history = impl.get_history(state, state.dict, + attributes.PASSIVE_OFF | + attributes.LOAD_AGAINST_COMMITTED) + if history and impl.uses_objects: + state_history = history.as_state() + else: + state_history = history + self.attributes[hashkey] = (history, state_history, passive) + else: + impl = state.manager[key].impl + # TODO: store the history as (state, object) tuples + # so we don't have to keep converting here + history = impl.get_history(state, state.dict, passive | + attributes.LOAD_AGAINST_COMMITTED) + if history and impl.uses_objects: + state_history = history.as_state() + else: + state_history = history + self.attributes[hashkey] = (history, state_history, + passive) + + return state_history + + def has_dep(self, processor): + return (processor, True) in self.presort_actions + + def register_preprocessor(self, processor, fromparent): + key = (processor, fromparent) + if key not in self.presort_actions: + self.presort_actions[key] = 
Preprocess(processor, fromparent) + + def register_object(self, state, isdelete=False, + listonly=False, cancel_delete=False, + operation=None, prop=None): + if not self.session._contains_state(state): + if not state.deleted and operation is not None: + util.warn("Object of type %s not in session, %s operation " + "along '%s' will not proceed" % + (orm_util.state_class_str(state), operation, prop)) + return False + + if state not in self.states: + mapper = state.manager.mapper + + if mapper not in self.mappers: + self._per_mapper_flush_actions(mapper) + + self.mappers[mapper].add(state) + self.states[state] = (isdelete, listonly) + else: + if not listonly and (isdelete or cancel_delete): + self.states[state] = (isdelete, False) + return True + + def issue_post_update(self, state, post_update_cols): + mapper = state.manager.mapper.base_mapper + states, cols = self.post_update_states[mapper] + states.add(state) + cols.update(post_update_cols) + + def _per_mapper_flush_actions(self, mapper): + saves = SaveUpdateAll(self, mapper.base_mapper) + deletes = DeleteAll(self, mapper.base_mapper) + self.dependencies.add((saves, deletes)) + + for dep in mapper._dependency_processors: + dep.per_property_preprocessors(self) + + for prop in mapper.relationships: + if prop.viewonly: + continue + dep = prop._dependency_processor + dep.per_property_preprocessors(self) + + @util.memoized_property + def _mapper_for_dep(self): + """return a dynamic mapping of (Mapper, DependencyProcessor) to + True or False, indicating if the DependencyProcessor operates + on objects of that Mapper. + + The result is stored in the dictionary persistently once + calculated. + + """ + return util.PopulateDict( + lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop + ) + + def filter_states_for_dep(self, dep, states): + """Filter the given list of InstanceStates to those relevant to the + given DependencyProcessor. + + """ + mapper_for_dep = self._mapper_for_dep + return [s for s in states if mapper_for_dep[(s.manager.mapper, dep)]] + + def states_for_mapper_hierarchy(self, mapper, isdelete, listonly): + checktup = (isdelete, listonly) + for mapper in mapper.base_mapper.self_and_descendants: + for state in self.mappers[mapper]: + if self.states[state] == checktup: + yield state + + def _generate_actions(self): + """Generate the full, unsorted collection of PostSortRecs as + well as dependency pairs for this UOWTransaction. + + """ + # execute presort_actions, until all states + # have been processed. a presort_action might + # add new states to the uow. + while True: + ret = False + for action in list(self.presort_actions.values()): + if action.execute(self): + ret = True + if not ret: + break + + # see if the graph of mapper dependencies has cycles. + self.cycles = cycles = topological.find_cycles( + self.dependencies, + list(self.postsort_actions.values())) + + if cycles: + # if yes, break the per-mapper actions into + # per-state actions + convert = dict( + (rec, set(rec.per_state_flush_actions(self))) + for rec in cycles + ) + + # rewrite the existing dependencies to point to + # the per-state actions for those per-mapper actions + # that were broken up. 
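+            # (e.g. an edge (SaveUpdateAll, ProcessAll) whose left side
+            # is in a cycle is replaced by one edge per per-state action
+            # generated from that SaveUpdateAll)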
+ for edge in list(self.dependencies): + if None in edge or \ + edge[0].disabled or edge[1].disabled or \ + cycles.issuperset(edge): + self.dependencies.remove(edge) + elif edge[0] in cycles: + self.dependencies.remove(edge) + for dep in convert[edge[0]]: + self.dependencies.add((dep, edge[1])) + elif edge[1] in cycles: + self.dependencies.remove(edge) + for dep in convert[edge[1]]: + self.dependencies.add((edge[0], dep)) + + return set([a for a in self.postsort_actions.values() + if not a.disabled + ] + ).difference(cycles) + + def execute(self): + postsort_actions = self._generate_actions() + + # sort = topological.sort(self.dependencies, postsort_actions) + # print "--------------" + # print "\ndependencies:", self.dependencies + # print "\ncycles:", self.cycles + # print "\nsort:", list(sort) + # print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions) + + # execute + if self.cycles: + for set_ in topological.sort_as_subsets( + self.dependencies, + postsort_actions): + while set_: + n = set_.pop() + n.execute_aggregate(self, set_) + else: + for rec in topological.sort( + self.dependencies, + postsort_actions): + rec.execute(self) + + def finalize_flush_changes(self): + """mark processed objects as clean / deleted after a successful + flush(). + + this method is called within the flush() method after the + execute() method has succeeded and the transaction has been committed. + + """ + if not self.states: + return + + states = set(self.states) + isdel = set( + s for (s, (isdelete, listonly)) in self.states.items() + if isdelete + ) + other = states.difference(isdel) + if isdel: + self.session._remove_newly_deleted(isdel) + if other: + self.session._register_newly_persistent(other) + + +class IterateMappersMixin(object): + def _mappers(self, uow): + if self.fromparent: + return iter( + m for m in + self.dependency_processor.parent.self_and_descendants + if uow._mapper_for_dep[(m, self.dependency_processor)] + ) + else: + return self.dependency_processor.mapper.self_and_descendants + + +class Preprocess(IterateMappersMixin): + def __init__(self, dependency_processor, fromparent): + self.dependency_processor = dependency_processor + self.fromparent = fromparent + self.processed = set() + self.setup_flush_actions = False + + def execute(self, uow): + delete_states = set() + save_states = set() + + for mapper in self._mappers(uow): + for state in uow.mappers[mapper].difference(self.processed): + (isdelete, listonly) = uow.states[state] + if not listonly: + if isdelete: + delete_states.add(state) + else: + save_states.add(state) + + if delete_states: + self.dependency_processor.presort_deletes(uow, delete_states) + self.processed.update(delete_states) + if save_states: + self.dependency_processor.presort_saves(uow, save_states) + self.processed.update(save_states) + + if (delete_states or save_states): + if not self.setup_flush_actions and ( + self.dependency_processor. + prop_has_changes(uow, delete_states, True) or + self.dependency_processor. 
+ prop_has_changes(uow, save_states, False) + ): + self.dependency_processor.per_property_flush_actions(uow) + self.setup_flush_actions = True + return True + else: + return False + + +class PostSortRec(object): + disabled = False + + def __new__(cls, uow, *args): + key = (cls, ) + args + if key in uow.postsort_actions: + return uow.postsort_actions[key] + else: + uow.postsort_actions[key] = \ + ret = \ + object.__new__(cls) + return ret + + def execute_aggregate(self, uow, recs): + self.execute(uow) + + def __repr__(self): + return "%s(%s)" % ( + self.__class__.__name__, + ",".join(str(x) for x in self.__dict__.values()) + ) + + +class ProcessAll(IterateMappersMixin, PostSortRec): + def __init__(self, uow, dependency_processor, delete, fromparent): + self.dependency_processor = dependency_processor + self.delete = delete + self.fromparent = fromparent + uow.deps[dependency_processor.parent.base_mapper].\ + add(dependency_processor) + + def execute(self, uow): + states = self._elements(uow) + if self.delete: + self.dependency_processor.process_deletes(uow, states) + else: + self.dependency_processor.process_saves(uow, states) + + def per_state_flush_actions(self, uow): + # this is handled by SaveUpdateAll and DeleteAll, + # since a ProcessAll should unconditionally be pulled + # into per-state if either the parent/child mappers + # are part of a cycle + return iter([]) + + def __repr__(self): + return "%s(%s, delete=%s)" % ( + self.__class__.__name__, + self.dependency_processor, + self.delete + ) + + def _elements(self, uow): + for mapper in self._mappers(uow): + for state in uow.mappers[mapper]: + (isdelete, listonly) = uow.states[state] + if isdelete == self.delete and not listonly: + yield state + + +class IssuePostUpdate(PostSortRec): + def __init__(self, uow, mapper, isdelete): + self.mapper = mapper + self.isdelete = isdelete + + def execute(self, uow): + states, cols = uow.post_update_states[self.mapper] + states = [s for s in states if uow.states[s][0] == self.isdelete] + + persistence.post_update(self.mapper, states, uow, cols) + + +class SaveUpdateAll(PostSortRec): + def __init__(self, uow, mapper): + self.mapper = mapper + assert mapper is mapper.base_mapper + + def execute(self, uow): + persistence.save_obj(self.mapper, + uow.states_for_mapper_hierarchy( + self.mapper, False, False), + uow + ) + + def per_state_flush_actions(self, uow): + states = list(uow.states_for_mapper_hierarchy( + self.mapper, False, False)) + base_mapper = self.mapper.base_mapper + delete_all = DeleteAll(uow, base_mapper) + for state in states: + # keep saves before deletes - + # this ensures 'row switch' operations work + action = SaveUpdateState(uow, state, base_mapper) + uow.dependencies.add((action, delete_all)) + yield action + + for dep in uow.deps[self.mapper]: + states_for_prop = uow.filter_states_for_dep(dep, states) + dep.per_state_flush_actions(uow, states_for_prop, False) + + +class DeleteAll(PostSortRec): + def __init__(self, uow, mapper): + self.mapper = mapper + assert mapper is mapper.base_mapper + + def execute(self, uow): + persistence.delete_obj(self.mapper, + uow.states_for_mapper_hierarchy( + self.mapper, True, False), + uow + ) + + def per_state_flush_actions(self, uow): + states = list(uow.states_for_mapper_hierarchy( + self.mapper, True, False)) + base_mapper = self.mapper.base_mapper + save_all = SaveUpdateAll(uow, base_mapper) + for state in states: + # keep saves before deletes - + # this ensures 'row switch' operations work + action = DeleteState(uow, state, base_mapper) + 
uow.dependencies.add((save_all, action)) + yield action + + for dep in uow.deps[self.mapper]: + states_for_prop = uow.filter_states_for_dep(dep, states) + dep.per_state_flush_actions(uow, states_for_prop, True) + + +class ProcessState(PostSortRec): + def __init__(self, uow, dependency_processor, delete, state): + self.dependency_processor = dependency_processor + self.delete = delete + self.state = state + + def execute_aggregate(self, uow, recs): + cls_ = self.__class__ + dependency_processor = self.dependency_processor + delete = self.delete + our_recs = [r for r in recs + if r.__class__ is cls_ and + r.dependency_processor is dependency_processor and + r.delete is delete] + recs.difference_update(our_recs) + states = [self.state] + [r.state for r in our_recs] + if delete: + dependency_processor.process_deletes(uow, states) + else: + dependency_processor.process_saves(uow, states) + + def __repr__(self): + return "%s(%s, %s, delete=%s)" % ( + self.__class__.__name__, + self.dependency_processor, + orm_util.state_str(self.state), + self.delete + ) + + +class SaveUpdateState(PostSortRec): + def __init__(self, uow, state, mapper): + self.state = state + self.mapper = mapper + + def execute_aggregate(self, uow, recs): + cls_ = self.__class__ + mapper = self.mapper + our_recs = [r for r in recs + if r.__class__ is cls_ and + r.mapper is mapper] + recs.difference_update(our_recs) + persistence.save_obj(mapper, + [self.state] + + [r.state for r in our_recs], + uow) + + def __repr__(self): + return "%s(%s)" % ( + self.__class__.__name__, + orm_util.state_str(self.state) + ) + + +class DeleteState(PostSortRec): + def __init__(self, uow, state, mapper): + self.state = state + self.mapper = mapper + + def execute_aggregate(self, uow, recs): + cls_ = self.__class__ + mapper = self.mapper + our_recs = [r for r in recs + if r.__class__ is cls_ and + r.mapper is mapper] + recs.difference_update(our_recs) + states = [self.state] + [r.state for r in our_recs] + persistence.delete_obj(mapper, + [s for s in states if uow.states[s][0]], + uow) + + def __repr__(self): + return "%s(%s)" % ( + self.__class__.__name__, + orm_util.state_str(self.state) + ) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/util.py b/lib/python3.4/site-packages/sqlalchemy/orm/util.py new file mode 100644 index 0000000..42fadca --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/orm/util.py @@ -0,0 +1,1034 @@ +# orm/util.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +from .. import sql, util, event, exc as sa_exc, inspection +from ..sql import expression, util as sql_util, operators +from .interfaces import PropComparator, MapperProperty +from . 
import attributes +import re + +from .base import instance_str, state_str, state_class_str, attribute_str, \ + state_attribute_str, object_mapper, object_state, _none_set, _never_set +from .base import class_mapper, _class_to_mapper +from .base import InspectionAttr +from .path_registry import PathRegistry + +all_cascades = frozenset(("delete", "delete-orphan", "all", "merge", + "expunge", "save-update", "refresh-expire", + "none")) + + +class CascadeOptions(frozenset): + """Keeps track of the options sent to relationship().cascade""" + + _add_w_all_cascades = all_cascades.difference([ + 'all', 'none', 'delete-orphan']) + _allowed_cascades = all_cascades + + __slots__ = ( + 'save_update', 'delete', 'refresh_expire', 'merge', + 'expunge', 'delete_orphan') + + def __new__(cls, value_list): + if isinstance(value_list, util.string_types) or value_list is None: + return cls.from_string(value_list) + values = set(value_list) + if values.difference(cls._allowed_cascades): + raise sa_exc.ArgumentError( + "Invalid cascade option(s): %s" % + ", ".join([repr(x) for x in + sorted(values.difference(cls._allowed_cascades))])) + + if "all" in values: + values.update(cls._add_w_all_cascades) + if "none" in values: + values.clear() + values.discard('all') + + self = frozenset.__new__(CascadeOptions, values) + self.save_update = 'save-update' in values + self.delete = 'delete' in values + self.refresh_expire = 'refresh-expire' in values + self.merge = 'merge' in values + self.expunge = 'expunge' in values + self.delete_orphan = "delete-orphan" in values + + if self.delete_orphan and not self.delete: + util.warn("The 'delete-orphan' cascade " + "option requires 'delete'.") + return self + + def __repr__(self): + return "CascadeOptions(%r)" % ( + ",".join([x for x in sorted(self)]) + ) + + @classmethod + def from_string(cls, arg): + values = [ + c for c + in re.split('\s*,\s*', arg or "") + if c + ] + return cls(values) + + +def _validator_events( + desc, key, validator, include_removes, include_backrefs): + """Runs a validation method on an attribute value to be set or + appended. + """ + + if not include_backrefs: + def detect_is_backref(state, initiator): + impl = state.manager[key].impl + return initiator.impl is not impl + + if include_removes: + def append(state, value, initiator): + if include_backrefs or not detect_is_backref(state, initiator): + return validator(state.obj(), key, value, False) + else: + return value + + def set_(state, value, oldvalue, initiator): + if include_backrefs or not detect_is_backref(state, initiator): + return validator(state.obj(), key, value, False) + else: + return value + + def remove(state, value, initiator): + if include_backrefs or not detect_is_backref(state, initiator): + validator(state.obj(), key, value, True) + + else: + def append(state, value, initiator): + if include_backrefs or not detect_is_backref(state, initiator): + return validator(state.obj(), key, value) + else: + return value + + def set_(state, value, oldvalue, initiator): + if include_backrefs or not detect_is_backref(state, initiator): + return validator(state.obj(), key, value) + else: + return value + + event.listen(desc, 'append', append, raw=True, retval=True) + event.listen(desc, 'set', set_, raw=True, retval=True) + if include_removes: + event.listen(desc, "remove", remove, raw=True, retval=True) + + +def polymorphic_union(table_map, typecolname, + aliasname='p_union', cast_nulls=True): + """Create a ``UNION`` statement used by a polymorphic mapper. 
+
+    See :ref:`concrete_inheritance` for an example of how
+    this is used.
+
+    :param table_map: mapping of polymorphic identities to
+     :class:`.Table` objects.
+    :param typecolname: string name of a "discriminator" column, which will be
+     derived from the query, producing the polymorphic identity for
+     each row. If ``None``, no polymorphic discriminator is generated.
+    :param aliasname: name of the :func:`~sqlalchemy.sql.expression.alias()`
+     construct generated.
+    :param cast_nulls: if True, non-existent columns, which are represented
+     as labeled NULLs, will be passed into CAST. This is a legacy behavior
+     that is problematic on some backends such as Oracle - in which case it
+     can be set to False.
+
+    """
+
+    colnames = util.OrderedSet()
+    colnamemaps = {}
+    types = {}
+    for key in table_map:
+        table = table_map[key]
+
+        # mysql doesn't like selecting from a select;
+        # make it an alias of the select
+        if isinstance(table, sql.Select):
+            table = table.alias()
+            table_map[key] = table
+
+        m = {}
+        for c in table.c:
+            colnames.add(c.key)
+            m[c.key] = c
+            types[c.key] = c.type
+        colnamemaps[table] = m
+
+    def col(name, table):
+        try:
+            return colnamemaps[table][name]
+        except KeyError:
+            if cast_nulls:
+                return sql.cast(sql.null(), types[name]).label(name)
+            else:
+                return sql.type_coerce(sql.null(), types[name]).label(name)
+
+    result = []
+    for type, table in table_map.items():
+        if typecolname is not None:
+            result.append(
+                sql.select([col(name, table) for name in colnames] +
+                           [sql.literal_column(
+                               sql_util._quote_ddl_expr(type)).
+                               label(typecolname)],
+                           from_obj=[table]))
+        else:
+            result.append(sql.select([col(name, table) for name in colnames],
+                                     from_obj=[table]))
+    return sql.union_all(*result).alias(aliasname)
+
+
+def identity_key(*args, **kwargs):
+    """Generate "identity key" tuples, as are used as keys in the
+    :attr:`.Session.identity_map` dictionary.
+
+    This function has several call styles:
+
+    * ``identity_key(class, ident)``
+
+      This form receives a mapped class and a primary key scalar or
+      tuple as an argument.
+
+      E.g.::
+
+        >>> identity_key(MyClass, (1, 2))
+        (<class '__main__.MyClass'>, (1, 2))
+
+      :param class: mapped class (must be a positional argument)
+      :param ident: primary key, may be a scalar or tuple argument.
+
+
+    * ``identity_key(instance=instance)``
+
+      This form will produce the identity key for a given instance. The
+      instance need not be persistent, only that its primary key attributes
+      are populated (else the key will contain ``None`` for those missing
+      values).
+
+      E.g.::
+
+        >>> instance = MyClass(1, 2)
+        >>> identity_key(instance=instance)
+        (<class '__main__.MyClass'>, (1, 2))
+
+      In this form, the given instance is ultimately run through
+      :meth:`.Mapper.identity_key_from_instance`, which will have the
+      effect of performing a database check for the corresponding row
+      if the object is expired.
+
+      :param instance: object instance (must be given as a keyword arg)
+
+    * ``identity_key(class, row=row)``
+
+      This form is similar to the class/tuple form, except is passed a
+      database result row as a :class:`.RowProxy` object.
+
+      E.g.::
+
+        >>> row = engine.execute("select * from table where a=1 and b=2").\
+first()
+        >>> identity_key(MyClass, row=row)
+        (<class '__main__.MyClass'>, (1, 2))
+
+      :param class: mapped class (must be a positional argument)
+      :param row: :class:`.RowProxy` row returned by a :class:`.ResultProxy`
+       (must be given as a keyword arg)
+
+    """
+    if args:
+        if len(args) == 1:
+            class_ = args[0]
+            try:
+                row = kwargs.pop("row")
+            except KeyError:
+                ident = kwargs.pop("ident")
+        elif len(args) == 2:
+            class_, ident = args
+        elif len(args) == 3:
+            class_, ident = args
+        else:
+            raise sa_exc.ArgumentError(
+                "expected up to three positional arguments, "
+                "got %s" % len(args))
+        if kwargs:
+            raise sa_exc.ArgumentError("unknown keyword arguments: %s"
+                                       % ", ".join(kwargs))
+        mapper = class_mapper(class_)
+        if "ident" in locals():
+            return mapper.identity_key_from_primary_key(util.to_list(ident))
+        return mapper.identity_key_from_row(row)
+    instance = kwargs.pop("instance")
+    if kwargs:
+        raise sa_exc.ArgumentError("unknown keyword arguments: %s"
+                                   % ", ".join(kwargs.keys()))
+    mapper = object_mapper(instance)
+    return mapper.identity_key_from_instance(instance)
+
+
+class ORMAdapter(sql_util.ColumnAdapter):
+    """ColumnAdapter subclass which excludes adaptation of entities from
+    non-matching mappers.
+
+    """
+
+    def __init__(self, entity, equivalents=None, adapt_required=False,
+                 chain_to=None, allow_label_resolve=True,
+                 anonymize_labels=False):
+        info = inspection.inspect(entity)
+
+        self.mapper = info.mapper
+        selectable = info.selectable
+        is_aliased_class = info.is_aliased_class
+        if is_aliased_class:
+            self.aliased_class = entity
+        else:
+            self.aliased_class = None
+
+        sql_util.ColumnAdapter.__init__(
+            self, selectable, equivalents, chain_to,
+            adapt_required=adapt_required,
+            allow_label_resolve=allow_label_resolve,
+            anonymize_labels=anonymize_labels,
+            include_fn=self._include_fn
+        )
+
+    def _include_fn(self, elem):
+        entity = elem._annotations.get('parentmapper', None)
+        return not entity or entity.isa(self.mapper)
+
+
+class AliasedClass(object):
+    """Represents an "aliased" form of a mapped class for usage with Query.
+
+    The ORM equivalent of a :func:`sqlalchemy.sql.expression.alias`
+    construct, this object mimics the mapped class using a
+    __getattr__ scheme and maintains a reference to a
+    real :class:`~sqlalchemy.sql.expression.Alias` object.
+
+    Usage is via the :func:`.orm.aliased` function, or alternatively
+    via the :func:`.orm.with_polymorphic` function.
+
+    Usage example::
+
+        # find all pairs of users with the same name
+        user_alias = aliased(User)
+        session.query(User, user_alias).\\
+            join((user_alias, User.id > user_alias.id)).\\
+            filter(User.name==user_alias.name)
+
+    The resulting object is an instance of :class:`.AliasedClass`.
+    This object implements an attribute scheme which produces the
+    same attribute and method interface as the original mapped
+    class, allowing :class:`.AliasedClass` to be compatible
+    with any attribute technique which works on the original class,
+    including hybrid attributes (see :ref:`hybrids_toplevel`).
+
+    The :class:`.AliasedClass` can be inspected for its underlying
+    :class:`.Mapper`, aliased selectable, and other information
+    using :func:`.inspect`::
+
+        from sqlalchemy import inspect
+        my_alias = aliased(MyClass)
+        insp = inspect(my_alias)
+
+    The resulting inspection object is an instance of :class:`.AliasedInsp`.
+
+    See :func:`.aliased` and :func:`.with_polymorphic` for construction
+    argument descriptions.
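+
+    As a short sketch of the attribute scheme (assuming a hypothetical
+    ``User`` mapping), attributes retrieved from the alias are adapted
+    to the alias's selectable rather than to the base table::
+
+        user_alias = aliased(User)
+
+        # compiles against the anonymous alias, e.g. "users_1.name = :name_1"
+        print(user_alias.name == 'ed')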
+ + """ + + def __init__(self, cls, alias=None, + name=None, + flat=False, + adapt_on_names=False, + # TODO: None for default here? + with_polymorphic_mappers=(), + with_polymorphic_discriminator=None, + base_alias=None, + use_mapper_path=False): + mapper = _class_to_mapper(cls) + if alias is None: + alias = mapper._with_polymorphic_selectable.alias( + name=name, flat=flat) + + self._aliased_insp = AliasedInsp( + self, + mapper, + alias, + name, + with_polymorphic_mappers + if with_polymorphic_mappers + else mapper.with_polymorphic_mappers, + with_polymorphic_discriminator + if with_polymorphic_discriminator is not None + else mapper.polymorphic_on, + base_alias, + use_mapper_path, + adapt_on_names + ) + + self.__name__ = 'AliasedClass_%s' % mapper.class_.__name__ + + def __getattr__(self, key): + try: + _aliased_insp = self.__dict__['_aliased_insp'] + except KeyError: + raise AttributeError() + else: + for base in _aliased_insp._target.__mro__: + try: + attr = object.__getattribute__(base, key) + except AttributeError: + continue + else: + break + else: + raise AttributeError(key) + + if isinstance(attr, PropComparator): + ret = attr.adapt_to_entity(_aliased_insp) + setattr(self, key, ret) + return ret + elif hasattr(attr, 'func_code'): + is_method = getattr(_aliased_insp._target, key, None) + if is_method and is_method.__self__ is not None: + return util.types.MethodType(attr.__func__, self, self) + else: + return None + elif hasattr(attr, '__get__'): + ret = attr.__get__(None, self) + if isinstance(ret, PropComparator): + return ret.adapt_to_entity(_aliased_insp) + else: + return ret + else: + return attr + + def __repr__(self): + return '' % ( + id(self), self._aliased_insp._target.__name__) + + +class AliasedInsp(InspectionAttr): + """Provide an inspection interface for an + :class:`.AliasedClass` object. + + The :class:`.AliasedInsp` object is returned + given an :class:`.AliasedClass` using the + :func:`.inspect` function:: + + from sqlalchemy import inspect + from sqlalchemy.orm import aliased + + my_alias = aliased(MyMappedClass) + insp = inspect(my_alias) + + Attributes on :class:`.AliasedInsp` + include: + + * ``entity`` - the :class:`.AliasedClass` represented. + * ``mapper`` - the :class:`.Mapper` mapping the underlying class. + * ``selectable`` - the :class:`.Alias` construct which ultimately + represents an aliased :class:`.Table` or :class:`.Select` + construct. + * ``name`` - the name of the alias. Also is used as the attribute + name when returned in a result tuple from :class:`.Query`. + * ``with_polymorphic_mappers`` - collection of :class:`.Mapper` objects + indicating all those mappers expressed in the select construct + for the :class:`.AliasedClass`. + * ``polymorphic_on`` - an alternate column or SQL expression which + will be used as the "discriminator" for a polymorphic load. + + .. 
+
+        :ref:`inspection_toplevel`
+
+    """
+
+    def __init__(self, entity, mapper, selectable, name,
+                 with_polymorphic_mappers, polymorphic_on,
+                 _base_alias, _use_mapper_path, adapt_on_names):
+        self.entity = entity
+        self.mapper = mapper
+        self.selectable = selectable
+        self.name = name
+        self.with_polymorphic_mappers = with_polymorphic_mappers
+        self.polymorphic_on = polymorphic_on
+        self._base_alias = _base_alias or self
+        self._use_mapper_path = _use_mapper_path
+
+        self._adapter = sql_util.ColumnAdapter(
+            selectable, equivalents=mapper._equivalent_columns,
+            adapt_on_names=adapt_on_names, anonymize_labels=True)
+
+        self._adapt_on_names = adapt_on_names
+        self._target = mapper.class_
+
+        for poly in self.with_polymorphic_mappers:
+            if poly is not mapper:
+                setattr(self.entity, poly.class_.__name__,
+                        AliasedClass(poly.class_, selectable, base_alias=self,
+                                     adapt_on_names=adapt_on_names,
+                                     use_mapper_path=_use_mapper_path))
+
+    is_aliased_class = True
+    "always returns True"
+
+    @property
+    def class_(self):
+        """Return the mapped class ultimately represented by this
+        :class:`.AliasedInsp`."""
+        return self.mapper.class_
+
+    @util.memoized_property
+    def _path_registry(self):
+        if self._use_mapper_path:
+            return self.mapper._path_registry
+        else:
+            return PathRegistry.per_mapper(self)
+
+    def __getstate__(self):
+        return {
+            'entity': self.entity,
+            'mapper': self.mapper,
+            'alias': self.selectable,
+            'name': self.name,
+            'adapt_on_names': self._adapt_on_names,
+            'with_polymorphic_mappers':
+                self.with_polymorphic_mappers,
+            'with_polymorphic_discriminator':
+                self.polymorphic_on,
+            'base_alias': self._base_alias,
+            'use_mapper_path': self._use_mapper_path
+        }
+
+    def __setstate__(self, state):
+        self.__init__(
+            state['entity'],
+            state['mapper'],
+            state['alias'],
+            state['name'],
+            state['with_polymorphic_mappers'],
+            state['with_polymorphic_discriminator'],
+            state['base_alias'],
+            state['use_mapper_path'],
+            state['adapt_on_names']
+        )
+
+    def _adapt_element(self, elem):
+        return self._adapter.traverse(elem).\
+            _annotate({
+                'parententity': self,
+                'parentmapper': self.mapper}
+            )
+
+    def _entity_for_mapper(self, mapper):
+        self_poly = self.with_polymorphic_mappers
+        if mapper in self_poly:
+            if mapper is self.mapper:
+                return self
+            else:
+                return getattr(
+                    self.entity, mapper.class_.__name__)._aliased_insp
+        elif mapper.isa(self.mapper):
+            return self
+        else:
+            assert False, "mapper %s doesn't correspond to %s" % (
+                mapper, self)
+
+    def __repr__(self):
+        if self.with_polymorphic_mappers:
+            with_poly = "(%s)" % ", ".join(
+                mp.class_.__name__ for mp in self.with_polymorphic_mappers)
+        else:
+            with_poly = ""
+        return '<AliasedInsp at 0x%x; %s%s>' % (
+            id(self), self.class_.__name__, with_poly)
+
+
+inspection._inspects(AliasedClass)(lambda target: target._aliased_insp)
+inspection._inspects(AliasedInsp)(lambda target: target)
+
+
+def aliased(element, alias=None, name=None, flat=False, adapt_on_names=False):
+    """Produce an alias of the given element, usually an :class:`.AliasedClass`
+    instance.
+
+    E.g.::
+
+        my_alias = aliased(MyClass)
+
+        session.query(MyClass, my_alias).filter(MyClass.id > my_alias.id)
+
+    The :func:`.aliased` function is used to create an ad-hoc mapping
+    of a mapped class to a new selectable. By default, a selectable
+    is generated from the normally mapped selectable (typically a
+    :class:`.Table`) using the :meth:`.FromClause.alias` method.
+    However, :func:`.aliased` can also be used to link the class to
+    a new :func:`.select` statement.
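+
+    A minimal sketch of that use (assuming hypothetical ``User`` and
+    ``Address`` mappings), mapping ``Address`` against a derived
+    subquery::
+
+        stmt = session.query(Address).\\
+            filter(Address.email_address.like('%@aol.com')).subquery()
+        adalias = aliased(Address, stmt)
+
+        for user, address in session.query(User, adalias).\\
+                join(adalias, User.addresses):
+            print(user, address)
+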
+    Also, the :func:`.with_polymorphic`
+    function is a variant of :func:`.aliased` that is intended to specify
+    a so-called "polymorphic selectable", that corresponds to the union
+    of several joined-inheritance subclasses at once.
+
+    For convenience, the :func:`.aliased` function also accepts plain
+    :class:`.FromClause` constructs, such as a :class:`.Table` or
+    :func:`.select` construct. In those cases, the :meth:`.FromClause.alias`
+    method is called on the object and the new :class:`.Alias` object
+    returned. The returned :class:`.Alias` is not ORM-mapped in this case.
+
+    :param element: element to be aliased. Is normally a mapped class,
+     but for convenience can also be a :class:`.FromClause` element.
+
+    :param alias: Optional selectable unit to map the element to. This should
+     normally be a :class:`.Alias` object corresponding to the :class:`.Table`
+     to which the class is mapped, or to a :func:`.select` construct that
+     is compatible with the mapping. By default, a simple anonymous
+     alias of the mapped table is generated.
+
+    :param name: optional string name to use for the alias, if not specified
+     by the ``alias`` parameter. The name, among other things, forms the
+     attribute name that will be accessible via tuples returned by a
+     :class:`.Query` object.
+
+    :param flat: Boolean, will be passed through to the
+     :meth:`.FromClause.alias` call so that aliases of :class:`.Join` objects
+     don't include an enclosing SELECT. This can lead to more efficient
+     queries in many circumstances. A JOIN against a nested JOIN will be
+     rewritten as a JOIN against an aliased SELECT subquery on backends that
+     don't support this syntax.
+
+     .. versionadded:: 0.9.0
+
+     .. seealso:: :meth:`.Join.alias`
+
+    :param adapt_on_names: if True, more liberal "matching" will be used when
+     mapping the mapped columns of the ORM entity to those of the
+     given selectable - a name-based match will be performed if the
+     given selectable doesn't otherwise have a column that corresponds
+     to one on the entity. The use case for this is when associating
+     an entity with some derived selectable such as one that uses
+     aggregate functions::
+
+        class UnitPrice(Base):
+            __tablename__ = 'unit_price'
+            ...
+            unit_id = Column(Integer)
+            price = Column(Numeric)
+
+        aggregated_unit_price = Session.query(
+            func.sum(UnitPrice.price).label('price')
+        ).group_by(UnitPrice.unit_id).subquery()
+
+        aggregated_unit_price = aliased(UnitPrice,
+            alias=aggregated_unit_price, adapt_on_names=True)
+
+     Above, functions on ``aggregated_unit_price`` which refer to
+     ``.price`` will return the
+     ``func.sum(UnitPrice.price).label('price')`` column, as it is
+     matched on the name "price". Ordinarily, the "price" function
+     wouldn't have any "column correspondence" to the actual
+     ``UnitPrice.price`` column as it is not a proxy of the original.
+
+     .. versionadded:: 0.7.3
+
+
+    """
+    if isinstance(element, expression.FromClause):
+        if adapt_on_names:
+            raise sa_exc.ArgumentError(
+                "adapt_on_names only applies to ORM elements"
+            )
+        return element.alias(name, flat=flat)
+    else:
+        return AliasedClass(element, alias=alias, flat=flat,
+                            name=name, adapt_on_names=adapt_on_names)
+
+
+def with_polymorphic(base, classes, selectable=False,
+                     flat=False,
+                     polymorphic_on=None, aliased=False,
+                     innerjoin=False, _use_mapper_path=False,
+                     _existing_alias=None):
+    """Produce an :class:`.AliasedClass` construct which specifies
+    columns for descendant mappers of the given base.
+
+    .. versionadded:: 0.8
+        :func:`.orm.with_polymorphic` is in addition to the existing
+        :class:`.Query` method :meth:`.Query.with_polymorphic`,
+        which has the same purpose but is not as flexible in its usage.
+
+    Using this method will ensure that each descendant mapper's
+    tables are included in the FROM clause, and will allow filter()
+    criterion to be used against those tables. The resulting
+    instances will also have those columns already loaded so that
+    no "post fetch" of those columns will be required.
+
+    See the examples at :ref:`with_polymorphic`.
+
+    :param base: Base class to be aliased.
+
+    :param classes: a single class or mapper, or list of
+     class/mappers, which inherit from the base class.
+     Alternatively, it may also be the string ``'*'``, in which case
+     all descending mapped classes will be added to the FROM clause.
+
+    :param aliased: when True, the selectable will be wrapped in an
+     alias, that is ``(SELECT * FROM <fromclauses>) AS anon_1``.
+     This can be important when using the with_polymorphic()
+     to create the target of a JOIN on a backend that does not
+     support parenthesized joins, such as SQLite and older
+     versions of MySQL.
+
+    :param flat: Boolean, will be passed through to the
+     :meth:`.FromClause.alias` call so that aliases of :class:`.Join`
+     objects don't include an enclosing SELECT. This can lead to more
+     efficient queries in many circumstances. A JOIN against a nested JOIN
+     will be rewritten as a JOIN against an aliased SELECT subquery on
+     backends that don't support this syntax.
+
+     Setting ``flat`` to ``True`` implies the ``aliased`` flag is
+     also ``True``.
+
+     .. versionadded:: 0.9.0
+
+     .. seealso:: :meth:`.Join.alias`
+
+    :param selectable: a table or select() statement that will
+     be used in place of the generated FROM clause. This argument is
+     required if any of the desired classes use concrete table
+     inheritance, since SQLAlchemy currently cannot generate UNIONs
+     among tables automatically. If used, the ``selectable`` argument
+     must represent the full set of tables and columns mapped by every
+     mapped class. Otherwise, the unaccounted mapped columns will
+     result in their table being appended directly to the FROM clause
+     which will usually lead to incorrect results.
+
+    :param polymorphic_on: a column to be used as the "discriminator"
+     column for the given selectable. If not given, the polymorphic_on
+     attribute of the base classes' mapper will be used, if any. This
+     is useful for mappings that don't have polymorphic loading
+     behavior by default.
+
+    :param innerjoin: if True, an INNER JOIN will be used. This should
+     only be specified if querying for one specific subtype only
+    """
+    primary_mapper = _class_to_mapper(base)
+    if _existing_alias:
+        assert _existing_alias.mapper is primary_mapper
+        classes = util.to_set(classes)
+        new_classes = set([
+            mp.class_ for mp in
+            _existing_alias.with_polymorphic_mappers])
+        if classes == new_classes:
+            return _existing_alias
+        else:
+            classes = classes.union(new_classes)
+    mappers, selectable = primary_mapper.\
+        _with_polymorphic_args(classes, selectable,
+                               innerjoin=innerjoin)
+    if aliased or flat:
+        selectable = selectable.alias(flat=flat)
+    return AliasedClass(base,
+                        selectable,
+                        with_polymorphic_mappers=mappers,
+                        with_polymorphic_discriminator=polymorphic_on,
+                        use_mapper_path=_use_mapper_path)
+
+
+def _orm_annotate(element, exclude=None):
+    """Deep copy the given ClauseElement, annotating each element with the
+    "_orm_adapt" flag.
+ + Elements within the exclude collection will be cloned but not annotated. + + """ + return sql_util._deep_annotate(element, {'_orm_adapt': True}, exclude) + + +def _orm_deannotate(element): + """Remove annotations that link a column to a particular mapping. + + Note this doesn't affect "remote" and "foreign" annotations + passed by the :func:`.orm.foreign` and :func:`.orm.remote` + annotators. + + """ + + return sql_util._deep_deannotate(element, + values=("_orm_adapt", "parententity") + ) + + +def _orm_full_deannotate(element): + return sql_util._deep_deannotate(element) + + +class _ORMJoin(expression.Join): + """Extend Join to support ORM constructs as input.""" + + __visit_name__ = expression.Join.__visit_name__ + + def __init__( + self, + left, right, onclause=None, isouter=False, + _left_memo=None, _right_memo=None): + + left_info = inspection.inspect(left) + left_orm_info = getattr(left, '_joined_from_info', left_info) + + right_info = inspection.inspect(right) + adapt_to = right_info.selectable + + self._joined_from_info = right_info + + self._left_memo = _left_memo + self._right_memo = _right_memo + + if isinstance(onclause, util.string_types): + onclause = getattr(left_orm_info.entity, onclause) + + if isinstance(onclause, attributes.QueryableAttribute): + on_selectable = onclause.comparator._source_selectable() + prop = onclause.property + elif isinstance(onclause, MapperProperty): + prop = onclause + on_selectable = prop.parent.selectable + else: + prop = None + + if prop: + if sql_util.clause_is_present( + on_selectable, left_info.selectable): + adapt_from = on_selectable + else: + adapt_from = left_info.selectable + + pj, sj, source, dest, \ + secondary, target_adapter = prop._create_joins( + source_selectable=adapt_from, + dest_selectable=adapt_to, + source_polymorphic=True, + dest_polymorphic=True, + of_type=right_info.mapper) + + if sj is not None: + if isouter: + # note this is an inner join from secondary->right + right = sql.join(secondary, right, sj) + onclause = pj + else: + left = sql.join(left, secondary, pj, isouter) + onclause = sj + else: + onclause = pj + self._target_adapter = target_adapter + + expression.Join.__init__(self, left, right, onclause, isouter) + + if not prop and getattr(right_info, 'mapper', None) \ + and right_info.mapper.single: + # if single inheritance target and we are using a manual + # or implicit ON clause, augment it the same way we'd augment the + # WHERE. + single_crit = right_info.mapper._single_table_criterion + if single_crit is not None: + if right_info.is_aliased_class: + single_crit = right_info._adapter.traverse(single_crit) + self.onclause = self.onclause & single_crit + + def _splice_into_center(self, other): + """Splice a join into the center. 
+ + Given join(a, b) and join(b, c), return join(a, b).join(c) + + """ + leftmost = other + while isinstance(leftmost, sql.Join): + leftmost = leftmost.left + + assert self.right is leftmost + + left = _ORMJoin( + self.left, other.left, + self.onclause, isouter=self.isouter, + _left_memo=self._left_memo, + _right_memo=other._left_memo + ) + + return _ORMJoin( + left, + other.right, + other.onclause, isouter=other.isouter, + _right_memo=other._right_memo + ) + + def join(self, right, onclause=None, isouter=False, join_to_left=None): + return _ORMJoin(self, right, onclause, isouter) + + def outerjoin(self, right, onclause=None, join_to_left=None): + return _ORMJoin(self, right, onclause, True) + + +def join(left, right, onclause=None, isouter=False, join_to_left=None): + """Produce an inner join between left and right clauses. + + :func:`.orm.join` is an extension to the core join interface + provided by :func:`.sql.expression.join()`, where the + left and right selectables may be not only core selectable + objects such as :class:`.Table`, but also mapped classes or + :class:`.AliasedClass` instances. The "on" clause can + be a SQL expression, or an attribute or string name + referencing a configured :func:`.relationship`. + + :func:`.orm.join` is not commonly needed in modern usage, + as its functionality is encapsulated within that of the + :meth:`.Query.join` method, which features a + significant amount of automation beyond :func:`.orm.join` + by itself. Explicit usage of :func:`.orm.join` + with :class:`.Query` involves usage of the + :meth:`.Query.select_from` method, as in:: + + from sqlalchemy.orm import join + session.query(User).\\ + select_from(join(User, Address, User.addresses)).\\ + filter(Address.email_address=='foo@bar.com') + + In modern SQLAlchemy the above join can be written more + succinctly as:: + + session.query(User).\\ + join(User.addresses).\\ + filter(Address.email_address=='foo@bar.com') + + See :meth:`.Query.join` for information on modern usage + of ORM level joins. + + .. versionchanged:: 0.8.1 - the ``join_to_left`` parameter + is no longer used, and is deprecated. + + """ + return _ORMJoin(left, right, onclause, isouter) + + +def outerjoin(left, right, onclause=None, join_to_left=None): + """Produce a left outer join between left and right clauses. + + This is the "outer join" version of the :func:`.orm.join` function, + featuring the same behavior except that an OUTER JOIN is generated. + See that function's documentation for other usage details. + + """ + return _ORMJoin(left, right, onclause, True) + + +def with_parent(instance, prop): + """Create filtering criterion that relates this query's primary entity + to the given related instance, using established :func:`.relationship()` + configuration. + + The SQL rendered is the same as that rendered when a lazy loader + would fire off from the given parent on that attribute, meaning + that the appropriate state is taken from the parent object in + Python without the need to render joins to the parent table + in the rendered statement. + + .. versionchanged:: 0.6.4 + This method accepts parent instances in all + persistence states, including transient, persistent, and detached. + Only the requisite primary key/foreign key attributes need to + be populated. Previous versions didn't work with transient + instances. + + :param instance: + An instance which has some :func:`.relationship`. 
+ + :param property: + String property name, or class-bound attribute, which indicates + what relationship from the instance should be used to reconcile the + parent/child relationship. + + """ + if isinstance(prop, util.string_types): + mapper = object_mapper(instance) + prop = getattr(mapper.class_, prop).property + elif isinstance(prop, attributes.QueryableAttribute): + prop = prop.property + + return prop._with_parent(instance) + + +def has_identity(object): + """Return True if the given object has a database + identity. + + This typically corresponds to the object being + in either the persistent or detached state. + + .. seealso:: + + :func:`.was_deleted` + + """ + state = attributes.instance_state(object) + return state.has_identity + + +def was_deleted(object): + """Return True if the given object was deleted + within a session flush. + + .. versionadded:: 0.8.0 + + """ + + state = attributes.instance_state(object) + return state.deleted + + +def randomize_unitofwork(): + """Use random-ordering sets within the unit of work in order + to detect unit of work sorting issues. + + This is a utility function that can be used to help reproduce + inconsistent unit of work sorting issues. For example, + if two kinds of objects A and B are being inserted, and + B has a foreign key reference to A - the A must be inserted first. + However, if there is no relationship between A and B, the unit of work + won't know to perform this sorting, and an operation may or may not + fail, depending on how the ordering works out. Since Python sets + and dictionaries have non-deterministic ordering, such an issue may + occur on some runs and not on others, and in practice it tends to + have a great dependence on the state of the interpreter. This leads + to so-called "heisenbugs" where changing entirely irrelevant aspects + of the test program still cause the failure behavior to change. + + By calling ``randomize_unitofwork()`` when a script first runs, the + ordering of a key series of sets within the unit of work implementation + are randomized, so that the script can be minimized down to the + fundamental mapping and operation that's failing, while still reproducing + the issue on at least some runs. + + This utility is also available when running the test suite via the + ``--reversetop`` flag. + + .. versionadded:: 0.8.1 created a standalone version of the + ``--reversetop`` feature. + + """ + from sqlalchemy.orm import unitofwork, session, mapper, dependency + from sqlalchemy.util import topological + from sqlalchemy.testing.util import RandomSet + topological.set = unitofwork.set = session.set = mapper.set = \ + dependency.set = RandomSet diff --git a/lib/python3.4/site-packages/sqlalchemy/pool.py b/lib/python3.4/site-packages/sqlalchemy/pool.py new file mode 100644 index 0000000..32b4736 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/pool.py @@ -0,0 +1,1367 @@ +# sqlalchemy/pool.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +"""Connection pooling for DB-API connections. + +Provides a number of connection pool implementations for a variety of +usage scenarios and thread behavior requirements imposed by the +application, DB-API or database itself. + +Also provides a DB-API 2.0 connection proxying mechanism allowing +regular DB-API connect() methods to be transparently managed by a +SQLAlchemy connection pool. 
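+
+As a minimal sketch of the proxying mechanism described above (assuming
+the stdlib ``sqlite3`` module as the DB-API; the file name is just an
+example)::
+
+    import sqlite3
+    from sqlalchemy import pool
+
+    proxied = pool.manage(sqlite3)
+    conn = proxied.connect('some.db')   # pooled per connect() arguments
+    conn.close()                        # returned to the pool, not closed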
+"""
+
+import time
+import traceback
+import weakref
+
+from . import exc, log, event, interfaces, util
+from .util import queue as sqla_queue
+from .util import threading, memoized_property, \
+    chop_traceback
+
+from collections import deque
+
+proxies = {}
+
+
+def manage(module, **params):
+    """Return a proxy for a DB-API module that automatically
+    pools connections.
+
+    Given a DB-API 2.0 module and pool management parameters, returns
+    a proxy for the module that will automatically pool connections,
+    creating new connection pools for each distinct set of connection
+    arguments sent to the decorated module's connect() function.
+
+    :param module: a DB-API 2.0 database module
+
+    :param poolclass: the class used by the pool module to provide
+        pooling. Defaults to :class:`.QueuePool`.
+
+    :param \*\*params: will be passed through to *poolclass*
+
+    """
+    try:
+        return proxies[module]
+    except KeyError:
+        return proxies.setdefault(module, _DBProxy(module, **params))
+
+
+def clear_managers():
+    """Remove all current DB-API 2.0 managers.
+
+    All pools and connections are disposed.
+    """
+
+    for manager in proxies.values():
+        manager.close()
+    proxies.clear()
+
+reset_rollback = util.symbol('reset_rollback')
+reset_commit = util.symbol('reset_commit')
+reset_none = util.symbol('reset_none')
+
+
+class _ConnDialect(object):
+
+    """partial implementation of :class:`.Dialect`
+    which provides DBAPI connection methods.
+
+    When a :class:`.Pool` is combined with an :class:`.Engine`,
+    the :class:`.Engine` replaces this with its own
+    :class:`.Dialect`.
+
+    """
+
+    def do_rollback(self, dbapi_connection):
+        dbapi_connection.rollback()
+
+    def do_commit(self, dbapi_connection):
+        dbapi_connection.commit()
+
+    def do_close(self, dbapi_connection):
+        dbapi_connection.close()
+
+
+class Pool(log.Identified):
+
+    """Abstract base class for connection pools."""
+
+    _dialect = _ConnDialect()
+
+    def __init__(self,
+                 creator, recycle=-1, echo=None,
+                 use_threadlocal=False,
+                 logging_name=None,
+                 reset_on_return=True,
+                 listeners=None,
+                 events=None,
+                 _dispatch=None,
+                 _dialect=None):
+        """
+        Construct a Pool.
+
+        :param creator: a callable function that returns a DB-API
+          connection object. The function will be called with
+          parameters.
+
+        :param recycle: If set to a value other than -1, number of seconds
+          between connection recycling, which means upon checkout, if this
+          timeout is surpassed the connection will be closed and
+          replaced with a newly opened connection. Defaults to -1.
+
+        :param logging_name: String identifier which will be used within
+          the "name" field of logging records generated within the
+          "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
+          id.
+
+        :param echo: If True, connections being pulled and retrieved
+          from the pool will be logged to the standard output, as well
+          as pool sizing information. Echoing can also be achieved by
+          enabling logging for the "sqlalchemy.pool"
+          namespace. Defaults to False.
+
+        :param use_threadlocal: If set to True, repeated calls to
+          :meth:`connect` within the same application thread will be
+          guaranteed to return the same connection object, if one has
+          already been retrieved from the pool and has not been
+          returned yet. Offers a slight performance advantage at the
+          cost of individual transactions by default. The
+          :meth:`.Pool.unique_connection` method is provided to return
+          a consistently unique connection to bypass this behavior
+          when the flag is set.
+
+          .. warning:: The :paramref:`.Pool.use_threadlocal` flag
+             **does not affect the behavior** of :meth:`.Engine.connect`.
+             :meth:`.Engine.connect` makes use of the
+             :meth:`.Pool.unique_connection` method which **does not use thread
+             local context**. To produce a :class:`.Connection` which refers
+             to the :meth:`.Pool.connect` method, use
+             :meth:`.Engine.contextual_connect`.
+
+             Note that other SQLAlchemy connectivity systems such as
+             :meth:`.Engine.execute` as well as the orm
+             :class:`.Session` make use of
+             :meth:`.Engine.contextual_connect` internally, so these functions
+             are compatible with the :paramref:`.Pool.use_threadlocal` setting.
+
+          .. seealso::
+
+             :ref:`threadlocal_strategy` - contains detail on the
+             "threadlocal" engine strategy, which provides a more comprehensive
+             approach to "threadlocal" connectivity for the specific
+             use case of using :class:`.Engine` and :class:`.Connection` objects
+             directly.
+
+        :param reset_on_return: Determine steps to take on
+          connections as they are returned to the pool.
+          reset_on_return can have any of these values:
+
+          * ``"rollback"`` - call rollback() on the connection,
+            to release locks and transaction resources.
+            This is the default value. The vast majority
+            of use cases should leave this value set.
+          * ``True`` - same as 'rollback', this is here for
+            backwards compatibility.
+          * ``"commit"`` - call commit() on the connection,
+            to release locks and transaction resources.
+            A commit here may be desirable for databases that
+            cache query plans if a commit is emitted,
+            such as Microsoft SQL Server. However, this
+            value is more dangerous than 'rollback' because
+            any data changes present on the transaction
+            are committed unconditionally.
+          * ``None`` - don't do anything on the connection.
+            This setting should only be made on a database
+            that has no transaction support at all,
+            namely MySQL MyISAM. By not doing anything,
+            performance can be improved. This
+            setting should **never be selected** for a
+            database that supports transactions,
+            as it will lead to deadlocks and stale
+            state.
+          * ``"none"`` - same as ``None``
+
+            .. versionadded:: 0.9.10
+
+          * ``False`` - same as None, this is here for
+            backwards compatibility.
+
+          .. versionchanged:: 0.7.6
+              :paramref:`.Pool.reset_on_return` accepts ``"rollback"``
+              and ``"commit"`` arguments.
+
+        :param events: a list of 2-tuples, each of the form
+          ``(callable, target)`` which will be passed to :func:`.event.listen`
+          upon construction. Provided here so that event listeners
+          can be assigned via :func:`.create_engine` before dialect-level
+          listeners are applied.
+
+        :param listeners: Deprecated. A list of
+          :class:`~sqlalchemy.interfaces.PoolListener`-like objects or
+          dictionaries of callables that receive events when DB-API
+          connections are created, checked out and checked in to the
+          pool. This has been superseded by
+          :func:`~sqlalchemy.event.listen`.
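+
+        As an illustration of the ``events`` parameter above (a sketch;
+        ``creator`` stands in for any DB-API connection function)::
+
+            from sqlalchemy import pool
+
+            def on_checkout(dbapi_con, con_record, con_proxy):
+                pass   # e.g. ping the connection here
+
+            p = pool.QueuePool(creator, events=[(on_checkout, 'checkout')])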
+ + """ + if logging_name: + self.logging_name = self._orig_logging_name = logging_name + else: + self._orig_logging_name = None + + log.instance_logger(self, echoflag=echo) + self._threadconns = threading.local() + self._creator = creator + self._recycle = recycle + self._invalidate_time = 0 + self._use_threadlocal = use_threadlocal + if reset_on_return in ('rollback', True, reset_rollback): + self._reset_on_return = reset_rollback + elif reset_on_return in ('none', None, False, reset_none): + self._reset_on_return = reset_none + elif reset_on_return in ('commit', reset_commit): + self._reset_on_return = reset_commit + else: + raise exc.ArgumentError( + "Invalid value for 'reset_on_return': %r" + % reset_on_return) + + self.echo = echo + + if _dispatch: + self.dispatch._update(_dispatch, only_propagate=False) + if _dialect: + self._dialect = _dialect + if events: + for fn, target in events: + event.listen(self, target, fn) + if listeners: + util.warn_deprecated( + "The 'listeners' argument to Pool (and " + "create_engine()) is deprecated. Use event.listen().") + for l in listeners: + self.add_listener(l) + + @property + def _creator(self): + return self.__dict__['_creator'] + + @_creator.setter + def _creator(self, creator): + self.__dict__['_creator'] = creator + self._invoke_creator = self._should_wrap_creator(creator) + + def _should_wrap_creator(self, creator): + """Detect if creator accepts a single argument, or is sent + as a legacy style no-arg function. + + """ + + try: + argspec = util.get_callable_argspec(self._creator, no_self=True) + except TypeError: + return lambda crec: creator() + + defaulted = argspec[3] is not None and len(argspec[3]) or 0 + positionals = len(argspec[0]) - defaulted + + # look for the exact arg signature that DefaultStrategy + # sends us + if (argspec[0], argspec[3]) == (['connection_record'], (None,)): + return creator + # or just a single positional + elif positionals == 1: + return creator + # all other cases, just wrap and assume legacy "creator" callable + # thing + else: + return lambda crec: creator() + + def _close_connection(self, connection): + self.logger.debug("Closing connection %r", connection) + try: + self._dialect.do_close(connection) + except Exception: + self.logger.error("Exception closing connection %r", + connection, exc_info=True) + + @util.deprecated( + 2.7, "Pool.add_listener is deprecated. Use event.listen()") + def add_listener(self, listener): + """Add a :class:`.PoolListener`-like object to this pool. + + ``listener`` may be an object that implements some or all of + PoolListener, or a dictionary of callables containing implementations + of some or all of the named methods in PoolListener. + + """ + interfaces.PoolListener._adapt_listener(self, listener) + + def unique_connection(self): + """Produce a DBAPI connection that is not referenced by any + thread-local context. + + This method is equivalent to :meth:`.Pool.connect` when the + :paramref:`.Pool.use_threadlocal` flag is not set to True. + When :paramref:`.Pool.use_threadlocal` is True, the + :meth:`.Pool.unique_connection` method provides a means of bypassing + the threadlocal context. + + """ + return _ConnectionFairy._checkout(self) + + def _create_connection(self): + """Called by subclasses to create a new ConnectionRecord.""" + + return _ConnectionRecord(self) + + def _invalidate(self, connection, exception=None): + """Mark all connections established within the generation + of the given connection as invalidated. 
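+
+        A sketch of the intended use (``conn`` standing in for a
+        checked-out connection whose backend has died)::
+
+            pool._invalidate(conn, exception)
+            # `conn` is invalidated immediately; other connections from
+            # the same generation are recycled at their next checkout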
+ + If this pool's last invalidate time is before when the given + connection was created, update the timestamp til now. Otherwise, + no action is performed. + + Connections with a start time prior to this pool's invalidation + time will be recycled upon next checkout. + """ + rec = getattr(connection, "_connection_record", None) + if not rec or self._invalidate_time < rec.starttime: + self._invalidate_time = time.time() + if getattr(connection, 'is_valid', False): + connection.invalidate(exception) + + def recreate(self): + """Return a new :class:`.Pool`, of the same class as this one + and configured with identical creation arguments. + + This method is used in conjunction with :meth:`dispose` + to close out an entire :class:`.Pool` and create a new one in + its place. + + """ + + raise NotImplementedError() + + def dispose(self): + """Dispose of this pool. + + This method leaves the possibility of checked-out connections + remaining open, as it only affects connections that are + idle in the pool. + + See also the :meth:`Pool.recreate` method. + + """ + + raise NotImplementedError() + + def connect(self): + """Return a DBAPI connection from the pool. + + The connection is instrumented such that when its + ``close()`` method is called, the connection will be returned to + the pool. + + """ + if not self._use_threadlocal: + return _ConnectionFairy._checkout(self) + + try: + rec = self._threadconns.current() + except AttributeError: + pass + else: + if rec is not None: + return rec._checkout_existing() + + return _ConnectionFairy._checkout(self, self._threadconns) + + def _return_conn(self, record): + """Given a _ConnectionRecord, return it to the :class:`.Pool`. + + This method is called when an instrumented DBAPI connection + has its ``close()`` method called. + + """ + if self._use_threadlocal: + try: + del self._threadconns.current + except AttributeError: + pass + self._do_return_conn(record) + + def _do_get(self): + """Implementation for :meth:`get`, supplied by subclasses.""" + + raise NotImplementedError() + + def _do_return_conn(self, conn): + """Implementation for :meth:`return_conn`, supplied by subclasses.""" + + raise NotImplementedError() + + def status(self): + raise NotImplementedError() + + +class _ConnectionRecord(object): + + """Internal object which maintains an individual DBAPI connection + referenced by a :class:`.Pool`. + + The :class:`._ConnectionRecord` object always exists for any particular + DBAPI connection whether or not that DBAPI connection has been + "checked out". This is in contrast to the :class:`._ConnectionFairy` + which is only a public facade to the DBAPI connection while it is checked + out. + + A :class:`._ConnectionRecord` may exist for a span longer than that + of a single DBAPI connection. For example, if the + :meth:`._ConnectionRecord.invalidate` + method is called, the DBAPI connection associated with this + :class:`._ConnectionRecord` + will be discarded, but the :class:`._ConnectionRecord` may be used again, + in which case a new DBAPI connection is produced when the :class:`.Pool` + next uses this record. + + The :class:`._ConnectionRecord` is delivered along with connection + pool events, including :meth:`.PoolEvents.connect` and + :meth:`.PoolEvents.checkout`, however :class:`._ConnectionRecord` still + remains an internal object whose API and internals may change. + + .. 
seealso:: + + :class:`._ConnectionFairy` + + """ + + def __init__(self, pool): + self.__pool = pool + self.connection = self.__connect() + self.finalize_callback = deque() + + pool.dispatch.first_connect.\ + for_modify(pool.dispatch).\ + exec_once(self.connection, self) + pool.dispatch.connect(self.connection, self) + + connection = None + """A reference to the actual DBAPI connection being tracked. + + May be ``None`` if this :class:`._ConnectionRecord` has been marked + as invalidated; a new DBAPI connection may replace it if the owning + pool calls upon this :class:`._ConnectionRecord` to reconnect. + + """ + + _soft_invalidate_time = 0 + + @util.memoized_property + def info(self): + """The ``.info`` dictionary associated with the DBAPI connection. + + This dictionary is shared among the :attr:`._ConnectionFairy.info` + and :attr:`.Connection.info` accessors. + + """ + return {} + + @classmethod + def checkout(cls, pool): + rec = pool._do_get() + try: + dbapi_connection = rec.get_connection() + except: + with util.safe_reraise(): + rec.checkin() + echo = pool._should_log_debug() + fairy = _ConnectionFairy(dbapi_connection, rec, echo) + rec.fairy_ref = weakref.ref( + fairy, + lambda ref: _finalize_fairy and + _finalize_fairy( + dbapi_connection, + rec, pool, ref, echo) + ) + _refs.add(rec) + if echo: + pool.logger.debug("Connection %r checked out from pool", + dbapi_connection) + return fairy + + def checkin(self): + self.fairy_ref = None + connection = self.connection + pool = self.__pool + while self.finalize_callback: + finalizer = self.finalize_callback.pop() + finalizer(connection) + if pool.dispatch.checkin: + pool.dispatch.checkin(connection, self) + pool._return_conn(self) + + def close(self): + if self.connection is not None: + self.__close() + + def invalidate(self, e=None, soft=False): + """Invalidate the DBAPI connection held by this :class:`._ConnectionRecord`. + + This method is called for all connection invalidations, including + when the :meth:`._ConnectionFairy.invalidate` or + :meth:`.Connection.invalidate` methods are called, as well as when any + so-called "automatic invalidation" condition occurs. + + :param e: an exception object indicating a reason for the invalidation. + + :param soft: if True, the connection isn't closed; instead, this + connection will be recycled on next checkout. + + .. versionadded:: 1.0.3 + + .. 
seealso:: + + :ref:`pool_connection_invalidation` + + """ + # already invalidated + if self.connection is None: + return + if soft: + self.__pool.dispatch.soft_invalidate(self.connection, self, e) + else: + self.__pool.dispatch.invalidate(self.connection, self, e) + if e is not None: + self.__pool.logger.info( + "%sInvalidate connection %r (reason: %s:%s)", + "Soft " if soft else "", + self.connection, e.__class__.__name__, e) + else: + self.__pool.logger.info( + "%sInvalidate connection %r", + "Soft " if soft else "", + self.connection) + if soft: + self._soft_invalidate_time = time.time() + else: + self.__close() + self.connection = None + + def get_connection(self): + recycle = False + if self.connection is None: + self.info.clear() + self.connection = self.__connect() + if self.__pool.dispatch.connect: + self.__pool.dispatch.connect(self.connection, self) + elif self.__pool._recycle > -1 and \ + time.time() - self.starttime > self.__pool._recycle: + self.__pool.logger.info( + "Connection %r exceeded timeout; recycling", + self.connection) + recycle = True + elif self.__pool._invalidate_time > self.starttime: + self.__pool.logger.info( + "Connection %r invalidated due to pool invalidation; " + + "recycling", + self.connection + ) + recycle = True + elif self._soft_invalidate_time > self.starttime: + self.__pool.logger.info( + "Connection %r invalidated due to local soft invalidation; " + + "recycling", + self.connection + ) + recycle = True + + if recycle: + self.__close() + self.info.clear() + + # ensure that if self.__connect() fails, + # we are not referring to the previous stale connection here + self.connection = None + self.connection = self.__connect() + + if self.__pool.dispatch.connect: + self.__pool.dispatch.connect(self.connection, self) + return self.connection + + def __close(self): + self.finalize_callback.clear() + self.__pool._close_connection(self.connection) + + def __connect(self): + try: + self.starttime = time.time() + connection = self.__pool._invoke_creator(self) + self.__pool.logger.debug("Created new connection %r", connection) + return connection + except Exception as e: + self.__pool.logger.debug("Error on connect(): %s", e) + raise + + +def _finalize_fairy(connection, connection_record, + pool, ref, echo, fairy=None): + """Cleanup for a :class:`._ConnectionFairy` whether or not it's already + been garbage collected. + + """ + _refs.discard(connection_record) + + if ref is not None and \ + connection_record.fairy_ref is not ref: + return + + if connection is not None: + if connection_record and echo: + pool.logger.debug("Connection %r being returned to pool", + connection) + + try: + fairy = fairy or _ConnectionFairy( + connection, connection_record, echo) + assert fairy.connection is connection + fairy._reset(pool) + + # Immediately close detached instances + if not connection_record: + pool._close_connection(connection) + except BaseException as e: + pool.logger.error( + "Exception during reset or similar", exc_info=True) + if connection_record: + connection_record.invalidate(e=e) + if not isinstance(e, Exception): + raise + + if connection_record: + connection_record.checkin() + + +_refs = set() + + +class _ConnectionFairy(object): + + """Proxies a DBAPI connection and provides return-on-dereference + support. + + This is an internal object used by the :class:`.Pool` implementation + to provide context management to a DBAPI connection delivered by + that :class:`.Pool`. 
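+
+    In practice this means the checked-out object may be treated as the
+    DBAPI connection itself; a sketch, assuming a configured ``pool``::
+
+        fairy = pool.connect()
+        cursor = fairy.cursor()   # proxied to the underlying connection
+        fairy.close()             # checks the connection back in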
+ + The name "fairy" is inspired by the fact that the + :class:`._ConnectionFairy` object's lifespan is transitory, as it lasts + only for the length of a specific DBAPI connection being checked out from + the pool, and additionally that as a transparent proxy, it is mostly + invisible. + + .. seealso:: + + :class:`._ConnectionRecord` + + """ + + def __init__(self, dbapi_connection, connection_record, echo): + self.connection = dbapi_connection + self._connection_record = connection_record + self._echo = echo + + connection = None + """A reference to the actual DBAPI connection being tracked.""" + + _connection_record = None + """A reference to the :class:`._ConnectionRecord` object associated + with the DBAPI connection. + + This is currently an internal accessor which is subject to change. + + """ + + _reset_agent = None + """Refer to an object with a ``.commit()`` and ``.rollback()`` method; + if non-None, the "reset-on-return" feature will call upon this object + rather than directly against the dialect-level do_rollback() and + do_commit() methods. + + In practice, a :class:`.Connection` assigns a :class:`.Transaction` object + to this variable when one is in scope so that the :class:`.Transaction` + takes the job of committing or rolling back on return if + :meth:`.Connection.close` is called while the :class:`.Transaction` + still exists. + + This is essentially an "event handler" of sorts but is simplified as an + instance variable both for performance/simplicity as well as that there + can only be one "reset agent" at a time. + """ + + @classmethod + def _checkout(cls, pool, threadconns=None, fairy=None): + if not fairy: + fairy = _ConnectionRecord.checkout(pool) + + fairy._pool = pool + fairy._counter = 0 + + if threadconns is not None: + threadconns.current = weakref.ref(fairy) + + if fairy.connection is None: + raise exc.InvalidRequestError("This connection is closed") + fairy._counter += 1 + + if not pool.dispatch.checkout or fairy._counter != 1: + return fairy + + # Pool listeners can trigger a reconnection on checkout + attempts = 2 + while attempts > 0: + try: + pool.dispatch.checkout(fairy.connection, + fairy._connection_record, + fairy) + return fairy + except exc.DisconnectionError as e: + pool.logger.info( + "Disconnection detected on checkout: %s", e) + fairy._connection_record.invalidate(e) + try: + fairy.connection = \ + fairy._connection_record.get_connection() + except: + with util.safe_reraise(): + fairy._connection_record.checkin() + + attempts -= 1 + + pool.logger.info("Reconnection attempts exhausted on checkout") + fairy.invalidate() + raise exc.InvalidRequestError("This connection is closed") + + def _checkout_existing(self): + return _ConnectionFairy._checkout(self._pool, fairy=self) + + def _checkin(self): + _finalize_fairy(self.connection, self._connection_record, + self._pool, None, self._echo, fairy=self) + self.connection = None + self._connection_record = None + + _close = _checkin + + def _reset(self, pool): + if pool.dispatch.reset: + pool.dispatch.reset(self, self._connection_record) + if pool._reset_on_return is reset_rollback: + if self._echo: + pool.logger.debug("Connection %s rollback-on-return%s", + self.connection, + ", via agent" + if self._reset_agent else "") + if self._reset_agent: + self._reset_agent.rollback() + else: + pool._dialect.do_rollback(self) + elif pool._reset_on_return is reset_commit: + if self._echo: + pool.logger.debug("Connection %s commit-on-return%s", + self.connection, + ", via agent" + if self._reset_agent else "") + 
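+            # as in the rollback branch above, defer to the reset agent
+            # (typically an enclosing Transaction) when one is set, so the
+            # transaction issues the COMMIT rather than the dialect directly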
if self._reset_agent: + self._reset_agent.commit() + else: + pool._dialect.do_commit(self) + + @property + def _logger(self): + return self._pool.logger + + @property + def is_valid(self): + """Return True if this :class:`._ConnectionFairy` still refers + to an active DBAPI connection.""" + + return self.connection is not None + + @util.memoized_property + def info(self): + """Info dictionary associated with the underlying DBAPI connection + referred to by this :class:`.ConnectionFairy`, allowing user-defined + data to be associated with the connection. + + The data here will follow along with the DBAPI connection including + after it is returned to the connection pool and used again + in subsequent instances of :class:`._ConnectionFairy`. It is shared + with the :attr:`._ConnectionRecord.info` and :attr:`.Connection.info` + accessors. + + """ + return self._connection_record.info + + def invalidate(self, e=None, soft=False): + """Mark this connection as invalidated. + + This method can be called directly, and is also called as a result + of the :meth:`.Connection.invalidate` method. When invoked, + the DBAPI connection is immediately closed and discarded from + further use by the pool. The invalidation mechanism proceeds + via the :meth:`._ConnectionRecord.invalidate` internal method. + + :param e: an exception object indicating a reason for the invalidation. + + :param soft: if True, the connection isn't closed; instead, this + connection will be recycled on next checkout. + + .. versionadded:: 1.0.3 + + .. seealso:: + + :ref:`pool_connection_invalidation` + + """ + + if self.connection is None: + util.warn("Can't invalidate an already-closed connection.") + return + if self._connection_record: + self._connection_record.invalidate(e=e, soft=soft) + if not soft: + self.connection = None + self._checkin() + + def cursor(self, *args, **kwargs): + """Return a new DBAPI cursor for the underlying connection. + + This method is a proxy for the ``connection.cursor()`` DBAPI + method. + + """ + return self.connection.cursor(*args, **kwargs) + + def __getattr__(self, key): + return getattr(self.connection, key) + + def detach(self): + """Separate this connection from its Pool. + + This means that the connection will no longer be returned to the + pool when closed, and will instead be literally closed. The + containing ConnectionRecord is separated from the DB-API connection, + and will create a new connection when next used. + + Note that any overall connection limiting constraints imposed by a + Pool implementation may be violated after a detach, as the detached + connection is removed from the pool's knowledge and control. + """ + + if self._connection_record is not None: + _refs.remove(self._connection_record) + self._connection_record.fairy_ref = None + self._connection_record.connection = None + # TODO: should this be _return_conn? + self._pool._do_return_conn(self._connection_record) + self.info = self.info.copy() + self._connection_record = None + + def close(self): + self._counter -= 1 + if self._counter == 0: + self._checkin() + + +class SingletonThreadPool(Pool): + + """A Pool that maintains one connection per thread. + + Maintains one connection per each thread, never moving a connection to a + thread other than the one which it was created in. + + .. warning:: the :class:`.SingletonThreadPool` will call ``.close()`` + on arbitrary connections that exist beyond the size setting of + ``pool_size``, e.g. if more unique **thread identities** + than what ``pool_size`` states are used. 
This cleanup is + non-deterministic and not sensitive to whether or not the connections + linked to those thread identities are currently in use. + + :class:`.SingletonThreadPool` may be improved in a future release, + however in its current status it is generally used only for test + scenarios using a SQLite ``:memory:`` database and is not recommended + for production use. + + + Options are the same as those of :class:`.Pool`, as well as: + + :param pool_size: The number of threads in which to maintain connections + at once. Defaults to five. + + :class:`.SingletonThreadPool` is used by the SQLite dialect + automatically when a memory-based database is used. + See :ref:`sqlite_toplevel`. + + """ + + def __init__(self, creator, pool_size=5, **kw): + kw['use_threadlocal'] = True + Pool.__init__(self, creator, **kw) + self._conn = threading.local() + self._all_conns = set() + self.size = pool_size + + def recreate(self): + self.logger.info("Pool recreating") + return self.__class__(self._creator, + pool_size=self.size, + recycle=self._recycle, + echo=self.echo, + logging_name=self._orig_logging_name, + use_threadlocal=self._use_threadlocal, + reset_on_return=self._reset_on_return, + _dispatch=self.dispatch, + _dialect=self._dialect) + + def dispose(self): + """Dispose of this pool.""" + + for conn in self._all_conns: + try: + conn.close() + except Exception: + # pysqlite won't even let you close a conn from a thread + # that didn't create it + pass + + self._all_conns.clear() + + def _cleanup(self): + while len(self._all_conns) >= self.size: + c = self._all_conns.pop() + c.close() + + def status(self): + return "SingletonThreadPool id:%d size: %d" % \ + (id(self), len(self._all_conns)) + + def _do_return_conn(self, conn): + pass + + def _do_get(self): + try: + c = self._conn.current() + if c: + return c + except AttributeError: + pass + c = self._create_connection() + self._conn.current = weakref.ref(c) + if len(self._all_conns) >= self.size: + self._cleanup() + self._all_conns.add(c) + return c + + +class QueuePool(Pool): + + """A :class:`.Pool` that imposes a limit on the number of open connections. + + :class:`.QueuePool` is the default pooling implementation used for + all :class:`.Engine` objects, unless the SQLite dialect is in use. + + """ + + def __init__(self, creator, pool_size=5, max_overflow=10, timeout=30, + **kw): + """ + Construct a QueuePool. + + :param creator: a callable function that returns a DB-API + connection object, same as that of :paramref:`.Pool.creator`. + + :param pool_size: The size of the pool to be maintained, + defaults to 5. This is the largest number of connections that + will be kept persistently in the pool. Note that the pool + begins with no connections; once this number of connections + is requested, that number of connections will remain. + ``pool_size`` can be set to 0 to indicate no size limit; to + disable pooling, use a :class:`~sqlalchemy.pool.NullPool` + instead. + + :param max_overflow: The maximum overflow size of the + pool. When the number of checked-out connections reaches the + size set in pool_size, additional connections will be + returned up to this limit. When those additional connections + are returned to the pool, they are disconnected and + discarded. It follows then that the total number of + simultaneous connections the pool will allow is pool_size + + `max_overflow`, and the total number of "sleeping" + connections the pool will allow is pool_size. 
`max_overflow`
+          can be set to -1 to indicate no overflow limit; no limit
+          will be placed on the total number of concurrent
+          connections. Defaults to 10.
+
+        :param timeout: The number of seconds to wait before giving up
+          on returning a connection. Defaults to 30.
+
+        :param \**kw: Other keyword arguments including
+          :paramref:`.Pool.recycle`, :paramref:`.Pool.echo`,
+          :paramref:`.Pool.reset_on_return` and others are passed to the
+          :class:`.Pool` constructor.
+
+        """
+        Pool.__init__(self, creator, **kw)
+        self._pool = sqla_queue.Queue(pool_size)
+        self._overflow = 0 - pool_size
+        self._max_overflow = max_overflow
+        self._timeout = timeout
+        self._overflow_lock = threading.Lock()
+
+    def _do_return_conn(self, conn):
+        try:
+            self._pool.put(conn, False)
+        except sqla_queue.Full:
+            try:
+                conn.close()
+            finally:
+                self._dec_overflow()
+
+    def _do_get(self):
+        use_overflow = self._max_overflow > -1
+
+        try:
+            wait = use_overflow and self._overflow >= self._max_overflow
+            return self._pool.get(wait, self._timeout)
+        except sqla_queue.Empty:
+            if use_overflow and self._overflow >= self._max_overflow:
+                if not wait:
+                    return self._do_get()
+                else:
+                    raise exc.TimeoutError(
+                        "QueuePool limit of size %d overflow %d reached, "
+                        "connection timed out, timeout %d" %
+                        (self.size(), self.overflow(), self._timeout))
+
+            if self._inc_overflow():
+                try:
+                    return self._create_connection()
+                except:
+                    with util.safe_reraise():
+                        self._dec_overflow()
+            else:
+                return self._do_get()
+
+    def _inc_overflow(self):
+        if self._max_overflow == -1:
+            self._overflow += 1
+            return True
+        with self._overflow_lock:
+            if self._overflow < self._max_overflow:
+                self._overflow += 1
+                return True
+            else:
+                return False
+
+    def _dec_overflow(self):
+        if self._max_overflow == -1:
+            self._overflow -= 1
+            return True
+        with self._overflow_lock:
+            self._overflow -= 1
+            return True
+
+    def recreate(self):
+        self.logger.info("Pool recreating")
+        return self.__class__(self._creator, pool_size=self._pool.maxsize,
+                              max_overflow=self._max_overflow,
+                              timeout=self._timeout,
+                              recycle=self._recycle, echo=self.echo,
+                              logging_name=self._orig_logging_name,
+                              use_threadlocal=self._use_threadlocal,
+                              reset_on_return=self._reset_on_return,
+                              _dispatch=self.dispatch,
+                              _dialect=self._dialect)
+
+    def dispose(self):
+        while True:
+            try:
+                conn = self._pool.get(False)
+                conn.close()
+            except sqla_queue.Empty:
+                break
+
+        self._overflow = 0 - self.size()
+        self.logger.info("Pool disposed. %s", self.status())
+
+    def status(self):
+        return "Pool size: %d Connections in pool: %d "\
+            "Current Overflow: %d Current Checked out "\
+            "connections: %d" % (self.size(),
+                                 self.checkedin(),
+                                 self.overflow(),
+                                 self.checkedout())
+
+    def size(self):
+        return self._pool.maxsize
+
+    def checkedin(self):
+        return self._pool.qsize()
+
+    def overflow(self):
+        return self._overflow
+
+    def checkedout(self):
+        return self._pool.maxsize - self._pool.qsize() + self._overflow
+
+
+class NullPool(Pool):
+
+    """A Pool which does not pool connections.
+
+    Instead it literally opens and closes the underlying DB-API connection
+    per each connection open/close.
+
+    Reconnect-related functions such as ``recycle`` and connection
+    invalidation are not supported by this Pool implementation, since
+    no connections are held persistently.
+
+    .. versionchanged:: 0.7
+        :class:`.NullPool` is used by the SQLite dialect automatically
+        when a file-based database is used. See :ref:`sqlite_toplevel`.
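+
+    Typical use is via :func:`.create_engine` (a sketch; the URL is just
+    an example)::
+
+        from sqlalchemy import create_engine
+        from sqlalchemy.pool import NullPool
+
+        engine = create_engine('sqlite:///some.db', poolclass=NullPool)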
+ + """ + + def status(self): + return "NullPool" + + def _do_return_conn(self, conn): + conn.close() + + def _do_get(self): + return self._create_connection() + + def recreate(self): + self.logger.info("Pool recreating") + + return self.__class__(self._creator, + recycle=self._recycle, + echo=self.echo, + logging_name=self._orig_logging_name, + use_threadlocal=self._use_threadlocal, + reset_on_return=self._reset_on_return, + _dispatch=self.dispatch, + _dialect=self._dialect) + + def dispose(self): + pass + + +class StaticPool(Pool): + + """A Pool of exactly one connection, used for all requests. + + Reconnect-related functions such as ``recycle`` and connection + invalidation (which is also used to support auto-reconnect) are not + currently supported by this Pool implementation but may be implemented + in a future release. + + """ + + @memoized_property + def _conn(self): + return self._creator() + + @memoized_property + def connection(self): + return _ConnectionRecord(self) + + def status(self): + return "StaticPool" + + def dispose(self): + if '_conn' in self.__dict__: + self._conn.close() + self._conn = None + + def recreate(self): + self.logger.info("Pool recreating") + return self.__class__(creator=self._creator, + recycle=self._recycle, + use_threadlocal=self._use_threadlocal, + reset_on_return=self._reset_on_return, + echo=self.echo, + logging_name=self._orig_logging_name, + _dispatch=self.dispatch, + _dialect=self._dialect) + + def _create_connection(self): + return self._conn + + def _do_return_conn(self, conn): + pass + + def _do_get(self): + return self.connection + + +class AssertionPool(Pool): + + """A :class:`.Pool` that allows at most one checked out connection at + any given time. + + This will raise an exception if more than one connection is checked out + at a time. Useful for debugging code that is using more connections + than desired. + + .. versionchanged:: 0.7 + :class:`.AssertionPool` also logs a traceback of where + the original connection was checked out, and reports + this in the assertion error raised. + + """ + + def __init__(self, *args, **kw): + self._conn = None + self._checked_out = False + self._store_traceback = kw.pop('store_traceback', True) + self._checkout_traceback = None + Pool.__init__(self, *args, **kw) + + def status(self): + return "AssertionPool" + + def _do_return_conn(self, conn): + if not self._checked_out: + raise AssertionError("connection is not checked out") + self._checked_out = False + assert conn is self._conn + + def dispose(self): + self._checked_out = False + if self._conn: + self._conn.close() + + def recreate(self): + self.logger.info("Pool recreating") + return self.__class__(self._creator, echo=self.echo, + logging_name=self._orig_logging_name, + _dispatch=self.dispatch, + _dialect=self._dialect) + + def _do_get(self): + if self._checked_out: + if self._checkout_traceback: + suffix = ' at:\n%s' % ''.join( + chop_traceback(self._checkout_traceback)) + else: + suffix = '' + raise AssertionError("connection is already checked out" + suffix) + + if not self._conn: + self._conn = self._create_connection() + + self._checked_out = True + if self._store_traceback: + self._checkout_traceback = traceback.format_stack() + return self._conn + + +class _DBProxy(object): + + """Layers connection pooling behavior on top of a standard DB-API module. + + Proxies a DB-API 2.0 connect() call to a connection pool keyed to the + specific connect parameters. Other functions and attributes are delegated + to the underlying DB-API module. 
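+
+    For example (a sketch, again assuming ``sqlite3`` as the proxied
+    module)::
+
+        proxy = _DBProxy(sqlite3, poolclass=QueuePool, pool_size=5)
+        conn = proxy.connect('some.db')   # pooled connection
+        style = proxy.paramstyle          # attribute delegated to sqlite3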
+ """ + + def __init__(self, module, poolclass=QueuePool, **kw): + """Initializes a new proxy. + + module + a DB-API 2.0 module + + poolclass + a Pool class, defaulting to QueuePool + + Other parameters are sent to the Pool object's constructor. + + """ + + self.module = module + self.kw = kw + self.poolclass = poolclass + self.pools = {} + self._create_pool_mutex = threading.Lock() + + def close(self): + for key in list(self.pools): + del self.pools[key] + + def __del__(self): + self.close() + + def __getattr__(self, key): + return getattr(self.module, key) + + def get_pool(self, *args, **kw): + key = self._serialize(*args, **kw) + try: + return self.pools[key] + except KeyError: + self._create_pool_mutex.acquire() + try: + if key not in self.pools: + kw.pop('sa_pool_key', None) + pool = self.poolclass( + lambda: self.module.connect(*args, **kw), **self.kw) + self.pools[key] = pool + return pool + else: + return self.pools[key] + finally: + self._create_pool_mutex.release() + + def connect(self, *args, **kw): + """Activate a connection to the database. + + Connect to the database using this DBProxy's module and the given + connect arguments. If the arguments match an existing pool, the + connection will be returned from the pool's current thread-local + connection instance, or if there is no thread-local connection + instance it will be checked out from the set of pooled connections. + + If the pool has no available connections and allows new connections + to be created, a new database connection will be made. + + """ + + return self.get_pool(*args, **kw).connect() + + def dispose(self, *args, **kw): + """Dispose the pool referenced by the given connect arguments.""" + + key = self._serialize(*args, **kw) + try: + del self.pools[key] + except KeyError: + pass + + def _serialize(self, *args, **kw): + if "sa_pool_key" in kw: + return kw['sa_pool_key'] + + return tuple( + list(args) + + [(k, kw[k]) for k in sorted(kw)] + ) diff --git a/lib/python3.4/site-packages/sqlalchemy/processors.py b/lib/python3.4/site-packages/sqlalchemy/processors.py new file mode 100644 index 0000000..b57e674 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/processors.py @@ -0,0 +1,155 @@ +# sqlalchemy/processors.py +# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors +# +# Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""defines generic type conversion functions, as used in bind and result +processors. + +They all share one common characteristic: None is passed through unchanged. + +""" + +import codecs +import re +import datetime +from . import util + + +def str_to_datetime_processor_factory(regexp, type_): + rmatch = regexp.match + # Even on python2.6 datetime.strptime is both slower than this code + # and it does not support microseconds. + has_named_groups = bool(regexp.groupindex) + + def process(value): + if value is None: + return None + else: + try: + m = rmatch(value) + except TypeError: + raise ValueError("Couldn't parse %s string '%r' " + "- value is not a string." 
% + (type_.__name__, value)) + if m is None: + raise ValueError("Couldn't parse %s string: " + "'%s'" % (type_.__name__, value)) + if has_named_groups: + groups = m.groupdict(0) + return type_(**dict(list(zip( + iter(groups.keys()), + list(map(int, iter(groups.values()))) + )))) + else: + return type_(*list(map(int, m.groups(0)))) + return process + + +def boolean_to_int(value): + if value is None: + return None + else: + return int(value) + + +def py_fallback(): + def to_unicode_processor_factory(encoding, errors=None): + decoder = codecs.getdecoder(encoding) + + def process(value): + if value is None: + return None + else: + # decoder returns a tuple: (value, len). Simply dropping the + # len part is safe: it is done that way in the normal + # 'xx'.decode(encoding) code path. + return decoder(value, errors)[0] + return process + + def to_conditional_unicode_processor_factory(encoding, errors=None): + decoder = codecs.getdecoder(encoding) + + def process(value): + if value is None: + return None + elif isinstance(value, util.text_type): + return value + else: + # decoder returns a tuple: (value, len). Simply dropping the + # len part is safe: it is done that way in the normal + # 'xx'.decode(encoding) code path. + return decoder(value, errors)[0] + return process + + def to_decimal_processor_factory(target_class, scale): + fstring = "%%.%df" % scale + + def process(value): + if value is None: + return None + else: + return target_class(fstring % value) + return process + + def to_float(value): + if value is None: + return None + else: + return float(value) + + def to_str(value): + if value is None: + return None + else: + return str(value) + + def int_to_boolean(value): + if value is None: + return None + else: + return value and True or False + + DATETIME_RE = re.compile( + "(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d+))?") + TIME_RE = re.compile("(\d+):(\d+):(\d+)(?:\.(\d+))?") + DATE_RE = re.compile("(\d+)-(\d+)-(\d+)") + + str_to_datetime = str_to_datetime_processor_factory(DATETIME_RE, + datetime.datetime) + str_to_time = str_to_datetime_processor_factory(TIME_RE, datetime.time) + str_to_date = str_to_datetime_processor_factory(DATE_RE, datetime.date) + return locals() + +try: + from sqlalchemy.cprocessors import UnicodeResultProcessor, \ + DecimalResultProcessor, \ + to_float, to_str, int_to_boolean, \ + str_to_datetime, str_to_time, \ + str_to_date + + def to_unicode_processor_factory(encoding, errors=None): + if errors is not None: + return UnicodeResultProcessor(encoding, errors).process + else: + return UnicodeResultProcessor(encoding).process + + def to_conditional_unicode_processor_factory(encoding, errors=None): + if errors is not None: + return UnicodeResultProcessor(encoding, errors).conditional_process + else: + return UnicodeResultProcessor(encoding).conditional_process + + def to_decimal_processor_factory(target_class, scale): + # Note that the scale argument is not taken into account for integer + # values in the C implementation while it is in the Python one. + # For example, the Python implementation might return + # Decimal('5.00000') whereas the C implementation will + # return Decimal('5'). These are equivalent of course. 
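+        # e.g. scale=2 produces the format string "%.2f", so 5.0 becomes
+        # Decimal('5.00') via the Python fallback, while the C version
+        # may return the equivalent Decimal('5')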
+ return DecimalResultProcessor(target_class, "%%.%df" % scale).process + +except ImportError: + globals().update(py_fallback()) diff --git a/lib/python3.4/site-packages/sqlalchemy/schema.py b/lib/python3.4/site-packages/sqlalchemy/schema.py new file mode 100644 index 0000000..5b703f7 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/schema.py @@ -0,0 +1,65 @@ +# schema.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Compatibility namespace for sqlalchemy.sql.schema and related. + +""" + +from .sql.base import ( + SchemaVisitor + ) + + +from .sql.schema import ( + CheckConstraint, + Column, + ColumnDefault, + Constraint, + DefaultClause, + DefaultGenerator, + FetchedValue, + ForeignKey, + ForeignKeyConstraint, + Index, + MetaData, + PassiveDefault, + PrimaryKeyConstraint, + SchemaItem, + Sequence, + Table, + ThreadLocalMetaData, + UniqueConstraint, + _get_table_key, + ColumnCollectionConstraint, + ColumnCollectionMixin + ) + + +from .sql.naming import conv + + +from .sql.ddl import ( + DDL, + CreateTable, + DropTable, + CreateSequence, + DropSequence, + CreateIndex, + DropIndex, + CreateSchema, + DropSchema, + _DropView, + CreateColumn, + AddConstraint, + DropConstraint, + DDLBase, + DDLElement, + _CreateDropBase, + _DDLCompiles, + sort_tables, + sort_tables_and_constraints +) diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/__init__.py b/lib/python3.4/site-packages/sqlalchemy/sql/__init__.py new file mode 100644 index 0000000..eb305a8 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/__init__.py @@ -0,0 +1,92 @@ +# sql/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from .expression import ( + Alias, + ClauseElement, + ColumnCollection, + ColumnElement, + CompoundSelect, + Delete, + FromClause, + Insert, + Join, + Select, + Selectable, + TableClause, + Update, + alias, + and_, + asc, + between, + bindparam, + case, + cast, + collate, + column, + delete, + desc, + distinct, + except_, + except_all, + exists, + extract, + false, + False_, + func, + funcfilter, + insert, + intersect, + intersect_all, + join, + label, + literal, + literal_column, + modifier, + not_, + null, + or_, + outerjoin, + outparam, + over, + select, + subquery, + table, + text, + true, + True_, + tuple_, + type_coerce, + union, + union_all, + update, +) + +from .visitors import ClauseVisitor + + +def __go(lcls): + global __all__ + from .. import util as _sa_util + + import inspect as _inspect + + __all__ = sorted(name for name, obj in lcls.items() + if not (name.startswith('_') or _inspect.ismodule(obj))) + + from .annotation import _prepare_annotations, Annotated + from .elements import AnnotatedColumnElement, ClauseList + from .selectable import AnnotatedFromClause + _prepare_annotations(ColumnElement, AnnotatedColumnElement) + _prepare_annotations(FromClause, AnnotatedFromClause) + _prepare_annotations(ClauseList, Annotated) + + _sa_util.dependencies.resolve_all("sqlalchemy.sql") + + from . 
import naming + +__go(locals()) diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/annotation.py b/lib/python3.4/site-packages/sqlalchemy/sql/annotation.py new file mode 100644 index 0000000..6ad25ab --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/annotation.py @@ -0,0 +1,196 @@ +# sql/annotation.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""The :class:`.Annotated` class and related routines; creates hash-equivalent +copies of SQL constructs which contain context-specific markers and +associations. + +""" + +from .. import util +from . import operators + + +class Annotated(object): + """clones a ClauseElement and applies an 'annotations' dictionary. + + Unlike regular clones, this clone also mimics __hash__() and + __cmp__() of the original element so that it takes its place + in hashed collections. + + A reference to the original element is maintained, for the important + reason of keeping its hash value current. When GC'ed, the + hash value may be reused, causing conflicts. + + """ + + def __new__(cls, *args): + if not args: + # clone constructor + return object.__new__(cls) + else: + element, values = args + # pull appropriate subclass from registry of annotated + # classes + try: + cls = annotated_classes[element.__class__] + except KeyError: + cls = _new_annotation_type(element.__class__, cls) + return object.__new__(cls) + + def __init__(self, element, values): + self.__dict__ = element.__dict__.copy() + self.__element = element + self._annotations = values + self._hash = hash(element) + + def _annotate(self, values): + _values = self._annotations.copy() + _values.update(values) + return self._with_annotations(_values) + + def _with_annotations(self, values): + clone = self.__class__.__new__(self.__class__) + clone.__dict__ = self.__dict__.copy() + clone._annotations = values + return clone + + def _deannotate(self, values=None, clone=True): + if values is None: + return self.__element + else: + _values = self._annotations.copy() + for v in values: + _values.pop(v, None) + return self._with_annotations(_values) + + def _compiler_dispatch(self, visitor, **kw): + return self.__element.__class__._compiler_dispatch( + self, visitor, **kw) + + @property + def _constructor(self): + return self.__element._constructor + + def _clone(self): + clone = self.__element._clone() + if clone is self.__element: + # detect immutable, don't change anything + return self + else: + # update the clone with any changes that have occurred + # to this object's __dict__. + clone.__dict__.update(self.__dict__) + return self.__class__(clone, self._annotations) + + def __hash__(self): + return self._hash + + def __eq__(self, other): + if isinstance(self.__element, operators.ColumnOperators): + return self.__element.__class__.__eq__(self, other) + else: + return hash(other) == hash(self) + + +# hard-generate Annotated subclasses. this technique +# is used instead of on-the-fly types (i.e. type.__new__()) +# so that the resulting objects are pickleable. +annotated_classes = {} + + +def _deep_annotate(element, annotations, exclude=None): + """Deep copy the given ClauseElement, annotating each element + with the given annotations dictionary. + + Elements within the exclude collection will be cloned but not annotated. 
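+
+    A sketch (``expr`` and ``col`` standing in for arbitrary clause
+    elements)::
+
+        copied = _deep_annotate(expr, {'_orm_adapt': True},
+                                exclude=set([col]))
+        # `col` (and anything proxying it) is cloned but left unannotated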
+ + """ + def clone(elem): + if exclude and \ + hasattr(elem, 'proxy_set') and \ + elem.proxy_set.intersection(exclude): + newelem = elem._clone() + elif annotations != elem._annotations: + newelem = elem._annotate(annotations) + else: + newelem = elem + newelem._copy_internals(clone=clone) + return newelem + + if element is not None: + element = clone(element) + return element + + +def _deep_deannotate(element, values=None): + """Deep copy the given element, removing annotations.""" + + cloned = util.column_dict() + + def clone(elem): + # if a values dict is given, + # the elem must be cloned each time it appears, + # as there may be different annotations in source + # elements that are remaining. if totally + # removing all annotations, can assume the same + # slate... + if values or elem not in cloned: + newelem = elem._deannotate(values=values, clone=True) + newelem._copy_internals(clone=clone) + if not values: + cloned[elem] = newelem + return newelem + else: + return cloned[elem] + + if element is not None: + element = clone(element) + return element + + +def _shallow_annotate(element, annotations): + """Annotate the given ClauseElement and copy its internals so that + internal objects refer to the new annotated object. + + Basically used to apply a "dont traverse" annotation to a + selectable, without digging throughout the whole + structure wasting time. + """ + element = element._annotate(annotations) + element._copy_internals() + return element + + +def _new_annotation_type(cls, base_cls): + if issubclass(cls, Annotated): + return cls + elif cls in annotated_classes: + return annotated_classes[cls] + + for super_ in cls.__mro__: + # check if an Annotated subclass more specific than + # the given base_cls is already registered, such + # as AnnotatedColumnElement. + if super_ in annotated_classes: + base_cls = annotated_classes[super_] + break + + annotated_classes[cls] = anno_cls = type( + "Annotated%s" % cls.__name__, + (base_cls, cls), {}) + globals()["Annotated%s" % cls.__name__] = anno_cls + return anno_cls + + +def _prepare_annotations(target_hierarchy, base_cls): + stack = [target_hierarchy] + while stack: + cls = stack.pop() + stack.extend(cls.__subclasses__()) + + _new_annotation_type(cls, base_cls) diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/base.py b/lib/python3.4/site-packages/sqlalchemy/sql/base.py new file mode 100644 index 0000000..cf7dcfd --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/base.py @@ -0,0 +1,635 @@ +# sql/base.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Foundational utilities common to many sql modules. + +""" + + +from .. 
import util, exc +import itertools +from .visitors import ClauseVisitor +import re +import collections + +PARSE_AUTOCOMMIT = util.symbol('PARSE_AUTOCOMMIT') +NO_ARG = util.symbol('NO_ARG') + + +class Immutable(object): + """mark a ClauseElement as 'immutable' when expressions are cloned.""" + + def unique_params(self, *optionaldict, **kwargs): + raise NotImplementedError("Immutable objects do not support copying") + + def params(self, *optionaldict, **kwargs): + raise NotImplementedError("Immutable objects do not support copying") + + def _clone(self): + return self + + +def _from_objects(*elements): + return itertools.chain(*[element._from_objects for element in elements]) + + +@util.decorator +def _generative(fn, *args, **kw): + """Mark a method as generative.""" + + self = args[0]._generate() + fn(self, *args[1:], **kw) + return self + + +class _DialectArgView(collections.MutableMapping): + """A dictionary view of dialect-level arguments in the form + <dialect>_<kwarg>. + + """ + + def __init__(self, obj): + self.obj = obj + + def _key(self, key): + try: + dialect, value_key = key.split("_", 1) + except ValueError: + raise KeyError(key) + else: + return dialect, value_key + + def __getitem__(self, key): + dialect, value_key = self._key(key) + + try: + opt = self.obj.dialect_options[dialect] + except exc.NoSuchModuleError: + raise KeyError(key) + else: + return opt[value_key] + + def __setitem__(self, key, value): + try: + dialect, value_key = self._key(key) + except KeyError: + raise exc.ArgumentError( + "Keys must be of the form <dialectname>_<argument_name>") + else: + self.obj.dialect_options[dialect][value_key] = value + + def __delitem__(self, key): + dialect, value_key = self._key(key) + del self.obj.dialect_options[dialect][value_key] + + def __len__(self): + return sum(len(args._non_defaults) for args in + self.obj.dialect_options.values()) + + def __iter__(self): + return ( + util.safe_kwarg("%s_%s" % (dialect_name, value_name)) + for dialect_name in self.obj.dialect_options + for value_name in + self.obj.dialect_options[dialect_name]._non_defaults + ) + + +class _DialectArgDict(collections.MutableMapping): + """A dictionary view of dialect-level arguments for a specific + dialect. + + Maintains a separate collection of user-specified arguments + and dialect-specified default arguments. + + """ + + def __init__(self): + self._non_defaults = {} + self._defaults = {} + + def __len__(self): + return len(set(self._non_defaults).union(self._defaults)) + + def __iter__(self): + return iter(set(self._non_defaults).union(self._defaults)) + + def __getitem__(self, key): + if key in self._non_defaults: + return self._non_defaults[key] + else: + return self._defaults[key] + + def __setitem__(self, key, value): + self._non_defaults[key] = value + + def __delitem__(self, key): + del self._non_defaults[key] + + +class DialectKWArgs(object): + """Establish the ability for a class to have dialect-specific arguments + with defaults and constructor validation. + + The :class:`.DialectKWArgs` interacts with the + :attr:`.DefaultDialect.construct_arguments` present on a dialect. + + .. seealso:: + + :attr:`.DefaultDialect.construct_arguments` + + """ + + @classmethod + def argument_for(cls, dialect_name, argument_name, default): + """Add a new kind of dialect-specific keyword argument for this class. 
+ + E.g.:: + + Index.argument_for("mydialect", "length", None) + + some_index = Index('a', 'b', mydialect_length=5) + + The :meth:`.DialectKWArgs.argument_for` method is a per-argument + way of adding extra arguments to the + :attr:`.DefaultDialect.construct_arguments` dictionary. This + dictionary provides a list of argument names accepted by various + schema-level constructs on behalf of a dialect. + + New dialects should typically specify this dictionary all at once as a + data member of the dialect class. The use case for ad-hoc addition of + argument names is typically for end-user code that is also using + a custom compilation scheme which consumes the additional arguments. + + :param dialect_name: name of a dialect. The dialect must be + locatable, else a :class:`.NoSuchModuleError` is raised. The + dialect must also include an existing + :attr:`.DefaultDialect.construct_arguments` collection, indicating + that it participates in the keyword-argument validation and default + system, else :class:`.ArgumentError` is raised. If the dialect does + not include this collection, then any keyword argument can already be + specified on behalf of this dialect. All dialects packaged + within SQLAlchemy include this collection, however for third party + dialects, support may vary. + + :param argument_name: name of the parameter. + + :param default: default value of the parameter. + + .. versionadded:: 0.9.4 + + """ + + construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name] + if construct_arg_dictionary is None: + raise exc.ArgumentError( + "Dialect '%s' does not have keyword-argument " + "validation and defaults enabled" % + dialect_name) + if cls not in construct_arg_dictionary: + construct_arg_dictionary[cls] = {} + construct_arg_dictionary[cls][argument_name] = default + + @util.memoized_property + def dialect_kwargs(self): + """A collection of keyword arguments specified as dialect-specific + options to this construct. + + The arguments are present here in their original ``<dialect>_<kwarg>`` + format. Only arguments that were actually passed are included; + unlike the :attr:`.DialectKWArgs.dialect_options` collection, which + contains all options known by this dialect including defaults. + + The collection is also writable; keys are accepted of the + form ``<dialect>_<kwarg>`` where the value will be assembled + into the list of options. + + .. versionadded:: 0.9.2 + + .. versionchanged:: 0.9.4 The :attr:`.DialectKWArgs.dialect_kwargs` + collection is now writable. + + .. 
seealso:: + + :attr:`.DialectKWArgs.dialect_options` - nested dictionary form + + """ + return _DialectArgView(self) + + @property + def kwargs(self): + """A synonym for :attr:`.DialectKWArgs.dialect_kwargs`.""" + return self.dialect_kwargs + + @util.dependencies("sqlalchemy.dialects") + def _kw_reg_for_dialect(dialects, dialect_name): + dialect_cls = dialects.registry.load(dialect_name) + if dialect_cls.construct_arguments is None: + return None + return dict(dialect_cls.construct_arguments) + _kw_registry = util.PopulateDict(_kw_reg_for_dialect) + + def _kw_reg_for_dialect_cls(self, dialect_name): + construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name] + d = _DialectArgDict() + + if construct_arg_dictionary is None: + d._defaults.update({"*": None}) + else: + for cls in reversed(self.__class__.__mro__): + if cls in construct_arg_dictionary: + d._defaults.update(construct_arg_dictionary[cls]) + return d + + @util.memoized_property + def dialect_options(self): + """A collection of keyword arguments specified as dialect-specific + options to this construct. + + This is a two-level nested registry, keyed to ``<dialect_name>`` + and ``<argument_name>``. For example, the ``postgresql_where`` + argument would be locatable as:: + + arg = my_object.dialect_options['postgresql']['where'] + + .. versionadded:: 0.9.2 + + .. seealso:: + + :attr:`.DialectKWArgs.dialect_kwargs` - flat dictionary form + + """ + + return util.PopulateDict( + util.portable_instancemethod(self._kw_reg_for_dialect_cls) + ) + + def _validate_dialect_kwargs(self, kwargs): + # validate remaining kwargs that they all specify DB prefixes + + if not kwargs: + return + + for k in kwargs: + m = re.match('^(.+?)_(.+)$', k) + if not m: + raise TypeError( + "Additional arguments should be " + "named <dialectname>_<argname>, got '%s'" % k) + dialect_name, arg_name = m.group(1, 2) + + try: + construct_arg_dictionary = self.dialect_options[dialect_name] + except exc.NoSuchModuleError: + util.warn( + "Can't validate argument %r; can't " + "locate any SQLAlchemy dialect named %r" % + (k, dialect_name)) + self.dialect_options[dialect_name] = d = _DialectArgDict() + d._defaults.update({"*": None}) + d._non_defaults[arg_name] = kwargs[k] + else: + if "*" not in construct_arg_dictionary and \ + arg_name not in construct_arg_dictionary: + raise exc.ArgumentError( + "Argument %r is not accepted by " + "dialect %r on behalf of %r" % ( + k, + dialect_name, self.__class__ + )) + else: + construct_arg_dictionary[arg_name] = kwargs[k] + + +class Generative(object): + """Allow a ClauseElement to generate itself via the + @_generative decorator. + + """ + + def _generate(self): + s = self.__class__.__new__(self.__class__) + s.__dict__ = self.__dict__.copy() + return s + + +class Executable(Generative): + """Mark a ClauseElement as supporting execution. + + :class:`.Executable` is a superclass for all "statement" types + of objects, including :func:`select`, :func:`delete`, :func:`update`, + :func:`insert`, :func:`text`. + + """ + + supports_execution = True + _execution_options = util.immutabledict() + _bind = None + + @_generative + def execution_options(self, **kw): + """ Set non-SQL options for the statement which take effect during + execution. + + Execution options can be set on a per-statement or + per :class:`.Connection` basis. Additionally, the + :class:`.Engine` and ORM :class:`~.orm.query.Query` objects provide + access to execution options which they in turn configure upon + connections. + + The :meth:`execution_options` method is generative. 
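# [editor's note] A minimal sketch (not part of the commit) of the
# argument_for() / dialect_kwargs / dialect_options machinery defined above.
# The postgresql dialect is used because it publishes construct_arguments;
# "render_special" is a made-up argument name for illustration only.
from sqlalchemy import Column, Index, Integer, MetaData, Table

Index.argument_for("postgresql", "render_special", False)

t = Table("a", MetaData(), Column("x", Integer))
ix = Index("ix_a_x", t.c.x, postgresql_render_special=True)

assert ix.dialect_kwargs["postgresql_render_special"] is True      # flat form
assert ix.dialect_options["postgresql"]["render_special"] is True  # nested form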
A new + instance of this statement is returned that contains the options:: + + statement = select([table.c.x, table.c.y]) + statement = statement.execution_options(autocommit=True) + + Note that only a subset of possible execution options can be applied + to a statement - these include "autocommit" and "stream_results", + but not "isolation_level" or "compiled_cache". + See :meth:`.Connection.execution_options` for a full list of + possible options. + + .. seealso:: + + :meth:`.Connection.execution_options()` + + :meth:`.Query.execution_options()` + + """ + if 'isolation_level' in kw: + raise exc.ArgumentError( + "'isolation_level' execution option may only be specified " + "on Connection.execution_options(), or " + "per-engine using the isolation_level " + "argument to create_engine()." + ) + if 'compiled_cache' in kw: + raise exc.ArgumentError( + "'compiled_cache' execution option may only be specified " + "on Connection.execution_options(), not per statement." + ) + self._execution_options = self._execution_options.union(kw) + + def execute(self, *multiparams, **params): + """Compile and execute this :class:`.Executable`.""" + e = self.bind + if e is None: + label = getattr(self, 'description', self.__class__.__name__) + msg = ('This %s is not directly bound to a Connection or Engine.' + 'Use the .execute() method of a Connection or Engine ' + 'to execute this construct.' % label) + raise exc.UnboundExecutionError(msg) + return e._execute_clauseelement(self, multiparams, params) + + def scalar(self, *multiparams, **params): + """Compile and execute this :class:`.Executable`, returning the + result's scalar representation. + + """ + return self.execute(*multiparams, **params).scalar() + + @property + def bind(self): + """Returns the :class:`.Engine` or :class:`.Connection` to + which this :class:`.Executable` is bound, or None if none found. + + This is a traversal which checks locally, then + checks among the "from" clauses of associated objects + until a bound engine or connection is found. + + """ + if self._bind is not None: + return self._bind + + for f in _from_objects(self): + if f is self: + continue + engine = f.bind + if engine is not None: + return engine + else: + return None + + +class SchemaEventTarget(object): + """Base class for elements that are the targets of :class:`.DDLEvents` + events. + + This includes :class:`.SchemaItem` as well as :class:`.SchemaType`. + + """ + + def _set_parent(self, parent): + """Associate with this SchemaEvent's parent object.""" + + raise NotImplementedError() + + def _set_parent_with_dispatch(self, parent): + self.dispatch.before_parent_attach(self, parent) + self._set_parent(parent) + self.dispatch.after_parent_attach(self, parent) + + +class SchemaVisitor(ClauseVisitor): + """Define the visiting for ``SchemaItem`` objects.""" + + __traverse_options__ = {'schema_visitor': True} + + +class ColumnCollection(util.OrderedProperties): + """An ordered dictionary that stores a list of ColumnElement + instances. + + Overrides the ``__eq__()`` method to produce SQL clauses between + sets of correlated columns. + + """ + + __slots__ = '_all_columns' + + def __init__(self, *columns): + super(ColumnCollection, self).__init__() + object.__setattr__(self, '_all_columns', []) + for c in columns: + self.add(c) + + def __str__(self): + return repr([str(c) for c in self]) + + def replace(self, column): + """add the given column to this collection, removing unaliased + versions of this column as well as existing columns with the + same key. 
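# [editor's note] Sketch of the generative behavior described above:
# execution_options() returns a new statement carrying the merged options and
# leaves the original untouched. _execution_options is the internal store
# shown in the Executable class body.
from sqlalchemy import Column, Integer, MetaData, Table, select

t = Table("t", MetaData(), Column("x", Integer), Column("y", Integer))
stmt = select([t.c.x, t.c.y])
stmt2 = stmt.execution_options(autocommit=True)

assert stmt2 is not stmt
assert stmt2._execution_options["autocommit"] is True
assert "autocommit" not in stmt._execution_options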
+ + e.g.:: + + t = Table('sometable', metadata, Column('col1', Integer)) + t.columns.replace(Column('col1', Integer, key='columnone')) + + will remove the original 'col1' from the collection, and add + the new column under the name 'columnone'. + + Used by schema.Column to override columns during table reflection. + + """ + remove_col = None + if column.name in self and column.key != column.name: + other = self[column.name] + if other.name == other.key: + remove_col = other + del self._data[other.key] + + if column.key in self._data: + remove_col = self._data[column.key] + + self._data[column.key] = column + if remove_col is not None: + self._all_columns[:] = [column if c is remove_col + else c for c in self._all_columns] + else: + self._all_columns.append(column) + + def add(self, column): + """Add a column to this collection. + + The key attribute of the column will be used as the hash key + for this dictionary. + + """ + if not column.key: + raise exc.ArgumentError( + "Can't add unnamed column to column collection") + self[column.key] = column + + def __delitem__(self, key): + raise NotImplementedError() + + def __setattr__(self, key, object): + raise NotImplementedError() + + def __setitem__(self, key, value): + if key in self: + + # this warning is primarily to catch select() statements + # which have conflicting column names in their exported + # columns collection + + existing = self[key] + if not existing.shares_lineage(value): + util.warn('Column %r on table %r being replaced by ' + '%r, which has the same key. Consider ' + 'use_labels for select() statements.' % + (key, getattr(existing, 'table', None), value)) + + # pop out memoized proxy_set as this + # operation may very well be occurring + # in a _make_proxy operation + util.memoized_property.reset(value, "proxy_set") + + self._all_columns.append(value) + self._data[key] = value + + def clear(self): + raise NotImplementedError() + + def remove(self, column): + del self._data[column.key] + self._all_columns[:] = [ + c for c in self._all_columns if c is not column] + + def update(self, iter): + cols = list(iter) + all_col_set = set(self._all_columns) + self._all_columns.extend( + c for label, c in cols if c not in all_col_set) + self._data.update((label, c) for label, c in cols) + + def extend(self, iter): + cols = list(iter) + all_col_set = set(self._all_columns) + self._all_columns.extend(c for c in cols if c not in all_col_set) + self._data.update((c.key, c) for c in cols) + + __hash__ = None + + @util.dependencies("sqlalchemy.sql.elements") + def __eq__(self, elements, other): + l = [] + for c in getattr(other, "_all_columns", other): + for local in self._all_columns: + if c.shares_lineage(local): + l.append(c == local) + return elements.and_(*l) + + def __contains__(self, other): + if not isinstance(other, util.string_types): + raise exc.ArgumentError("__contains__ requires a string argument") + return util.OrderedProperties.__contains__(self, other) + + def __getstate__(self): + return {'_data': self._data, + '_all_columns': self._all_columns} + + def __setstate__(self, state): + object.__setattr__(self, '_data', state['_data']) + object.__setattr__(self, '_all_columns', state['_all_columns']) + + def contains_column(self, col): + return col in set(self._all_columns) + + def as_immutable(self): + return ImmutableColumnCollection(self._data, self._all_columns) + + +class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection): + def __init__(self, data, all_columns): + util.ImmutableProperties.__init__(self, 
data) + object.__setattr__(self, '_all_columns', all_columns) + + extend = remove = util.ImmutableProperties._immutable + + +class ColumnSet(util.ordered_column_set): + def contains_column(self, col): + return col in self + + def extend(self, cols): + for col in cols: + self.add(col) + + def __add__(self, other): + return list(self) + list(other) + + @util.dependencies("sqlalchemy.sql.elements") + def __eq__(self, elements, other): + l = [] + for c in other: + for local in self: + if c.shares_lineage(local): + l.append(c == local) + return elements.and_(*l) + + def __hash__(self): + return hash(tuple(x for x in self)) + + +def _bind_or_error(schemaitem, msg=None): + bind = schemaitem.bind + if not bind: + name = schemaitem.__class__.__name__ + label = getattr(schemaitem, 'fullname', + getattr(schemaitem, 'name', None)) + if label: + item = '%s object %r' % (name, label) + else: + item = '%s object' % name + if msg is None: + msg = "%s is not bound to an Engine or Connection. "\ + "Execution can not proceed without a database to execute "\ + "against." % item + raise exc.UnboundExecutionError(msg) + return bind diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/compiler.py b/lib/python3.4/site-packages/sqlalchemy/sql/compiler.py new file mode 100644 index 0000000..722beb1 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/compiler.py @@ -0,0 +1,2822 @@ +# sql/compiler.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Base SQL and DDL compiler implementations. + +Classes provided include: + +:class:`.compiler.SQLCompiler` - renders SQL +strings + +:class:`.compiler.DDLCompiler` - renders DDL +(data definition language) strings + +:class:`.compiler.GenericTypeCompiler` - renders +type specification strings. + +To generate user-defined SQL strings, see +:doc:`/ext/compiler`. + +""" + +import contextlib +import re +from . import schema, sqltypes, operators, functions, visitors, \ + elements, selectable, crud +from .. import util, exc +import itertools + +RESERVED_WORDS = set([ + 'all', 'analyse', 'analyze', 'and', 'any', 'array', + 'as', 'asc', 'asymmetric', 'authorization', 'between', + 'binary', 'both', 'case', 'cast', 'check', 'collate', + 'column', 'constraint', 'create', 'cross', 'current_date', + 'current_role', 'current_time', 'current_timestamp', + 'current_user', 'default', 'deferrable', 'desc', + 'distinct', 'do', 'else', 'end', 'except', 'false', + 'for', 'foreign', 'freeze', 'from', 'full', 'grant', + 'group', 'having', 'ilike', 'in', 'initially', 'inner', + 'intersect', 'into', 'is', 'isnull', 'join', 'leading', + 'left', 'like', 'limit', 'localtime', 'localtimestamp', + 'natural', 'new', 'not', 'notnull', 'null', 'off', 'offset', + 'old', 'on', 'only', 'or', 'order', 'outer', 'overlaps', + 'placing', 'primary', 'references', 'right', 'select', + 'session_user', 'set', 'similar', 'some', 'symmetric', 'table', + 'then', 'to', 'trailing', 'true', 'union', 'unique', 'user', + 'using', 'verbose', 'when', 'where']) + +LEGAL_CHARACTERS = re.compile(r'^[A-Z0-9_$]+$', re.I) +ILLEGAL_INITIAL_CHARACTERS = set([str(x) for x in range(0, 10)]).union(['$']) + +BIND_PARAMS = re.compile(r'(? 
', + operators.ge: ' >= ', + operators.eq: ' = ', + operators.concat_op: ' || ', + operators.match_op: ' MATCH ', + operators.notmatch_op: ' NOT MATCH ', + operators.in_op: ' IN ', + operators.notin_op: ' NOT IN ', + operators.comma_op: ', ', + operators.from_: ' FROM ', + operators.as_: ' AS ', + operators.is_: ' IS ', + operators.isnot: ' IS NOT ', + operators.collate: ' COLLATE ', + + # unary + operators.exists: 'EXISTS ', + operators.distinct_op: 'DISTINCT ', + operators.inv: 'NOT ', + + # modifiers + operators.desc_op: ' DESC', + operators.asc_op: ' ASC', + operators.nullsfirst_op: ' NULLS FIRST', + operators.nullslast_op: ' NULLS LAST', + +} + +FUNCTIONS = { + functions.coalesce: 'coalesce%(expr)s', + functions.current_date: 'CURRENT_DATE', + functions.current_time: 'CURRENT_TIME', + functions.current_timestamp: 'CURRENT_TIMESTAMP', + functions.current_user: 'CURRENT_USER', + functions.localtime: 'LOCALTIME', + functions.localtimestamp: 'LOCALTIMESTAMP', + functions.random: 'random%(expr)s', + functions.sysdate: 'sysdate', + functions.session_user: 'SESSION_USER', + functions.user: 'USER' +} + +EXTRACT_MAP = { + 'month': 'month', + 'day': 'day', + 'year': 'year', + 'second': 'second', + 'hour': 'hour', + 'doy': 'doy', + 'minute': 'minute', + 'quarter': 'quarter', + 'dow': 'dow', + 'week': 'week', + 'epoch': 'epoch', + 'milliseconds': 'milliseconds', + 'microseconds': 'microseconds', + 'timezone_hour': 'timezone_hour', + 'timezone_minute': 'timezone_minute' +} + +COMPOUND_KEYWORDS = { + selectable.CompoundSelect.UNION: 'UNION', + selectable.CompoundSelect.UNION_ALL: 'UNION ALL', + selectable.CompoundSelect.EXCEPT: 'EXCEPT', + selectable.CompoundSelect.EXCEPT_ALL: 'EXCEPT ALL', + selectable.CompoundSelect.INTERSECT: 'INTERSECT', + selectable.CompoundSelect.INTERSECT_ALL: 'INTERSECT ALL' +} + + +class Compiled(object): + + """Represent a compiled SQL or DDL expression. + + The ``__str__`` method of the ``Compiled`` object should produce + the actual text of the statement. ``Compiled`` objects are + specific to their underlying database dialect, and also may + or may not be specific to the columns referenced within a + particular set of bind parameters. In no case should the + ``Compiled`` object be dependent on the actual values of those + bind parameters, even though it may reference those values as + defaults. + """ + + _cached_metadata = None + + def __init__(self, dialect, statement, bind=None, + compile_kwargs=util.immutabledict()): + """Construct a new ``Compiled`` object. + + :param dialect: ``Dialect`` to compile against. + + :param statement: ``ClauseElement`` to be compiled. + + :param bind: Optional Engine or Connection to compile this + statement against. + + :param compile_kwargs: additional kwargs that will be + passed to the initial call to :meth:`.Compiled.process`. + + .. versionadded:: 0.8 + + """ + + self.dialect = dialect + self.bind = bind + if statement is not None: + self.statement = statement + self.can_execute = statement.supports_execution + self.string = self.process(self.statement, **compile_kwargs) + + @util.deprecated("0.7", ":class:`.Compiled` objects now compile " + "within the constructor.") + def compile(self): + """Produce the internal string representation of this element. + """ + pass + + def _execute_on_connection(self, connection, multiparams, params): + return connection._execute_compiled(self, multiparams, params) + + @property + def sql_compiler(self): + """Return a Compiled that is capable of processing SQL expressions. 
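# [editor's note] Sketch of producing a Compiled object as described above: the
# Compiled (here a SQLCompiler) renders its SQL string in the constructor;
# str() returns it, and .params exposes the compiled-in bind values.
from sqlalchemy import Column, Integer, MetaData, Table, select
from sqlalchemy.dialects import sqlite

t = Table("t", MetaData(), Column("x", Integer))
stmt = select([t.c.x]).where(t.c.x == 5)
compiled = stmt.compile(dialect=sqlite.dialect())

print(str(compiled))    # SELECT t.x FROM t WHERE t.x = ?
print(compiled.params)  # {'x_1': 5}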
+ + If this compiler is one, it would likely just return 'self'. + + """ + + raise NotImplementedError() + + def process(self, obj, **kwargs): + return obj._compiler_dispatch(self, **kwargs) + + def __str__(self): + """Return the string text of the generated SQL or DDL.""" + + return self.string or '' + + def construct_params(self, params=None): + """Return the bind params for this compiled object. + + :param params: a dict of string/object pairs whose values will + override bind values compiled in to the + statement. + """ + + raise NotImplementedError() + + @property + def params(self): + """Return the bind params for this compiled object.""" + return self.construct_params() + + def execute(self, *multiparams, **params): + """Execute this compiled object.""" + + e = self.bind + if e is None: + raise exc.UnboundExecutionError( + "This Compiled object is not bound to any Engine " + "or Connection.") + return e._execute_compiled(self, multiparams, params) + + def scalar(self, *multiparams, **params): + """Execute this compiled object and return the result's + scalar value.""" + + return self.execute(*multiparams, **params).scalar() + + +class TypeCompiler(util.with_metaclass(util.EnsureKWArgType, object)): + """Produces DDL specification for TypeEngine objects.""" + + ensure_kwarg = 'visit_\w+' + + def __init__(self, dialect): + self.dialect = dialect + + def process(self, type_, **kw): + return type_._compiler_dispatch(self, **kw) + + +class _CompileLabel(visitors.Visitable): + + """lightweight label object which acts as an expression.Label.""" + + __visit_name__ = 'label' + __slots__ = 'element', 'name' + + def __init__(self, col, name, alt_names=()): + self.element = col + self.name = name + self._alt_names = (col,) + alt_names + + @property + def proxy_set(self): + return self.element.proxy_set + + @property + def type(self): + return self.element.type + + + +class SQLCompiler(Compiled): + + """Default implementation of Compiled. + + Compiles ClauseElements into SQL strings. Uses a similar visit + paradigm as visitors.ClauseVisitor but implements its own traversal. + + """ + + extract_map = EXTRACT_MAP + + compound_keywords = COMPOUND_KEYWORDS + + isdelete = isinsert = isupdate = False + """class-level defaults which can be set at the instance + level to define if this Compiled instance represents + INSERT/UPDATE/DELETE + """ + + isplaintext = False + + returning = None + """holds the "returning" collection of columns if + the statement is CRUD and defines returning columns + either implicitly or explicitly + """ + + returning_precedes_values = False + """set to True classwide to generate RETURNING + clauses before the VALUES or WHERE clause (i.e. MSSQL) + """ + + render_table_with_column_in_update_from = False + """set to True classwide to indicate the SET clause + in a multi-table UPDATE statement should qualify + columns with the table name (i.e. MySQL only) + """ + + ansi_bind_rules = False + """SQL 92 doesn't allow bind parameters to be used + in the columns clause of a SELECT, nor does it allow + ambiguous expressions like "? = ?". A compiler + subclass can set this flag to False if the target + driver/DB enforces this + """ + + def __init__(self, dialect, statement, column_keys=None, + inline=False, **kwargs): + """Construct a new ``DefaultCompiler`` object. + + dialect + Dialect to be used + + statement + ClauseElement to be compiled + + column_keys + a list of column names to be compiled into an INSERT or UPDATE + statement. 
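# [editor's note] Sketch of the TypeCompiler defined above: each dialect holds
# an instance that renders DDL type strings via _compiler_dispatch on the
# TypeEngine object.
from sqlalchemy import Integer, String
from sqlalchemy.dialects import sqlite

type_compiler = sqlite.dialect().type_compiler
print(type_compiler.process(Integer()))   # INTEGER
print(type_compiler.process(String(50)))  # VARCHAR(50)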
+ + """ + self.column_keys = column_keys + + # compile INSERT/UPDATE defaults/sequences inlined (no pre- + # execute) + self.inline = inline or getattr(statement, 'inline', False) + + # a dictionary of bind parameter keys to BindParameter + # instances. + self.binds = {} + + # a dictionary of BindParameter instances to "compiled" names + # that are actually present in the generated SQL + self.bind_names = util.column_dict() + + # stack which keeps track of nested SELECT statements + self.stack = [] + + # relates label names in the final SQL to a tuple of local + # column/label name, ColumnElement object (if any) and + # TypeEngine. ResultProxy uses this for type processing and + # column targeting + self._result_columns = [] + + # if False, means we can't be sure the list of entries + # in _result_columns is actually the rendered order. This + # gets flipped when we use TextAsFrom, for example. + self._ordered_columns = True + + # true if the paramstyle is positional + self.positional = dialect.positional + if self.positional: + self.positiontup = [] + self.bindtemplate = BIND_TEMPLATES[dialect.paramstyle] + + self.ctes = None + + # an IdentifierPreparer that formats the quoting of identifiers + self.preparer = dialect.identifier_preparer + self.label_length = dialect.label_length \ + or dialect.max_identifier_length + + # a map which tracks "anonymous" identifiers that are created on + # the fly here + self.anon_map = util.PopulateDict(self._process_anon) + + # a map which tracks "truncated" names based on + # dialect.label_length or dialect.max_identifier_length + self.truncated_names = {} + Compiled.__init__(self, dialect, statement, **kwargs) + + if self.positional and dialect.paramstyle == 'numeric': + self._apply_numbered_params() + + @util.memoized_instancemethod + def _init_cte_state(self): + """Initialize collections related to CTEs only if + a CTE is located, to save on the overhead of + these collections otherwise. 
+ + """ + # collect CTEs to tack on top of a SELECT + self.ctes = util.OrderedDict() + self.ctes_by_name = {} + self.ctes_recursive = False + if self.positional: + self.cte_positional = {} + + @contextlib.contextmanager + def _nested_result(self): + """special API to support the use case of 'nested result sets'""" + result_columns, ordered_columns = ( + self._result_columns, self._ordered_columns) + self._result_columns, self._ordered_columns = [], False + + try: + if self.stack: + entry = self.stack[-1] + entry['need_result_map_for_nested'] = True + else: + entry = None + yield self._result_columns, self._ordered_columns + finally: + if entry: + entry.pop('need_result_map_for_nested') + self._result_columns, self._ordered_columns = ( + result_columns, ordered_columns) + + def _apply_numbered_params(self): + poscount = itertools.count(1) + self.string = re.sub( + r'\[_POSITION\]', + lambda m: str(util.next(poscount)), + self.string) + + @util.memoized_property + def _bind_processors(self): + return dict( + (key, value) for key, value in + ((self.bind_names[bindparam], + bindparam.type._cached_bind_processor(self.dialect)) + for bindparam in self.bind_names) + if value is not None + ) + + def is_subquery(self): + return len(self.stack) > 1 + + @property + def sql_compiler(self): + return self + + def construct_params(self, params=None, _group_number=None, _check=True): + """return a dictionary of bind parameter keys and values""" + + if params: + pd = {} + for bindparam in self.bind_names: + name = self.bind_names[bindparam] + if bindparam.key in params: + pd[name] = params[bindparam.key] + elif name in params: + pd[name] = params[name] + + elif _check and bindparam.required: + if _group_number: + raise exc.InvalidRequestError( + "A value is required for bind parameter %r, " + "in parameter group %d" % + (bindparam.key, _group_number)) + else: + raise exc.InvalidRequestError( + "A value is required for bind parameter %r" + % bindparam.key) + + elif bindparam.callable: + pd[name] = bindparam.effective_value + else: + pd[name] = bindparam.value + return pd + else: + pd = {} + for bindparam in self.bind_names: + if _check and bindparam.required: + if _group_number: + raise exc.InvalidRequestError( + "A value is required for bind parameter %r, " + "in parameter group %d" % + (bindparam.key, _group_number)) + else: + raise exc.InvalidRequestError( + "A value is required for bind parameter %r" + % bindparam.key) + + if bindparam.callable: + pd[self.bind_names[bindparam]] = bindparam.effective_value + else: + pd[self.bind_names[bindparam]] = bindparam.value + return pd + + @property + def params(self): + """Return the bind param dictionary embedded into this + compiled object, for those values that are present.""" + return self.construct_params(_check=False) + + @util.dependencies("sqlalchemy.engine.result") + def _create_result_map(self, result): + """utility method used for unit tests only.""" + return result.ResultMetaData._create_result_map(self._result_columns) + + def default_from(self): + """Called when a SELECT statement has no froms, and no FROM clause is + to be appended. + + Gives Oracle a chance to tack on a ``FROM DUAL`` to the string output. 
+ + """ + return "" + + def visit_grouping(self, grouping, asfrom=False, **kwargs): + return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")" + + def visit_label_reference( + self, element, within_columns_clause=False, **kwargs): + if self.stack and self.dialect.supports_simple_order_by_label: + selectable = self.stack[-1]['selectable'] + + with_cols, only_froms = selectable._label_resolve_dict + if within_columns_clause: + resolve_dict = only_froms + else: + resolve_dict = with_cols + + # this can be None in the case that a _label_reference() + # were subject to a replacement operation, in which case + # the replacement of the Label element may have changed + # to something else like a ColumnClause expression. + order_by_elem = element.element._order_by_label_element + + if order_by_elem is not None and order_by_elem.name in \ + resolve_dict: + + kwargs['render_label_as_label'] = \ + element.element._order_by_label_element + + return self.process( + element.element, within_columns_clause=within_columns_clause, + **kwargs) + + def visit_textual_label_reference( + self, element, within_columns_clause=False, **kwargs): + if not self.stack: + # compiling the element outside of the context of a SELECT + return self.process( + element._text_clause + ) + + selectable = self.stack[-1]['selectable'] + with_cols, only_froms = selectable._label_resolve_dict + try: + if within_columns_clause: + col = only_froms[element.element] + else: + col = with_cols[element.element] + except KeyError: + # treat it like text() + util.warn_limited( + "Can't resolve label reference %r; converting to text()", + util.ellipses_string(element.element)) + return self.process( + element._text_clause + ) + else: + kwargs['render_label_as_label'] = col + return self.process( + col, within_columns_clause=within_columns_clause, **kwargs) + + def visit_label(self, label, + add_to_result_map=None, + within_label_clause=False, + within_columns_clause=False, + render_label_as_label=None, + **kw): + # only render labels within the columns clause + # or ORDER BY clause of a select. dialect-specific compilers + # can modify this behavior. 
+ render_label_with_as = (within_columns_clause and not + within_label_clause) + render_label_only = render_label_as_label is label + + if render_label_only or render_label_with_as: + if isinstance(label.name, elements._truncated_label): + labelname = self._truncated_identifier("colident", label.name) + else: + labelname = label.name + + if render_label_with_as: + if add_to_result_map is not None: + add_to_result_map( + labelname, + label.name, + (label, labelname, ) + label._alt_names, + label.type + ) + + return label.element._compiler_dispatch( + self, within_columns_clause=True, + within_label_clause=True, **kw) + \ + OPERATORS[operators.as_] + \ + self.preparer.format_label(label, labelname) + elif render_label_only: + return self.preparer.format_label(label, labelname) + else: + return label.element._compiler_dispatch( + self, within_columns_clause=False, **kw) + + def visit_column(self, column, add_to_result_map=None, + include_table=True, **kwargs): + name = orig_name = column.name + if name is None: + raise exc.CompileError("Cannot compile Column object until " + "its 'name' is assigned.") + + is_literal = column.is_literal + if not is_literal and isinstance(name, elements._truncated_label): + name = self._truncated_identifier("colident", name) + + if add_to_result_map is not None: + add_to_result_map( + name, + orig_name, + (column, name, column.key), + column.type + ) + + if is_literal: + name = self.escape_literal_column(name) + else: + name = self.preparer.quote(name) + + table = column.table + if table is None or not include_table or not table.named_with_column: + return name + else: + if table.schema: + schema_prefix = self.preparer.quote_schema(table.schema) + '.' + else: + schema_prefix = '' + tablename = table.name + if isinstance(tablename, elements._truncated_label): + tablename = self._truncated_identifier("alias", tablename) + + return schema_prefix + \ + self.preparer.quote(tablename) + \ + "." 
+ name + + def escape_literal_column(self, text): + """provide escaping for the literal_column() construct.""" + + # TODO: some dialects might need different behavior here + return text.replace('%', '%%') + + def visit_fromclause(self, fromclause, **kwargs): + return fromclause.name + + def visit_index(self, index, **kwargs): + return index.name + + def visit_typeclause(self, typeclause, **kw): + kw['type_expression'] = typeclause + return self.dialect.type_compiler.process(typeclause.type, **kw) + + def post_process_text(self, text): + return text + + def visit_textclause(self, textclause, **kw): + def do_bindparam(m): + name = m.group(1) + if name in textclause._bindparams: + return self.process(textclause._bindparams[name], **kw) + else: + return self.bindparam_string(name, **kw) + + if not self.stack: + self.isplaintext = True + + # un-escape any \:params + return BIND_PARAMS_ESC.sub( + lambda m: m.group(1), + BIND_PARAMS.sub( + do_bindparam, + self.post_process_text(textclause.text)) + ) + + def visit_text_as_from(self, taf, + compound_index=None, + asfrom=False, + parens=True, **kw): + + toplevel = not self.stack + entry = self._default_stack_entry if toplevel else self.stack[-1] + + populate_result_map = toplevel or \ + ( + compound_index == 0 and entry.get( + 'need_result_map_for_compound', False) + ) or entry.get('need_result_map_for_nested', False) + + if populate_result_map: + self._ordered_columns = False + for c in taf.column_args: + self.process(c, within_columns_clause=True, + add_to_result_map=self._add_to_result_map) + + text = self.process(taf.element, **kw) + if asfrom and parens: + text = "(%s)" % text + return text + + def visit_null(self, expr, **kw): + return 'NULL' + + def visit_true(self, expr, **kw): + if self.dialect.supports_native_boolean: + return 'true' + else: + return "1" + + def visit_false(self, expr, **kw): + if self.dialect.supports_native_boolean: + return 'false' + else: + return "0" + + def visit_clauselist(self, clauselist, **kw): + sep = clauselist.operator + if sep is None: + sep = " " + else: + sep = OPERATORS[clauselist.operator] + return sep.join( + s for s in + ( + c._compiler_dispatch(self, **kw) + for c in clauselist.clauses) + if s) + + def visit_case(self, clause, **kwargs): + x = "CASE " + if clause.value is not None: + x += clause.value._compiler_dispatch(self, **kwargs) + " " + for cond, result in clause.whens: + x += "WHEN " + cond._compiler_dispatch( + self, **kwargs + ) + " THEN " + result._compiler_dispatch( + self, **kwargs) + " " + if clause.else_ is not None: + x += "ELSE " + clause.else_._compiler_dispatch( + self, **kwargs + ) + " " + x += "END" + return x + + def visit_cast(self, cast, **kwargs): + return "CAST(%s AS %s)" % \ + (cast.clause._compiler_dispatch(self, **kwargs), + cast.typeclause._compiler_dispatch(self, **kwargs)) + + def visit_over(self, over, **kwargs): + return "%s OVER (%s)" % ( + over.func._compiler_dispatch(self, **kwargs), + ' '.join( + '%s BY %s' % (word, clause._compiler_dispatch(self, **kwargs)) + for word, clause in ( + ('PARTITION', over.partition_by), + ('ORDER', over.order_by) + ) + if clause is not None and len(clause) + ) + ) + + def visit_funcfilter(self, funcfilter, **kwargs): + return "%s FILTER (WHERE %s)" % ( + funcfilter.func._compiler_dispatch(self, **kwargs), + funcfilter.criterion._compiler_dispatch(self, **kwargs) + ) + + def visit_extract(self, extract, **kwargs): + field = self.extract_map.get(extract.field, extract.field) + return "EXTRACT(%s FROM %s)" % ( + field, 
extract.expr._compiler_dispatch(self, **kwargs)) + + def visit_function(self, func, add_to_result_map=None, **kwargs): + if add_to_result_map is not None: + add_to_result_map( + func.name, func.name, (), func.type + ) + + disp = getattr(self, "visit_%s_func" % func.name.lower(), None) + if disp: + return disp(func, **kwargs) + else: + name = FUNCTIONS.get(func.__class__, func.name + "%(expr)s") + return ".".join(list(func.packagenames) + [name]) % \ + {'expr': self.function_argspec(func, **kwargs)} + + def visit_next_value_func(self, next_value, **kw): + return self.visit_sequence(next_value.sequence) + + def visit_sequence(self, sequence): + raise NotImplementedError( + "Dialect '%s' does not support sequence increments." % + self.dialect.name + ) + + def function_argspec(self, func, **kwargs): + return func.clause_expr._compiler_dispatch(self, **kwargs) + + def visit_compound_select(self, cs, asfrom=False, + parens=True, compound_index=0, **kwargs): + toplevel = not self.stack + entry = self._default_stack_entry if toplevel else self.stack[-1] + need_result_map = toplevel or \ + (compound_index == 0 + and entry.get('need_result_map_for_compound', False)) + + self.stack.append( + { + 'correlate_froms': entry['correlate_froms'], + 'asfrom_froms': entry['asfrom_froms'], + 'selectable': cs, + 'need_result_map_for_compound': need_result_map + }) + + keyword = self.compound_keywords.get(cs.keyword) + + text = (" " + keyword + " ").join( + (c._compiler_dispatch(self, + asfrom=asfrom, parens=False, + compound_index=i, **kwargs) + for i, c in enumerate(cs.selects)) + ) + + group_by = cs._group_by_clause._compiler_dispatch( + self, asfrom=asfrom, **kwargs) + if group_by: + text += " GROUP BY " + group_by + + text += self.order_by_clause(cs, **kwargs) + text += (cs._limit_clause is not None + or cs._offset_clause is not None) and \ + self.limit_clause(cs, **kwargs) or "" + + if self.ctes and toplevel: + text = self._render_cte_clause() + text + + self.stack.pop(-1) + if asfrom and parens: + return "(" + text + ")" + else: + return text + + def visit_unary(self, unary, **kw): + if unary.operator: + if unary.modifier: + raise exc.CompileError( + "Unary expression does not support operator " + "and modifier simultaneously") + disp = getattr(self, "visit_%s_unary_operator" % + unary.operator.__name__, None) + if disp: + return disp(unary, unary.operator, **kw) + else: + return self._generate_generic_unary_operator( + unary, OPERATORS[unary.operator], **kw) + elif unary.modifier: + disp = getattr(self, "visit_%s_unary_modifier" % + unary.modifier.__name__, None) + if disp: + return disp(unary, unary.modifier, **kw) + else: + return self._generate_generic_unary_modifier( + unary, OPERATORS[unary.modifier], **kw) + else: + raise exc.CompileError( + "Unary expression has no operator or modifier") + + def visit_istrue_unary_operator(self, element, operator, **kw): + if self.dialect.supports_native_boolean: + return self.process(element.element, **kw) + else: + return "%s = 1" % self.process(element.element, **kw) + + def visit_isfalse_unary_operator(self, element, operator, **kw): + if self.dialect.supports_native_boolean: + return "NOT %s" % self.process(element.element, **kw) + else: + return "%s = 0" % self.process(element.element, **kw) + + def visit_notmatch_op_binary(self, binary, operator, **kw): + return "NOT %s" % self.visit_binary( + binary, override_operator=operators.match_op) + + def visit_binary(self, binary, override_operator=None, **kw): + # don't allow "? = ?" 
to render + if self.ansi_bind_rules and \ + isinstance(binary.left, elements.BindParameter) and \ + isinstance(binary.right, elements.BindParameter): + kw['literal_binds'] = True + + operator_ = override_operator or binary.operator + disp = getattr(self, "visit_%s_binary" % operator_.__name__, None) + if disp: + return disp(binary, operator_, **kw) + else: + try: + opstring = OPERATORS[operator_] + except KeyError: + raise exc.UnsupportedCompilationError(self, operator_) + else: + return self._generate_generic_binary(binary, opstring, **kw) + + def visit_custom_op_binary(self, element, operator, **kw): + return self._generate_generic_binary( + element, " " + operator.opstring + " ", **kw) + + def visit_custom_op_unary_operator(self, element, operator, **kw): + return self._generate_generic_unary_operator( + element, operator.opstring + " ", **kw) + + def visit_custom_op_unary_modifier(self, element, operator, **kw): + return self._generate_generic_unary_modifier( + element, " " + operator.opstring, **kw) + + def _generate_generic_binary(self, binary, opstring, **kw): + return binary.left._compiler_dispatch(self, **kw) + \ + opstring + \ + binary.right._compiler_dispatch(self, **kw) + + def _generate_generic_unary_operator(self, unary, opstring, **kw): + return opstring + unary.element._compiler_dispatch(self, **kw) + + def _generate_generic_unary_modifier(self, unary, opstring, **kw): + return unary.element._compiler_dispatch(self, **kw) + opstring + + @util.memoized_property + def _like_percent_literal(self): + return elements.literal_column("'%'", type_=sqltypes.STRINGTYPE) + + def visit_contains_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent.__add__(binary.right).__add__(percent) + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_notcontains_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent.__add__(binary.right).__add__(percent) + return self.visit_notlike_op_binary(binary, operator, **kw) + + def visit_startswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent.__radd__( + binary.right + ) + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_notstartswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent.__radd__( + binary.right + ) + return self.visit_notlike_op_binary(binary, operator, **kw) + + def visit_endswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent.__add__(binary.right) + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_notendswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + percent = self._like_percent_literal + binary.right = percent.__add__(binary.right) + return self.visit_notlike_op_binary(binary, operator, **kw) + + def visit_like_op_binary(self, binary, operator, **kw): + escape = binary.modifiers.get("escape", None) + + # TODO: use ternary here, not "and"/ "or" + return '%s LIKE %s' % ( + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw)) \ + + ( + ' ESCAPE ' + + self.render_literal_value(escape, sqltypes.STRINGTYPE) + if escape else '' + ) + + def visit_notlike_op_binary(self, binary, operator, **kw): + escape = 
binary.modifiers.get("escape", None) + return '%s NOT LIKE %s' % ( + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw)) \ + + ( + ' ESCAPE ' + + self.render_literal_value(escape, sqltypes.STRINGTYPE) + if escape else '' + ) + + def visit_ilike_op_binary(self, binary, operator, **kw): + escape = binary.modifiers.get("escape", None) + return 'lower(%s) LIKE lower(%s)' % ( + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw)) \ + + ( + ' ESCAPE ' + + self.render_literal_value(escape, sqltypes.STRINGTYPE) + if escape else '' + ) + + def visit_notilike_op_binary(self, binary, operator, **kw): + escape = binary.modifiers.get("escape", None) + return 'lower(%s) NOT LIKE lower(%s)' % ( + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw)) \ + + ( + ' ESCAPE ' + + self.render_literal_value(escape, sqltypes.STRINGTYPE) + if escape else '' + ) + + def visit_between_op_binary(self, binary, operator, **kw): + symmetric = binary.modifiers.get("symmetric", False) + return self._generate_generic_binary( + binary, " BETWEEN SYMMETRIC " + if symmetric else " BETWEEN ", **kw) + + def visit_notbetween_op_binary(self, binary, operator, **kw): + symmetric = binary.modifiers.get("symmetric", False) + return self._generate_generic_binary( + binary, " NOT BETWEEN SYMMETRIC " + if symmetric else " NOT BETWEEN ", **kw) + + def visit_bindparam(self, bindparam, within_columns_clause=False, + literal_binds=False, + skip_bind_expression=False, + **kwargs): + if not skip_bind_expression and bindparam.type._has_bind_expression: + bind_expression = bindparam.type.bind_expression(bindparam) + return self.process(bind_expression, + skip_bind_expression=True) + + if literal_binds or \ + (within_columns_clause and + self.ansi_bind_rules): + if bindparam.value is None and bindparam.callable is None: + raise exc.CompileError("Bind parameter '%s' without a " + "renderable value not allowed here." + % bindparam.key) + return self.render_literal_bindparam( + bindparam, within_columns_clause=True, **kwargs) + + name = self._truncate_bindparam(bindparam) + + if name in self.binds: + existing = self.binds[name] + if existing is not bindparam: + if (existing.unique or bindparam.unique) and \ + not existing.proxy_set.intersection( + bindparam.proxy_set): + raise exc.CompileError( + "Bind parameter '%s' conflicts with " + "unique bind parameter of the same name" % + bindparam.key + ) + elif existing._is_crud or bindparam._is_crud: + raise exc.CompileError( + "bindparam() name '%s' is reserved " + "for automatic usage in the VALUES or SET " + "clause of this " + "insert/update statement. Please use a " + "name other than column name when using bindparam() " + "with insert() or update() (for example, 'b_%s')." % + (bindparam.key, bindparam.key) + ) + + self.binds[bindparam.key] = self.binds[name] = bindparam + + return self.bindparam_string(name, **kwargs) + + def render_literal_bindparam(self, bindparam, **kw): + value = bindparam.effective_value + return self.render_literal_value(value, bindparam.type) + + def render_literal_value(self, value, type_): + """Render the value of a bind parameter as a quoted literal. + + This is used for statement sections that do not accept bind parameters + on the target driver/database. + + This should be implemented by subclasses using the quoting services + of the DBAPI. 
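# [editor's note] Sketch of render_literal_value() in action: compiling with
# literal_binds=True routes bind parameters through the type's literal
# processor so the value is inlined into the SQL string.
from sqlalchemy import Column, Integer, MetaData, Table, select
from sqlalchemy.dialects import sqlite

t = Table("t", MetaData(), Column("x", Integer))
stmt = select([t.c.x]).where(t.c.x == 5)
print(stmt.compile(dialect=sqlite.dialect(),
                   compile_kwargs={"literal_binds": True}))
# SELECT t.x FROM t WHERE t.x = 5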
+ + """ + + processor = type_._cached_literal_processor(self.dialect) + if processor: + return processor(value) + else: + raise NotImplementedError( + "Don't know how to literal-quote value %r" % value) + + def _truncate_bindparam(self, bindparam): + if bindparam in self.bind_names: + return self.bind_names[bindparam] + + bind_name = bindparam.key + if isinstance(bind_name, elements._truncated_label): + bind_name = self._truncated_identifier("bindparam", bind_name) + + # add to bind_names for translation + self.bind_names[bindparam] = bind_name + + return bind_name + + def _truncated_identifier(self, ident_class, name): + if (ident_class, name) in self.truncated_names: + return self.truncated_names[(ident_class, name)] + + anonname = name.apply_map(self.anon_map) + + if len(anonname) > self.label_length - 6: + counter = self.truncated_names.get(ident_class, 1) + truncname = anonname[0:max(self.label_length - 6, 0)] + \ + "_" + hex(counter)[2:] + self.truncated_names[ident_class] = counter + 1 + else: + truncname = anonname + self.truncated_names[(ident_class, name)] = truncname + return truncname + + def _anonymize(self, name): + return name % self.anon_map + + def _process_anon(self, key): + (ident, derived) = key.split(' ', 1) + anonymous_counter = self.anon_map.get(derived, 1) + self.anon_map[derived] = anonymous_counter + 1 + return derived + "_" + str(anonymous_counter) + + def bindparam_string(self, name, positional_names=None, **kw): + if self.positional: + if positional_names is not None: + positional_names.append(name) + else: + self.positiontup.append(name) + return self.bindtemplate % {'name': name} + + def visit_cte(self, cte, asfrom=False, ashint=False, + fromhints=None, + **kwargs): + self._init_cte_state() + + if isinstance(cte.name, elements._truncated_label): + cte_name = self._truncated_identifier("alias", cte.name) + else: + cte_name = cte.name + + if cte_name in self.ctes_by_name: + existing_cte = self.ctes_by_name[cte_name] + # we've generated a same-named CTE that we are enclosed in, + # or this is the same CTE. just return the name. + if cte in existing_cte._restates or cte is existing_cte: + return self.preparer.format_alias(cte, cte_name) + elif existing_cte in cte._restates: + # we've generated a same-named CTE that is + # enclosed in us - we take precedence, so + # discard the text for the "inner". 
+ del self.ctes[existing_cte] + else: + raise exc.CompileError( + "Multiple, unrelated CTEs found with " + "the same name: %r" % + cte_name) + + self.ctes_by_name[cte_name] = cte + + if cte._cte_alias is not None: + orig_cte = cte._cte_alias + if orig_cte not in self.ctes: + self.visit_cte(orig_cte, **kwargs) + cte_alias_name = cte._cte_alias.name + if isinstance(cte_alias_name, elements._truncated_label): + cte_alias_name = self._truncated_identifier( + "alias", cte_alias_name) + else: + orig_cte = cte + cte_alias_name = None + if not cte_alias_name and cte not in self.ctes: + if cte.recursive: + self.ctes_recursive = True + text = self.preparer.format_alias(cte, cte_name) + if cte.recursive: + if isinstance(cte.original, selectable.Select): + col_source = cte.original + elif isinstance(cte.original, selectable.CompoundSelect): + col_source = cte.original.selects[0] + else: + assert False + recur_cols = [c for c in + util.unique_list(col_source.inner_columns) + if c is not None] + + text += "(%s)" % (", ".join( + self.preparer.format_column(ident) + for ident in recur_cols)) + + if self.positional: + kwargs['positional_names'] = self.cte_positional[cte] = [] + + text += " AS \n" + \ + cte.original._compiler_dispatch( + self, asfrom=True, **kwargs + ) + + if cte._suffixes: + text += " " + self._generate_prefixes( + cte, cte._suffixes, **kwargs) + + self.ctes[cte] = text + + if asfrom: + if cte_alias_name: + text = self.preparer.format_alias(cte, cte_alias_name) + text += self.get_render_as_alias_suffix(cte_name) + else: + return self.preparer.format_alias(cte, cte_name) + return text + + def visit_alias(self, alias, asfrom=False, ashint=False, + iscrud=False, + fromhints=None, **kwargs): + if asfrom or ashint: + if isinstance(alias.name, elements._truncated_label): + alias_name = self._truncated_identifier("alias", alias.name) + else: + alias_name = alias.name + + if ashint: + return self.preparer.format_alias(alias, alias_name) + elif asfrom: + ret = alias.original._compiler_dispatch(self, + asfrom=True, **kwargs) + \ + self.get_render_as_alias_suffix( + self.preparer.format_alias(alias, alias_name)) + + if fromhints and alias in fromhints: + ret = self.format_from_hint_text(ret, alias, + fromhints[alias], iscrud) + + return ret + else: + return alias.original._compiler_dispatch(self, **kwargs) + + def get_render_as_alias_suffix(self, alias_name_text): + return " AS " + alias_name_text + + def _add_to_result_map(self, keyname, name, objects, type_): + self._result_columns.append((keyname, name, objects, type_)) + + def _label_select_column(self, select, column, + populate_result_map, + asfrom, column_clause_args, + name=None, + within_columns_clause=True): + """produce labeled columns present in a select().""" + + if column.type._has_column_expression and \ + populate_result_map: + col_expr = column.type.column_expression(column) + add_to_result_map = lambda keyname, name, objects, type_: \ + self._add_to_result_map( + keyname, name, + objects + (column,), type_) + else: + col_expr = column + if populate_result_map: + add_to_result_map = self._add_to_result_map + else: + add_to_result_map = None + + if not within_columns_clause: + result_expr = col_expr + elif isinstance(column, elements.Label): + if col_expr is not column: + result_expr = _CompileLabel( + col_expr, + column.name, + alt_names=(column.element,) + ) + else: + result_expr = col_expr + + elif select is not None and name: + result_expr = _CompileLabel( + col_expr, + name, + alt_names=(column._key_label,) + ) + + elif \ + 
asfrom and \ + isinstance(column, elements.ColumnClause) and \ + not column.is_literal and \ + column.table is not None and \ + not isinstance(column.table, selectable.Select): + result_expr = _CompileLabel(col_expr, + elements._as_truncated(column.name), + alt_names=(column.key,)) + elif ( + not isinstance(column, elements.TextClause) and + ( + not isinstance(column, elements.UnaryExpression) or + column.wraps_column_expression + ) and + ( + not hasattr(column, 'name') or + isinstance(column, functions.Function) + ) + ): + result_expr = _CompileLabel(col_expr, column.anon_label) + elif col_expr is not column: + # TODO: are we sure "column" has a .name and .key here ? + # assert isinstance(column, elements.ColumnClause) + result_expr = _CompileLabel(col_expr, + elements._as_truncated(column.name), + alt_names=(column.key,)) + else: + result_expr = col_expr + + column_clause_args.update( + within_columns_clause=within_columns_clause, + add_to_result_map=add_to_result_map + ) + return result_expr._compiler_dispatch( + self, + **column_clause_args + ) + + def format_from_hint_text(self, sqltext, table, hint, iscrud): + hinttext = self.get_from_hint_text(table, hint) + if hinttext: + sqltext += " " + hinttext + return sqltext + + def get_select_hint_text(self, byfroms): + return None + + def get_from_hint_text(self, table, text): + return None + + def get_crud_hint_text(self, table, text): + return None + + def get_statement_hint_text(self, hint_texts): + return " ".join(hint_texts) + + def _transform_select_for_nested_joins(self, select): + """Rewrite any "a JOIN (b JOIN c)" expression as + "a JOIN (select * from b JOIN c) AS anon", to support + databases that can't parse a parenthesized join correctly + (i.e. sqlite the main one). + + """ + cloned = {} + column_translate = [{}] + + def visit(element, **kw): + if element in column_translate[-1]: + return column_translate[-1][element] + + elif element in cloned: + return cloned[element] + + newelem = cloned[element] = element._clone() + + if newelem.is_selectable and newelem._is_join and \ + isinstance(newelem.right, selectable.FromGrouping): + + newelem._reset_exported() + newelem.left = visit(newelem.left, **kw) + + right = visit(newelem.right, **kw) + + selectable_ = selectable.Select( + [right.element], + use_labels=True).alias() + + for c in selectable_.c: + c._key_label = c.key + c._label = c.name + + translate_dict = dict( + zip(newelem.right.element.c, selectable_.c) + ) + + # translating from both the old and the new + # because different select() structures will lead us + # to traverse differently + translate_dict[right.element.left] = selectable_ + translate_dict[right.element.right] = selectable_ + translate_dict[newelem.right.element.left] = selectable_ + translate_dict[newelem.right.element.right] = selectable_ + + # propagate translations that we've gained + # from nested visit(newelem.right) outwards + # to the enclosing select here. this happens + # only when we have more than one level of right + # join nesting, i.e. "a JOIN (b JOIN (c JOIN d))" + for k, v in list(column_translate[-1].items()): + if v in translate_dict: + # remarkably, no current ORM tests (May 2013) + # hit this condition, only test_join_rewriting + # does. + column_translate[-1][k] = translate_dict[v] + + column_translate[-1].update(translate_dict) + + newelem.right = selectable_ + + newelem.onclause = visit(newelem.onclause, **kw) + + elif newelem._is_from_container: + # if we hit an Alias, CompoundSelect or ScalarSelect, put a + # marker in the stack. 
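+                # assumed walk-through (not in the original source): for
+                # select([a.join(b.join(c))]), the enclosing Alias or
+                # ScalarSelect sets 'select_container' here; the nested
+                # select() below then switches it to 'inside_select', so
+                # each FROM-level subquery gets its own column-translate
+                # map on the stack.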
+ kw['transform_clue'] = 'select_container' + newelem._copy_internals(clone=visit, **kw) + elif newelem.is_selectable and newelem._is_select: + barrier_select = kw.get('transform_clue', None) == \ + 'select_container' + # if we're still descended from an + # Alias/CompoundSelect/ScalarSelect, we're + # in a FROM clause, so start with a new translate collection + if barrier_select: + column_translate.append({}) + kw['transform_clue'] = 'inside_select' + newelem._copy_internals(clone=visit, **kw) + if barrier_select: + del column_translate[-1] + else: + newelem._copy_internals(clone=visit, **kw) + + return newelem + + return visit(select) + + def _transform_result_map_for_nested_joins( + self, select, transformed_select): + inner_col = dict((c._key_label, c) for + c in transformed_select.inner_columns) + + d = dict( + (inner_col[c._key_label], c) + for c in select.inner_columns + ) + + self._result_columns = [ + (key, name, tuple([d.get(col, col) for col in objs]), typ) + for key, name, objs, typ in self._result_columns + ] + + _default_stack_entry = util.immutabledict([ + ('correlate_froms', frozenset()), + ('asfrom_froms', frozenset()) + ]) + + def _display_froms_for_select(self, select, asfrom): + # utility method to help external dialects + # get the correct from list for a select. + # specifically the oracle dialect needs this feature + # right now. + toplevel = not self.stack + entry = self._default_stack_entry if toplevel else self.stack[-1] + + correlate_froms = entry['correlate_froms'] + asfrom_froms = entry['asfrom_froms'] + + if asfrom: + froms = select._get_display_froms( + explicit_correlate_froms=correlate_froms.difference( + asfrom_froms), + implicit_correlate_froms=()) + else: + froms = select._get_display_froms( + explicit_correlate_froms=correlate_froms, + implicit_correlate_froms=asfrom_froms) + return froms + + def visit_select(self, select, asfrom=False, parens=True, + fromhints=None, + compound_index=0, + nested_join_translation=False, + select_wraps_for=None, + **kwargs): + + needs_nested_translation = \ + select.use_labels and \ + not nested_join_translation and \ + not self.stack and \ + not self.dialect.supports_right_nested_joins + + if needs_nested_translation: + transformed_select = self._transform_select_for_nested_joins( + select) + text = self.visit_select( + transformed_select, asfrom=asfrom, parens=parens, + fromhints=fromhints, + compound_index=compound_index, + nested_join_translation=True, **kwargs + ) + + toplevel = not self.stack + entry = self._default_stack_entry if toplevel else self.stack[-1] + + populate_result_map = toplevel or \ + ( + compound_index == 0 and entry.get( + 'need_result_map_for_compound', False) + ) or entry.get('need_result_map_for_nested', False) + + # this was first proposed as part of #3372; however, it is not + # reached in current tests and could possibly be an assertion + # instead. + if not populate_result_map and 'add_to_result_map' in kwargs: + del kwargs['add_to_result_map'] + + if needs_nested_translation: + if populate_result_map: + self._transform_result_map_for_nested_joins( + select, transformed_select) + return text + + froms = self._setup_select_stack(select, entry, asfrom) + + column_clause_args = kwargs.copy() + column_clause_args.update({ + 'within_label_clause': False, + 'within_columns_clause': False + }) + + text = "SELECT " # we're off to a good start ! 
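+        # rough sketch of the assembly order handled below (added
+        # illustration, not part of the original source):
+        #
+        #   SELECT [hints] [prefixes] [precolumns] col1, col2, ...
+        #   FROM ... [WHERE ...] [GROUP BY ...] [HAVING ...]
+        #   [ORDER BY ...] [LIMIT/OFFSET] [FOR UPDATE]
+        #
+        # with any accumulated WITH/CTE clause prepended at the end if
+        # this turns out to be the toplevel SELECT.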
+ + if select._hints: + hint_text, byfrom = self._setup_select_hints(select) + if hint_text: + text += hint_text + " " + else: + byfrom = None + + if select._prefixes: + text += self._generate_prefixes( + select, select._prefixes, **kwargs) + + text += self.get_select_precolumns(select, **kwargs) + + # the actual list of columns to print in the SELECT column list. + inner_columns = [ + c for c in [ + self._label_select_column( + select, + column, + populate_result_map, asfrom, + column_clause_args, + name=name) + for name, column in select._columns_plus_names + ] + if c is not None + ] + + if populate_result_map and select_wraps_for is not None: + # if this select is a compiler-generated wrapper, + # rewrite the targeted columns in the result map + wrapped_inner_columns = set(select_wraps_for.inner_columns) + translate = dict( + (outer, inner.pop()) for outer, inner in [ + ( + outer, + outer.proxy_set.intersection(wrapped_inner_columns)) + for outer in select.inner_columns + ] if inner + ) + self._result_columns = [ + (key, name, tuple(translate.get(o, o) for o in obj), type_) + for key, name, obj, type_ in self._result_columns + ] + + text = self._compose_select_body( + text, select, inner_columns, froms, byfrom, kwargs) + + if select._statement_hints: + per_dialect = [ + ht for (dialect_name, ht) + in select._statement_hints + if dialect_name in ('*', self.dialect.name) + ] + if per_dialect: + text += " " + self.get_statement_hint_text(per_dialect) + + if self.ctes and self._is_toplevel_select(select): + text = self._render_cte_clause() + text + + if select._suffixes: + text += " " + self._generate_prefixes( + select, select._suffixes, **kwargs) + + self.stack.pop(-1) + + if asfrom and parens: + return "(" + text + ")" + else: + return text + + def _is_toplevel_select(self, select): + """Return True if the stack is placed at the given select, and + is also the outermost SELECT, meaning there is either no stack + before this one, or the enclosing stack is a topmost INSERT. 
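+
+        A rough, assumed illustration: in an "INSERT ... FROM SELECT"
+        statement such as ``table.insert().from_select(names, sel)``,
+        the inner SELECT sits at stack depth 2 underneath the INSERT
+        and is still considered toplevel here, so its WITH/CTE clause
+        renders at the front of the complete statement.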
+ + """ + return ( + self.stack[-1]['selectable'] is select and + ( + len(self.stack) == 1 or self.isinsert and len(self.stack) == 2 + and self.statement is self.stack[0]['selectable'] + ) + ) + + def _setup_select_hints(self, select): + byfrom = dict([ + (from_, hinttext % { + 'name': from_._compiler_dispatch( + self, ashint=True) + }) + for (from_, dialect), hinttext in + select._hints.items() + if dialect in ('*', self.dialect.name) + ]) + hint_text = self.get_select_hint_text(byfrom) + return hint_text, byfrom + + def _setup_select_stack(self, select, entry, asfrom): + correlate_froms = entry['correlate_froms'] + asfrom_froms = entry['asfrom_froms'] + + if asfrom: + froms = select._get_display_froms( + explicit_correlate_froms=correlate_froms.difference( + asfrom_froms), + implicit_correlate_froms=()) + else: + froms = select._get_display_froms( + explicit_correlate_froms=correlate_froms, + implicit_correlate_froms=asfrom_froms) + + new_correlate_froms = set(selectable._from_objects(*froms)) + all_correlate_froms = new_correlate_froms.union(correlate_froms) + + new_entry = { + 'asfrom_froms': new_correlate_froms, + 'correlate_froms': all_correlate_froms, + 'selectable': select, + } + self.stack.append(new_entry) + return froms + + def _compose_select_body( + self, text, select, inner_columns, froms, byfrom, kwargs): + text += ', '.join(inner_columns) + + if froms: + text += " \nFROM " + + if select._hints: + text += ', '.join( + [f._compiler_dispatch(self, asfrom=True, + fromhints=byfrom, **kwargs) + for f in froms]) + else: + text += ', '.join( + [f._compiler_dispatch(self, asfrom=True, **kwargs) + for f in froms]) + else: + text += self.default_from() + + if select._whereclause is not None: + t = select._whereclause._compiler_dispatch(self, **kwargs) + if t: + text += " \nWHERE " + t + + if select._group_by_clause.clauses: + group_by = select._group_by_clause._compiler_dispatch( + self, **kwargs) + if group_by: + text += " GROUP BY " + group_by + + if select._having is not None: + t = select._having._compiler_dispatch(self, **kwargs) + if t: + text += " \nHAVING " + t + + if select._order_by_clause.clauses: + text += self.order_by_clause(select, **kwargs) + + if (select._limit_clause is not None or + select._offset_clause is not None): + text += self.limit_clause(select, **kwargs) + + if select._for_update_arg is not None: + text += self.for_update_clause(select, **kwargs) + + return text + + def _generate_prefixes(self, stmt, prefixes, **kw): + clause = " ".join( + prefix._compiler_dispatch(self, **kw) + for prefix, dialect_name in prefixes + if dialect_name is None or + dialect_name == self.dialect.name + ) + if clause: + clause += " " + return clause + + def _render_cte_clause(self): + if self.positional: + self.positiontup = sum([ + self.cte_positional[cte] + for cte in self.ctes], []) + \ + self.positiontup + cte_text = self.get_cte_preamble(self.ctes_recursive) + " " + cte_text += ", \n".join( + [txt for txt in self.ctes.values()] + ) + cte_text += "\n " + return cte_text + + def get_cte_preamble(self, recursive): + if recursive: + return "WITH RECURSIVE" + else: + return "WITH" + + def get_select_precolumns(self, select, **kw): + """Called when building a ``SELECT`` statement, position is just + before column list. 
+ + """ + return select._distinct and "DISTINCT " or "" + + def order_by_clause(self, select, **kw): + order_by = select._order_by_clause._compiler_dispatch(self, **kw) + if order_by: + return " ORDER BY " + order_by + else: + return "" + + def for_update_clause(self, select, **kw): + return " FOR UPDATE" + + def returning_clause(self, stmt, returning_cols): + raise exc.CompileError( + "RETURNING is not supported by this " + "dialect's statement compiler.") + + def limit_clause(self, select, **kw): + text = "" + if select._limit_clause is not None: + text += "\n LIMIT " + self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + if select._limit_clause is None: + text += "\n LIMIT -1" + text += " OFFSET " + self.process(select._offset_clause, **kw) + return text + + def visit_table(self, table, asfrom=False, iscrud=False, ashint=False, + fromhints=None, use_schema=True, **kwargs): + if asfrom or ashint: + if use_schema and getattr(table, "schema", None): + ret = self.preparer.quote_schema(table.schema) + \ + "." + self.preparer.quote(table.name) + else: + ret = self.preparer.quote(table.name) + if fromhints and table in fromhints: + ret = self.format_from_hint_text(ret, table, + fromhints[table], iscrud) + return ret + else: + return "" + + def visit_join(self, join, asfrom=False, **kwargs): + return ( + join.left._compiler_dispatch(self, asfrom=True, **kwargs) + + (join.isouter and " LEFT OUTER JOIN " or " JOIN ") + + join.right._compiler_dispatch(self, asfrom=True, **kwargs) + + " ON " + + join.onclause._compiler_dispatch(self, **kwargs) + ) + + def visit_insert(self, insert_stmt, **kw): + self.stack.append( + {'correlate_froms': set(), + "asfrom_froms": set(), + "selectable": insert_stmt}) + + self.isinsert = True + crud_params = crud._get_crud_params(self, insert_stmt, **kw) + + if not crud_params and \ + not self.dialect.supports_default_values and \ + not self.dialect.supports_empty_insert: + raise exc.CompileError("The '%s' dialect with current database " + "version settings does not support empty " + "inserts." % + self.dialect.name) + + if insert_stmt._has_multi_parameters: + if not self.dialect.supports_multivalues_insert: + raise exc.CompileError( + "The '%s' dialect with current database " + "version settings does not support " + "in-place multirow inserts." 
% + self.dialect.name) + crud_params_single = crud_params[0] + else: + crud_params_single = crud_params + + preparer = self.preparer + supports_default_values = self.dialect.supports_default_values + + text = "INSERT " + + if insert_stmt._prefixes: + text += self._generate_prefixes(insert_stmt, + insert_stmt._prefixes, **kw) + + text += "INTO " + table_text = preparer.format_table(insert_stmt.table) + + if insert_stmt._hints: + dialect_hints = dict([ + (table, hint_text) + for (table, dialect), hint_text in + insert_stmt._hints.items() + if dialect in ('*', self.dialect.name) + ]) + if insert_stmt.table in dialect_hints: + table_text = self.format_from_hint_text( + table_text, + insert_stmt.table, + dialect_hints[insert_stmt.table], + True + ) + + text += table_text + + if crud_params_single or not supports_default_values: + text += " (%s)" % ', '.join([preparer.format_column(c[0]) + for c in crud_params_single]) + + if self.returning or insert_stmt._returning: + self.returning = self.returning or insert_stmt._returning + returning_clause = self.returning_clause( + insert_stmt, self.returning) + + if self.returning_precedes_values: + text += " " + returning_clause + + if insert_stmt.select is not None: + text += " %s" % self.process(self._insert_from_select, **kw) + elif not crud_params and supports_default_values: + text += " DEFAULT VALUES" + elif insert_stmt._has_multi_parameters: + text += " VALUES %s" % ( + ", ".join( + "(%s)" % ( + ', '.join(c[1] for c in crud_param_set) + ) + for crud_param_set in crud_params + ) + ) + else: + text += " VALUES (%s)" % \ + ', '.join([c[1] for c in crud_params]) + + if self.returning and not self.returning_precedes_values: + text += " " + returning_clause + + self.stack.pop(-1) + + return text + + def update_limit_clause(self, update_stmt): + """Provide a hook for MySQL to add LIMIT to the UPDATE""" + return None + + def update_tables_clause(self, update_stmt, from_table, + extra_froms, **kw): + """Provide a hook to override the initial table clause + in an UPDATE statement. + + MySQL overrides this. + + """ + return from_table._compiler_dispatch(self, asfrom=True, + iscrud=True, **kw) + + def update_from_clause(self, update_stmt, + from_table, extra_froms, + from_hints, + **kw): + """Provide a hook to override the generation of an + UPDATE..FROM clause. + + MySQL and MSSQL override this. 
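+
+        An assumed illustration of the default rendering, for an UPDATE
+        against ``mytable`` that draws values from ``othertable``::
+
+            UPDATE mytable SET x=othertable.x FROM othertable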
+ + """ + return "FROM " + ', '.join( + t._compiler_dispatch(self, asfrom=True, + fromhints=from_hints, **kw) + for t in extra_froms) + + def visit_update(self, update_stmt, **kw): + self.stack.append( + {'correlate_froms': set([update_stmt.table]), + "asfrom_froms": set([update_stmt.table]), + "selectable": update_stmt}) + + self.isupdate = True + + extra_froms = update_stmt._extra_froms + + text = "UPDATE " + + if update_stmt._prefixes: + text += self._generate_prefixes(update_stmt, + update_stmt._prefixes, **kw) + + table_text = self.update_tables_clause(update_stmt, update_stmt.table, + extra_froms, **kw) + + crud_params = crud._get_crud_params(self, update_stmt, **kw) + + if update_stmt._hints: + dialect_hints = dict([ + (table, hint_text) + for (table, dialect), hint_text in + update_stmt._hints.items() + if dialect in ('*', self.dialect.name) + ]) + if update_stmt.table in dialect_hints: + table_text = self.format_from_hint_text( + table_text, + update_stmt.table, + dialect_hints[update_stmt.table], + True + ) + else: + dialect_hints = None + + text += table_text + + text += ' SET ' + include_table = extra_froms and \ + self.render_table_with_column_in_update_from + text += ', '.join( + c[0]._compiler_dispatch(self, + include_table=include_table) + + '=' + c[1] for c in crud_params + ) + + if self.returning or update_stmt._returning: + if not self.returning: + self.returning = update_stmt._returning + if self.returning_precedes_values: + text += " " + self.returning_clause( + update_stmt, self.returning) + + if extra_froms: + extra_from_text = self.update_from_clause( + update_stmt, + update_stmt.table, + extra_froms, + dialect_hints, **kw) + if extra_from_text: + text += " " + extra_from_text + + if update_stmt._whereclause is not None: + t = self.process(update_stmt._whereclause, **kw) + if t: + text += " WHERE " + t + + limit_clause = self.update_limit_clause(update_stmt) + if limit_clause: + text += " " + limit_clause + + if self.returning and not self.returning_precedes_values: + text += " " + self.returning_clause( + update_stmt, self.returning) + + self.stack.pop(-1) + + return text + + @util.memoized_property + def _key_getters_for_crud_column(self): + return crud._key_getters_for_crud_column(self) + + def visit_delete(self, delete_stmt, **kw): + self.stack.append({'correlate_froms': set([delete_stmt.table]), + "asfrom_froms": set([delete_stmt.table]), + "selectable": delete_stmt}) + self.isdelete = True + + text = "DELETE " + + if delete_stmt._prefixes: + text += self._generate_prefixes(delete_stmt, + delete_stmt._prefixes, **kw) + + text += "FROM " + table_text = delete_stmt.table._compiler_dispatch( + self, asfrom=True, iscrud=True) + + if delete_stmt._hints: + dialect_hints = dict([ + (table, hint_text) + for (table, dialect), hint_text in + delete_stmt._hints.items() + if dialect in ('*', self.dialect.name) + ]) + if delete_stmt.table in dialect_hints: + table_text = self.format_from_hint_text( + table_text, + delete_stmt.table, + dialect_hints[delete_stmt.table], + True + ) + + else: + dialect_hints = None + + text += table_text + + if delete_stmt._returning: + self.returning = delete_stmt._returning + if self.returning_precedes_values: + text += " " + self.returning_clause( + delete_stmt, delete_stmt._returning) + + if delete_stmt._whereclause is not None: + t = delete_stmt._whereclause._compiler_dispatch(self, **kw) + if t: + text += " WHERE " + t + + if self.returning and not self.returning_precedes_values: + text += " " + self.returning_clause( + delete_stmt, 
delete_stmt._returning) + + self.stack.pop(-1) + + return text + + def visit_savepoint(self, savepoint_stmt): + return "SAVEPOINT %s" % self.preparer.format_savepoint(savepoint_stmt) + + def visit_rollback_to_savepoint(self, savepoint_stmt): + return "ROLLBACK TO SAVEPOINT %s" % \ + self.preparer.format_savepoint(savepoint_stmt) + + def visit_release_savepoint(self, savepoint_stmt): + return "RELEASE SAVEPOINT %s" % \ + self.preparer.format_savepoint(savepoint_stmt) + + +class DDLCompiler(Compiled): + + @util.memoized_property + def sql_compiler(self): + return self.dialect.statement_compiler(self.dialect, None) + + @util.memoized_property + def type_compiler(self): + return self.dialect.type_compiler + + @property + def preparer(self): + return self.dialect.identifier_preparer + + def construct_params(self, params=None): + return None + + def visit_ddl(self, ddl, **kwargs): + # table events can substitute table and schema name + context = ddl.context + if isinstance(ddl.target, schema.Table): + context = context.copy() + + preparer = self.dialect.identifier_preparer + path = preparer.format_table_seq(ddl.target) + if len(path) == 1: + table, sch = path[0], '' + else: + table, sch = path[-1], path[0] + + context.setdefault('table', table) + context.setdefault('schema', sch) + context.setdefault('fullname', preparer.format_table(ddl.target)) + + return self.sql_compiler.post_process_text(ddl.statement % context) + + def visit_create_schema(self, create): + schema = self.preparer.format_schema(create.element) + return "CREATE SCHEMA " + schema + + def visit_drop_schema(self, drop): + schema = self.preparer.format_schema(drop.element) + text = "DROP SCHEMA " + schema + if drop.cascade: + text += " CASCADE" + return text + + def visit_create_table(self, create): + table = create.element + preparer = self.dialect.identifier_preparer + + text = "\n" + " ".join(['CREATE'] + + table._prefixes + + ['TABLE', + preparer.format_table(table), + "("]) + separator = "\n" + + # if only one primary key, specify it along with the column + first_pk = False + for create_column in create.columns: + column = create_column.element + try: + processed = self.process(create_column, + first_pk=column.primary_key + and not first_pk) + if processed is not None: + text += separator + separator = ", \n" + text += "\t" + processed + if column.primary_key: + first_pk = True + except exc.CompileError as ce: + util.raise_from_cause( + exc.CompileError( + util.u("(in table '%s', column '%s'): %s") % + (table.description, column.name, ce.args[0]) + )) + + const = self.create_table_constraints( + table, _include_foreign_key_constraints= + create.include_foreign_key_constraints) + if const: + text += separator + "\t" + const + + text += "\n)%s\n\n" % self.post_create_table(table) + return text + + def visit_create_column(self, create, first_pk=False): + column = create.element + + if column.system: + return None + + text = self.get_column_specification( + column, + first_pk=first_pk + ) + const = " ".join(self.process(constraint) + for constraint in column.constraints) + if const: + text += " " + const + + return text + + def create_table_constraints( + self, table, + _include_foreign_key_constraints=None): + + # On some DB order is significant: visit PK first, then the + # other constraints (engine.ReflectionTest.testbasic failed on FB2) + constraints = [] + if table.primary_key: + constraints.append(table.primary_key) + + all_fkcs = table.foreign_key_constraints + if _include_foreign_key_constraints is not None: + omit_fkcs = 
all_fkcs.difference(_include_foreign_key_constraints) + else: + omit_fkcs = set() + + constraints.extend([c for c in table._sorted_constraints + if c is not table.primary_key and + c not in omit_fkcs]) + + return ", \n\t".join( + p for p in + (self.process(constraint) + for constraint in constraints + if ( + constraint._create_rule is None or + constraint._create_rule(self)) + and ( + not self.dialect.supports_alter or + not getattr(constraint, 'use_alter', False) + )) if p is not None + ) + + def visit_drop_table(self, drop): + return "\nDROP TABLE " + self.preparer.format_table(drop.element) + + def visit_drop_view(self, drop): + return "\nDROP VIEW " + self.preparer.format_table(drop.element) + + def _verify_index_table(self, index): + if index.table is None: + raise exc.CompileError("Index '%s' is not associated " + "with any table." % index.name) + + def visit_create_index(self, create, include_schema=False, + include_table_schema=True): + index = create.element + self._verify_index_table(index) + preparer = self.preparer + text = "CREATE " + if index.unique: + text += "UNIQUE " + text += "INDEX %s ON %s (%s)" \ + % ( + self._prepared_index_name(index, + include_schema=include_schema), + preparer.format_table(index.table, + use_schema=include_table_schema), + ', '.join( + self.sql_compiler.process( + expr, include_table=False, literal_binds=True) for + expr in index.expressions) + ) + return text + + def visit_drop_index(self, drop): + index = drop.element + return "\nDROP INDEX " + self._prepared_index_name( + index, include_schema=True) + + def _prepared_index_name(self, index, include_schema=False): + if include_schema and index.table is not None and index.table.schema: + schema = index.table.schema + schema_name = self.preparer.quote_schema(schema) + else: + schema_name = None + + ident = index.name + if isinstance(ident, elements._truncated_label): + max_ = self.dialect.max_index_name_length or \ + self.dialect.max_identifier_length + if len(ident) > max_: + ident = ident[0:max_ - 8] + \ + "_" + util.md5_hex(ident)[-4:] + else: + self.dialect.validate_identifier(ident) + + index_name = self.preparer.quote(ident) + + if schema_name: + index_name = schema_name + "." 
+ index_name + return index_name + + def visit_add_constraint(self, create): + return "ALTER TABLE %s ADD %s" % ( + self.preparer.format_table(create.element.table), + self.process(create.element) + ) + + def visit_create_sequence(self, create): + text = "CREATE SEQUENCE %s" % \ + self.preparer.format_sequence(create.element) + if create.element.increment is not None: + text += " INCREMENT BY %d" % create.element.increment + if create.element.start is not None: + text += " START WITH %d" % create.element.start + if create.element.minvalue is not None: + text += " MINVALUE %d" % create.element.minvalue + if create.element.maxvalue is not None: + text += " MAXVALUE %d" % create.element.maxvalue + if create.element.nominvalue is not None: + text += " NO MINVALUE" + if create.element.nomaxvalue is not None: + text += " NO MAXVALUE" + if create.element.cycle is not None: + text += " CYCLE" + return text + + def visit_drop_sequence(self, drop): + return "DROP SEQUENCE %s" % \ + self.preparer.format_sequence(drop.element) + + def visit_drop_constraint(self, drop): + constraint = drop.element + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + else: + formatted_name = None + + if formatted_name is None: + raise exc.CompileError( + "Can't emit DROP CONSTRAINT for constraint %r; " + "it has no name" % drop.element) + return "ALTER TABLE %s DROP CONSTRAINT %s%s" % ( + self.preparer.format_table(drop.element.table), + formatted_name, + drop.cascade and " CASCADE" or "" + ) + + def get_column_specification(self, column, **kwargs): + colspec = self.preparer.format_column(column) + " " + \ + self.dialect.type_compiler.process( + column.type, type_expression=column) + default = self.get_column_default_string(column) + if default is not None: + colspec += " DEFAULT " + default + + if not column.nullable: + colspec += " NOT NULL" + return colspec + + def post_create_table(self, table): + return '' + + def get_column_default_string(self, column): + if isinstance(column.server_default, schema.DefaultClause): + if isinstance(column.server_default.arg, util.string_types): + return "'%s'" % column.server_default.arg + else: + return self.sql_compiler.process( + column.server_default.arg, literal_binds=True) + else: + return None + + def visit_check_constraint(self, constraint): + text = "" + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name + text += "CHECK (%s)" % self.sql_compiler.process(constraint.sqltext, + include_table=False, + literal_binds=True) + text += self.define_constraint_deferrability(constraint) + return text + + def visit_column_check_constraint(self, constraint): + text = "" + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name + text += "CHECK (%s)" % constraint.sqltext + text += self.define_constraint_deferrability(constraint) + return text + + def visit_primary_key_constraint(self, constraint): + if len(constraint) == 0: + return '' + text = "" + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name + text += "PRIMARY KEY " + text += "(%s)" % ', '.join(self.preparer.quote(c.name) + for c in constraint) + text += self.define_constraint_deferrability(constraint) + return text + + def 
visit_foreign_key_constraint(self, constraint): + preparer = self.dialect.identifier_preparer + text = "" + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name + remote_table = list(constraint.elements)[0].column.table + text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % ( + ', '.join(preparer.quote(f.parent.name) + for f in constraint.elements), + self.define_constraint_remote_table( + constraint, remote_table, preparer), + ', '.join(preparer.quote(f.column.name) + for f in constraint.elements) + ) + text += self.define_constraint_match(constraint) + text += self.define_constraint_cascades(constraint) + text += self.define_constraint_deferrability(constraint) + return text + + def define_constraint_remote_table(self, constraint, table, preparer): + """Format the remote table clause of a CREATE CONSTRAINT clause.""" + + return preparer.format_table(table) + + def visit_unique_constraint(self, constraint): + if len(constraint) == 0: + return '' + text = "" + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + text += "CONSTRAINT %s " % formatted_name + text += "UNIQUE (%s)" % ( + ', '.join(self.preparer.quote(c.name) + for c in constraint)) + text += self.define_constraint_deferrability(constraint) + return text + + def define_constraint_cascades(self, constraint): + text = "" + if constraint.ondelete is not None: + text += " ON DELETE %s" % constraint.ondelete + if constraint.onupdate is not None: + text += " ON UPDATE %s" % constraint.onupdate + return text + + def define_constraint_deferrability(self, constraint): + text = "" + if constraint.deferrable is not None: + if constraint.deferrable: + text += " DEFERRABLE" + else: + text += " NOT DEFERRABLE" + if constraint.initially is not None: + text += " INITIALLY %s" % constraint.initially + return text + + def define_constraint_match(self, constraint): + text = "" + if constraint.match is not None: + text += " MATCH %s" % constraint.match + return text + + +class GenericTypeCompiler(TypeCompiler): + + def visit_FLOAT(self, type_, **kw): + return "FLOAT" + + def visit_REAL(self, type_, **kw): + return "REAL" + + def visit_NUMERIC(self, type_, **kw): + if type_.precision is None: + return "NUMERIC" + elif type_.scale is None: + return "NUMERIC(%(precision)s)" % \ + {'precision': type_.precision} + else: + return "NUMERIC(%(precision)s, %(scale)s)" % \ + {'precision': type_.precision, + 'scale': type_.scale} + + def visit_DECIMAL(self, type_, **kw): + if type_.precision is None: + return "DECIMAL" + elif type_.scale is None: + return "DECIMAL(%(precision)s)" % \ + {'precision': type_.precision} + else: + return "DECIMAL(%(precision)s, %(scale)s)" % \ + {'precision': type_.precision, + 'scale': type_.scale} + + def visit_INTEGER(self, type_, **kw): + return "INTEGER" + + def visit_SMALLINT(self, type_, **kw): + return "SMALLINT" + + def visit_BIGINT(self, type_, **kw): + return "BIGINT" + + def visit_TIMESTAMP(self, type_, **kw): + return 'TIMESTAMP' + + def visit_DATETIME(self, type_, **kw): + return "DATETIME" + + def visit_DATE(self, type_, **kw): + return "DATE" + + def visit_TIME(self, type_, **kw): + return "TIME" + + def visit_CLOB(self, type_, **kw): + return "CLOB" + + def visit_NCLOB(self, type_, **kw): + return "NCLOB" + + def _render_string_type(self, type_, name): + + text = name + if type_.length: + text += "(%d)" % type_.length + if type_.collation: + text += ' COLLATE 
"%s"' % type_.collation + return text + + def visit_CHAR(self, type_, **kw): + return self._render_string_type(type_, "CHAR") + + def visit_NCHAR(self, type_, **kw): + return self._render_string_type(type_, "NCHAR") + + def visit_VARCHAR(self, type_, **kw): + return self._render_string_type(type_, "VARCHAR") + + def visit_NVARCHAR(self, type_, **kw): + return self._render_string_type(type_, "NVARCHAR") + + def visit_TEXT(self, type_, **kw): + return self._render_string_type(type_, "TEXT") + + def visit_BLOB(self, type_, **kw): + return "BLOB" + + def visit_BINARY(self, type_, **kw): + return "BINARY" + (type_.length and "(%d)" % type_.length or "") + + def visit_VARBINARY(self, type_, **kw): + return "VARBINARY" + (type_.length and "(%d)" % type_.length or "") + + def visit_BOOLEAN(self, type_, **kw): + return "BOOLEAN" + + def visit_large_binary(self, type_, **kw): + return self.visit_BLOB(type_, **kw) + + def visit_boolean(self, type_, **kw): + return self.visit_BOOLEAN(type_, **kw) + + def visit_time(self, type_, **kw): + return self.visit_TIME(type_, **kw) + + def visit_datetime(self, type_, **kw): + return self.visit_DATETIME(type_, **kw) + + def visit_date(self, type_, **kw): + return self.visit_DATE(type_, **kw) + + def visit_big_integer(self, type_, **kw): + return self.visit_BIGINT(type_, **kw) + + def visit_small_integer(self, type_, **kw): + return self.visit_SMALLINT(type_, **kw) + + def visit_integer(self, type_, **kw): + return self.visit_INTEGER(type_, **kw) + + def visit_real(self, type_, **kw): + return self.visit_REAL(type_, **kw) + + def visit_float(self, type_, **kw): + return self.visit_FLOAT(type_, **kw) + + def visit_numeric(self, type_, **kw): + return self.visit_NUMERIC(type_, **kw) + + def visit_string(self, type_, **kw): + return self.visit_VARCHAR(type_, **kw) + + def visit_unicode(self, type_, **kw): + return self.visit_VARCHAR(type_, **kw) + + def visit_text(self, type_, **kw): + return self.visit_TEXT(type_, **kw) + + def visit_unicode_text(self, type_, **kw): + return self.visit_TEXT(type_, **kw) + + def visit_enum(self, type_, **kw): + return self.visit_VARCHAR(type_, **kw) + + def visit_null(self, type_, **kw): + raise exc.CompileError("Can't generate DDL for %r; " + "did you forget to specify a " + "type on this Column?" % type_) + + def visit_type_decorator(self, type_, **kw): + return self.process(type_.type_engine(self.dialect), **kw) + + def visit_user_defined(self, type_, **kw): + return type_.get_col_spec(**kw) + + +class IdentifierPreparer(object): + + """Handle quoting and case-folding of identifiers based on options.""" + + reserved_words = RESERVED_WORDS + + legal_characters = LEGAL_CHARACTERS + + illegal_initial_characters = ILLEGAL_INITIAL_CHARACTERS + + def __init__(self, dialect, initial_quote='"', + final_quote=None, escape_quote='"', omit_schema=False): + """Construct a new ``IdentifierPreparer`` object. + + initial_quote + Character that begins a delimited identifier. + + final_quote + Character that ends a delimited identifier. Defaults to + `initial_quote`. + + omit_schema + Prevent prepending schema name. Useful for databases that do + not support schemae. + """ + + self.dialect = dialect + self.initial_quote = initial_quote + self.final_quote = final_quote or self.initial_quote + self.escape_quote = escape_quote + self.escape_to_quote = self.escape_quote * 2 + self.omit_schema = omit_schema + self._strings = {} + + def _escape_identifier(self, value): + """Escape an identifier. 
+ + Subclasses should override this to provide database-dependent + escaping behavior. + """ + + return value.replace(self.escape_quote, self.escape_to_quote) + + def _unescape_identifier(self, value): + """Canonicalize an escaped identifier. + + Subclasses should override this to provide database-dependent + unescaping behavior that reverses _escape_identifier. + """ + + return value.replace(self.escape_to_quote, self.escape_quote) + + def quote_identifier(self, value): + """Quote an identifier. + + Subclasses should override this to provide database-dependent + quoting behavior. + """ + + return self.initial_quote + \ + self._escape_identifier(value) + \ + self.final_quote + + def _requires_quotes(self, value): + """Return True if the given identifier requires quoting.""" + lc_value = value.lower() + return (lc_value in self.reserved_words + or value[0] in self.illegal_initial_characters + or not self.legal_characters.match(util.text_type(value)) + or (lc_value != value)) + + def quote_schema(self, schema, force=None): + """Conditionally quote a schema. + + Subclasses can override this to provide database-dependent + quoting behavior for schema names. + + the 'force' flag should be considered deprecated. + + """ + return self.quote(schema, force) + + def quote(self, ident, force=None): + """Conditionally quote an identifier. + + the 'force' flag should be considered deprecated. + """ + + force = getattr(ident, "quote", None) + + if force is None: + if ident in self._strings: + return self._strings[ident] + else: + if self._requires_quotes(ident): + self._strings[ident] = self.quote_identifier(ident) + else: + self._strings[ident] = ident + return self._strings[ident] + elif force: + return self.quote_identifier(ident) + else: + return ident + + def format_sequence(self, sequence, use_schema=True): + name = self.quote(sequence.name) + if (not self.omit_schema and use_schema and + sequence.schema is not None): + name = self.quote_schema(sequence.schema) + "." + name + return name + + def format_label(self, label, name=None): + return self.quote(name or label.name) + + def format_alias(self, alias, name=None): + return self.quote(name or alias.name) + + def format_savepoint(self, savepoint, name=None): + return self.quote(name or savepoint.ident) + + @util.dependencies("sqlalchemy.sql.naming") + def format_constraint(self, naming, constraint): + if isinstance(constraint.name, elements._defer_name): + name = naming._constraint_name_for_table( + constraint, constraint.table) + if name: + return self.quote(name) + elif isinstance(constraint.name, elements._defer_none_name): + return None + return self.quote(constraint.name) + + def format_table(self, table, use_schema=True, name=None): + """Prepare a quoted table and schema name.""" + + if name is None: + name = table.name + result = self.quote(name) + if not self.omit_schema and use_schema \ + and getattr(table, "schema", None): + result = self.quote_schema(table.schema) + "." + result + return result + + def format_schema(self, name, quote=None): + """Prepare a quoted schema name.""" + + return self.quote(name, quote) + + def format_column(self, column, use_table=False, + name=None, table_name=None): + """Prepare a quoted column name.""" + + if name is None: + name = column.name + if not getattr(column, 'is_literal', False): + if use_table: + return self.format_table( + column.table, use_schema=False, + name=table_name) + "." 
+ self.quote(name) + else: + return self.quote(name) + else: + # literal textual elements get stuck into ColumnClause a lot, + # which shouldn't get quoted + + if use_table: + return self.format_table( + column.table, use_schema=False, + name=table_name) + '.' + name + else: + return name + + def format_table_seq(self, table, use_schema=True): + """Format table name and schema as a tuple.""" + + # Dialects with more levels in their fully qualified references + # ('database', 'owner', etc.) could override this and return + # a longer sequence. + + if not self.omit_schema and use_schema and \ + getattr(table, 'schema', None): + return (self.quote_schema(table.schema), + self.format_table(table, use_schema=False)) + else: + return (self.format_table(table, use_schema=False), ) + + @util.memoized_property + def _r_identifiers(self): + initial, final, escaped_final = \ + [re.escape(s) for s in + (self.initial_quote, self.final_quote, + self._escape_identifier(self.final_quote))] + r = re.compile( + r'(?:' + r'(?:%(initial)s((?:%(escaped)s|[^%(final)s])+)%(final)s' + r'|([^\.]+))(?=\.|$))+' % + {'initial': initial, + 'final': final, + 'escaped': escaped_final}) + return r + + def unformat_identifiers(self, identifiers): + """Unpack 'schema.table.column'-like strings into components.""" + + r = self._r_identifiers + return [self._unescape_identifier(i) + for i in [a or b for a, b in r.findall(identifiers)]] diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/crud.py b/lib/python3.4/site-packages/sqlalchemy/sql/crud.py new file mode 100644 index 0000000..273cc7e --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/crud.py @@ -0,0 +1,571 @@ +# sql/crud.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Functions used by compiler.py to determine the parameters rendered +within INSERT and UPDATE statements. + +""" +from .. import util +from .. import exc +from . import elements +import operator + +REQUIRED = util.symbol('REQUIRED', """ +Placeholder for the value within a :class:`.BindParameter` +which is required to be present when the statement is passed +to :meth:`.Connection.execute`. + +This symbol is typically used when a :func:`.expression.insert` +or :func:`.expression.update` statement is compiled without parameter +values present. + +""") + + +def _get_crud_params(compiler, stmt, **kw): + """create a set of tuples representing column/string pairs for use + in an INSERT or UPDATE statement. + + Also generates the Compiled object's postfetch, prefetch, and + returning column collections, used for default handling and ultimately + populating the ResultProxy's prefetch_cols() and postfetch_cols() + collections. 
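+
+    As a rough, assumed illustration: compiling ``table.insert()`` with
+    no parameters yields one (column, bind-text) pair per column, e.g.
+    ``[(table.c.id, ':id'), (table.c.data, ':data')]``.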
+ + """ + + compiler.postfetch = [] + compiler.prefetch = [] + compiler.returning = [] + + # no parameters in the statement, no parameters in the + # compiled params - return binds for all columns + if compiler.column_keys is None and stmt.parameters is None: + return [ + (c, _create_bind_param( + compiler, c, None, required=True)) + for c in stmt.table.columns + ] + + if stmt._has_multi_parameters: + stmt_parameters = stmt.parameters[0] + else: + stmt_parameters = stmt.parameters + + # getters - these are normally just column.key, + # but in the case of mysql multi-table update, the rules for + # .key must conditionally take tablename into account + _column_as_key, _getattr_col_key, _col_bind_name = \ + _key_getters_for_crud_column(compiler) + + # if we have statement parameters - set defaults in the + # compiled params + if compiler.column_keys is None: + parameters = {} + else: + parameters = dict((_column_as_key(key), REQUIRED) + for key in compiler.column_keys + if not stmt_parameters or + key not in stmt_parameters) + + # create a list of column assignment clauses as tuples + values = [] + + if stmt_parameters is not None: + _get_stmt_parameters_params( + compiler, + parameters, stmt_parameters, _column_as_key, values, kw) + + check_columns = {} + + # special logic that only occurs for multi-table UPDATE + # statements + if compiler.isupdate and stmt._extra_froms and stmt_parameters: + _get_multitable_params( + compiler, stmt, stmt_parameters, check_columns, + _col_bind_name, _getattr_col_key, values, kw) + + if compiler.isinsert and stmt.select_names: + _scan_insert_from_select_cols( + compiler, stmt, parameters, + _getattr_col_key, _column_as_key, + _col_bind_name, check_columns, values, kw) + else: + _scan_cols( + compiler, stmt, parameters, + _getattr_col_key, _column_as_key, + _col_bind_name, check_columns, values, kw) + + if parameters and stmt_parameters: + check = set(parameters).intersection( + _column_as_key(k) for k in stmt.parameters + ).difference(check_columns) + if check: + raise exc.CompileError( + "Unconsumed column names: %s" % + (", ".join("%s" % c for c in check)) + ) + + if stmt._has_multi_parameters: + values = _extend_values_for_multiparams(compiler, stmt, values, kw) + + return values + + +def _create_bind_param( + compiler, col, value, process=True, + required=False, name=None, **kw): + if name is None: + name = col.key + bindparam = elements.BindParameter( + name, value, type_=col.type, required=required) + bindparam._is_crud = True + if process: + bindparam = bindparam._compiler_dispatch(compiler, **kw) + return bindparam + + +def _key_getters_for_crud_column(compiler): + if compiler.isupdate and compiler.statement._extra_froms: + # when extra tables are present, refer to the columns + # in those extra tables as table-qualified, including in + # dictionaries and when rendering bind param names. + # the "main" table of the statement remains unqualified, + # allowing the most compatibility with a non-multi-table + # statement. 
+ _et = set(compiler.statement._extra_froms) + + def _column_as_key(key): + str_key = elements._column_as_key(key) + if hasattr(key, 'table') and key.table in _et: + return (key.table.name, str_key) + else: + return str_key + + def _getattr_col_key(col): + if col.table in _et: + return (col.table.name, col.key) + else: + return col.key + + def _col_bind_name(col): + if col.table in _et: + return "%s_%s" % (col.table.name, col.key) + else: + return col.key + + else: + _column_as_key = elements._column_as_key + _getattr_col_key = _col_bind_name = operator.attrgetter("key") + + return _column_as_key, _getattr_col_key, _col_bind_name + + +def _scan_insert_from_select_cols( + compiler, stmt, parameters, _getattr_col_key, + _column_as_key, _col_bind_name, check_columns, values, kw): + + need_pks, implicit_returning, \ + implicit_return_defaults, postfetch_lastrowid = \ + _get_returning_modifiers(compiler, stmt) + + cols = [stmt.table.c[_column_as_key(name)] + for name in stmt.select_names] + + compiler._insert_from_select = stmt.select + + add_select_cols = [] + if stmt.include_insert_from_select_defaults: + col_set = set(cols) + for col in stmt.table.columns: + if col not in col_set and col.default: + cols.append(col) + + for c in cols: + col_key = _getattr_col_key(c) + if col_key in parameters and col_key not in check_columns: + parameters.pop(col_key) + values.append((c, None)) + else: + _append_param_insert_select_hasdefault( + compiler, stmt, c, add_select_cols, kw) + + if add_select_cols: + values.extend(add_select_cols) + compiler._insert_from_select = compiler._insert_from_select._generate() + compiler._insert_from_select._raw_columns = \ + tuple(compiler._insert_from_select._raw_columns) + tuple( + expr for col, expr in add_select_cols) + + +def _scan_cols( + compiler, stmt, parameters, _getattr_col_key, + _column_as_key, _col_bind_name, check_columns, values, kw): + + need_pks, implicit_returning, \ + implicit_return_defaults, postfetch_lastrowid = \ + _get_returning_modifiers(compiler, stmt) + + if stmt._parameter_ordering: + parameter_ordering = [ + _column_as_key(key) for key in stmt._parameter_ordering + ] + ordered_keys = set(parameter_ordering) + cols = [ + stmt.table.c[key] for key in parameter_ordering + ] + [ + c for c in stmt.table.c if c.key not in ordered_keys + ] + else: + cols = stmt.table.columns + + for c in cols: + col_key = _getattr_col_key(c) + if col_key in parameters and col_key not in check_columns: + + _append_param_parameter( + compiler, stmt, c, col_key, parameters, _col_bind_name, + implicit_returning, implicit_return_defaults, values, kw) + + elif compiler.isinsert: + if c.primary_key and \ + need_pks and \ + ( + implicit_returning or + not postfetch_lastrowid or + c is not stmt.table._autoincrement_column + ): + + if implicit_returning: + _append_param_insert_pk_returning( + compiler, stmt, c, values, kw) + else: + _append_param_insert_pk(compiler, stmt, c, values, kw) + + elif c.default is not None: + + _append_param_insert_hasdefault( + compiler, stmt, c, implicit_return_defaults, + values, kw) + + elif c.server_default is not None: + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + elif not c.primary_key: + compiler.postfetch.append(c) + elif implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + + elif compiler.isupdate: + _append_param_update( + compiler, stmt, c, implicit_return_defaults, values, kw) + + +def _append_param_parameter( + compiler, stmt, c, 
col_key, parameters, _col_bind_name, + implicit_returning, implicit_return_defaults, values, kw): + value = parameters.pop(col_key) + if elements._is_literal(value): + value = _create_bind_param( + compiler, c, value, required=value is REQUIRED, + name=_col_bind_name(c) + if not stmt._has_multi_parameters + else "%s_0" % _col_bind_name(c), + **kw + ) + else: + if isinstance(value, elements.BindParameter) and \ + value.type._isnull: + value = value._clone() + value.type = c.type + + if c.primary_key and implicit_returning: + compiler.returning.append(c) + value = compiler.process(value.self_group(), **kw) + elif implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + value = compiler.process(value.self_group(), **kw) + else: + compiler.postfetch.append(c) + value = compiler.process(value.self_group(), **kw) + values.append((c, value)) + + + def _append_param_insert_pk_returning(compiler, stmt, c, values, kw): + if c.default is not None: + if c.default.is_sequence: + if compiler.dialect.supports_sequences and \ + (not c.default.optional or + not compiler.dialect.sequences_optional): + proc = compiler.process(c.default, **kw) + values.append((c, proc)) + compiler.returning.append(c) + elif c.default.is_clause_element: + values.append( + (c, compiler.process( + c.default.arg.self_group(), **kw)) + ) + compiler.returning.append(c) + else: + values.append( + (c, _create_prefetch_bind_param(compiler, c)) + ) + + else: + compiler.returning.append(c) + + + def _create_prefetch_bind_param(compiler, c, process=True, name=None): + param = _create_bind_param(compiler, c, None, process=process, name=name) + compiler.prefetch.append(c) + return param + + + class _multiparam_column(elements.ColumnElement): + def __init__(self, original, index): + self.key = "%s_%d" % (original.key, index + 1) + self.original = original + self.default = original.default + self.type = original.type + + def __eq__(self, other): + return isinstance(other, _multiparam_column) and \ + other.key == self.key and \ + other.original == self.original + + + def _process_multiparam_default_bind(compiler, c, index, kw): + + if not c.default: + raise exc.CompileError( + "INSERT value for column %s is explicitly rendered as a bound " + "parameter in the VALUES clause; " + "a Python-side value or SQL expression is required" % c) + elif c.default.is_clause_element: + return compiler.process(c.default.arg.self_group(), **kw) + else: + col = _multiparam_column(c, index) + return _create_prefetch_bind_param(compiler, col) + + + def _append_param_insert_pk(compiler, stmt, c, values, kw): + if ( + (c.default is not None and + (not c.default.is_sequence or + compiler.dialect.supports_sequences)) or + c is stmt.table._autoincrement_column and + (compiler.dialect.supports_sequences or + compiler.dialect.
+ preexecute_autoincrement_sequences) + ): + values.append( + (c, _create_prefetch_bind_param(compiler, c)) + ) + + +def _append_param_insert_hasdefault( + compiler, stmt, c, implicit_return_defaults, values, kw): + + if c.default.is_sequence: + if compiler.dialect.supports_sequences and \ + (not c.default.optional or + not compiler.dialect.sequences_optional): + proc = compiler.process(c.default, **kw) + values.append((c, proc)) + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + elif not c.primary_key: + compiler.postfetch.append(c) + elif c.default.is_clause_element: + proc = compiler.process(c.default.arg.self_group(), **kw) + values.append((c, proc)) + + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + elif not c.primary_key: + # don't add primary key column to postfetch + compiler.postfetch.append(c) + else: + values.append( + (c, _create_prefetch_bind_param(compiler, c)) + ) + + +def _append_param_insert_select_hasdefault( + compiler, stmt, c, values, kw): + + if c.default.is_sequence: + if compiler.dialect.supports_sequences and \ + (not c.default.optional or + not compiler.dialect.sequences_optional): + proc = c.default + values.append((c, proc)) + elif c.default.is_clause_element: + proc = c.default.arg.self_group() + values.append((c, proc)) + else: + values.append( + (c, _create_prefetch_bind_param(compiler, c, process=False)) + ) + + +def _append_param_update( + compiler, stmt, c, implicit_return_defaults, values, kw): + + if c.onupdate is not None and not c.onupdate.is_sequence: + if c.onupdate.is_clause_element: + values.append( + (c, compiler.process( + c.onupdate.arg.self_group(), **kw)) + ) + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + else: + compiler.postfetch.append(c) + else: + values.append( + (c, _create_prefetch_bind_param(compiler, c)) + ) + elif c.server_onupdate is not None: + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + else: + compiler.postfetch.append(c) + elif implicit_return_defaults and \ + stmt._return_defaults is not True and \ + c in implicit_return_defaults: + compiler.returning.append(c) + + +def _get_multitable_params( + compiler, stmt, stmt_parameters, check_columns, + _col_bind_name, _getattr_col_key, values, kw): + + normalized_params = dict( + (elements._clause_element_as_expr(c), param) + for c, param in stmt_parameters.items() + ) + affected_tables = set() + for t in stmt._extra_froms: + for c in t.c: + if c in normalized_params: + affected_tables.add(t) + check_columns[_getattr_col_key(c)] = c + value = normalized_params[c] + if elements._is_literal(value): + value = _create_bind_param( + compiler, c, value, required=value is REQUIRED, + name=_col_bind_name(c)) + else: + compiler.postfetch.append(c) + value = compiler.process(value.self_group(), **kw) + values.append((c, value)) + # determine tables which are actually to be updated - process onupdate + # and server_onupdate for these + for t in affected_tables: + for c in t.c: + if c in normalized_params: + continue + elif (c.onupdate is not None and not + c.onupdate.is_sequence): + if c.onupdate.is_clause_element: + values.append( + (c, compiler.process( + c.onupdate.arg.self_group(), + **kw) + ) + ) + compiler.postfetch.append(c) + else: + values.append( + (c, _create_prefetch_bind_param( + compiler, c, name=_col_bind_name(c))) + ) + elif c.server_onupdate is not None: + 
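+                # server-side ON UPDATE default: the database will
+                # generate the value itself, so mark the column for
+                # post-fetch rather than emitting a bound parameter.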
compiler.postfetch.append(c) + + +def _extend_values_for_multiparams(compiler, stmt, values, kw): + values_0 = values + values = [values] + + values.extend( + [ + ( + c, + (_create_bind_param( + compiler, c, row[c.key], + name="%s_%d" % (c.key, i + 1) + ) if elements._is_literal(row[c.key]) + else compiler.process( + row[c.key].self_group(), **kw)) + if c.key in row else + _process_multiparam_default_bind(compiler, c, i, kw) + ) + for (c, param) in values_0 + ] + for i, row in enumerate(stmt.parameters[1:]) + ) + return values + + +def _get_stmt_parameters_params( + compiler, parameters, stmt_parameters, _column_as_key, values, kw): + for k, v in stmt_parameters.items(): + colkey = _column_as_key(k) + if colkey is not None: + parameters.setdefault(colkey, v) + else: + # a non-Column expression on the left side; + # add it to values() in an "as-is" state, + # coercing right side to bound param + if elements._is_literal(v): + v = compiler.process( + elements.BindParameter(None, v, type_=k.type), + **kw) + else: + v = compiler.process(v.self_group(), **kw) + + values.append((k, v)) + + +def _get_returning_modifiers(compiler, stmt): + need_pks = compiler.isinsert and \ + not compiler.inline and \ + not stmt._returning and \ + not stmt._has_multi_parameters + + implicit_returning = need_pks and \ + compiler.dialect.implicit_returning and \ + stmt.table.implicit_returning + + if compiler.isinsert: + implicit_return_defaults = (implicit_returning and + stmt._return_defaults) + elif compiler.isupdate: + implicit_return_defaults = (compiler.dialect.implicit_returning and + stmt.table.implicit_returning and + stmt._return_defaults) + else: + implicit_return_defaults = False + + if implicit_return_defaults: + if stmt._return_defaults is True: + implicit_return_defaults = set(stmt.table.c) + else: + implicit_return_defaults = set(stmt._return_defaults) + + postfetch_lastrowid = need_pks and compiler.dialect.postfetch_lastrowid + + return need_pks, implicit_returning, \ + implicit_return_defaults, postfetch_lastrowid diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/ddl.py b/lib/python3.4/site-packages/sqlalchemy/sql/ddl.py new file mode 100644 index 0000000..1cb9eeb --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/ddl.py @@ -0,0 +1,1095 @@ +# sql/ddl.py +# Copyright (C) 2009-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +""" +Provides the hierarchy of DDL-defining schema items as well as routines +to invoke them for a create/drop call. + +""" + +from .. import util +from .elements import ClauseElement +from .base import Executable, _generative, SchemaVisitor, _bind_or_error +from ..util import topological +from .. import event +from .. import exc + + +class _DDLCompiles(ClauseElement): + def _compiler(self, dialect, **kw): + """Return a compiler appropriate for this ClauseElement, given a + Dialect.""" + + return dialect.ddl_compiler(dialect, self, **kw) + + +class DDLElement(Executable, _DDLCompiles): + """Base class for DDL expression constructs. + + This class is the base for the general purpose :class:`.DDL` class, + as well as the various create/drop clause constructs such as + :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`, + etc. + + :class:`.DDLElement` integrates closely with SQLAlchemy events, + introduced in :ref:`event_toplevel`. 
An instance of one is
+    itself an event receiving callable::
+
+        event.listen(
+            users,
+            'after_create',
+            AddConstraint(constraint).execute_if(dialect='postgresql')
+        )
+
+    .. seealso::
+
+        :class:`.DDL`
+
+        :class:`.DDLEvents`
+
+        :ref:`event_toplevel`
+
+        :ref:`schema_ddl_sequences`
+
+    """
+
+    _execution_options = Executable.\
+        _execution_options.union({'autocommit': True})
+
+    target = None
+    on = None
+    dialect = None
+    callable_ = None
+
+    def _execute_on_connection(self, connection, multiparams, params):
+        return connection._execute_ddl(self, multiparams, params)
+
+    def execute(self, bind=None, target=None):
+        """Execute this DDL immediately.
+
+        Executes the DDL statement in isolation using the supplied
+        :class:`.Connectable`, or the :class:`.Connectable` assigned to
+        the ``.bind`` property if none is supplied. If the DDL has a
+        conditional ``on`` criterion, it will be invoked with None as the
+        event.
+
+        :param bind:
+          Optional, an ``Engine`` or ``Connection``. If not supplied, a valid
+          :class:`.Connectable` must be present in the
+          ``.bind`` property.
+
+        :param target:
+          Optional, defaults to None. The target SchemaItem for the
+          execute call. Will be passed to the ``on`` callable if any,
+          and may also provide string expansion data for the
+          statement. See ``execute_at`` for more information.
+
+        """
+
+        if bind is None:
+            bind = _bind_or_error(self)
+
+        if self._should_execute(target, bind):
+            return bind.execute(self.against(target))
+        else:
+            bind.engine.logger.info(
+                "DDL execution skipped, criteria not met.")
+
+    @util.deprecated("0.7", "See :class:`.DDLEvents`, as well as "
+                     ":meth:`.DDLElement.execute_if`.")
+    def execute_at(self, event_name, target):
+        """Link execution of this DDL to the DDL lifecycle of a SchemaItem.
+
+        Links this ``DDLElement`` to a ``Table`` or ``MetaData`` instance,
+        executing it when that schema item is created or dropped. The DDL
+        statement will be executed using the same Connection and transactional
+        context as the Table create/drop itself. The ``.bind`` property of
+        this statement is ignored.
+
+        :param event_name:
+          One of the events defined in the schema item's ``.ddl_events``;
+          e.g. 'before-create', 'after-create', 'before-drop' or 'after-drop'
+
+        :param target:
+          The Table or MetaData instance with which this DDLElement will
+          be associated.
+
+        A DDLElement instance can be linked to any number of schema items.
+
+        ``execute_at`` builds on the ``append_ddl_listener`` interface of
+        :class:`.MetaData` and :class:`.Table` objects.
+
+        Caveat: Creating or dropping a Table in isolation will also trigger
+        any DDL set to ``execute_at`` that Table's MetaData. This may change
+        in a future release.
+
+        """
+
+        def call_event(target, connection, **kw):
+            if self._should_execute_deprecated(event_name,
+                                               target, connection, **kw):
+                return connection.execute(self.against(target))
+
+        event.listen(target, "" + event_name.replace('-', '_'), call_event)
+
+    @_generative
+    def against(self, target):
+        """Return a copy of this DDL against a specific schema item."""
+
+        self.target = target
+
+    @_generative
+    def execute_if(self, dialect=None, callable_=None, state=None):
+        """Return a callable that will execute this
+        DDLElement conditionally.
+
+        Used to provide a wrapper for event listening::
+
+            event.listen(
+                metadata,
+                'before_create',
+                DDL("my_ddl").execute_if(dialect='postgresql')
+            )
+
+        :param dialect: May be a string, tuple or a callable
+          predicate.
If a string, it will be compared to the name of the + executing database dialect:: + + DDL('something').execute_if(dialect='postgresql') + + If a tuple, specifies multiple dialect names:: + + DDL('something').execute_if(dialect=('postgresql', 'mysql')) + + :param callable_: A callable, which will be invoked with + four positional arguments as well as optional keyword + arguments: + + :ddl: + This DDL element. + + :target: + The :class:`.Table` or :class:`.MetaData` object which is the + target of this event. May be None if the DDL is executed + explicitly. + + :bind: + The :class:`.Connection` being used for DDL execution + + :tables: + Optional keyword argument - a list of Table objects which are to + be created/ dropped within a MetaData.create_all() or drop_all() + method call. + + :state: + Optional keyword argument - will be the ``state`` argument + passed to this function. + + :checkfirst: + Keyword argument, will be True if the 'checkfirst' flag was + set during the call to ``create()``, ``create_all()``, + ``drop()``, ``drop_all()``. + + If the callable returns a true value, the DDL statement will be + executed. + + :param state: any value which will be passed to the callable\_ + as the ``state`` keyword argument. + + .. seealso:: + + :class:`.DDLEvents` + + :ref:`event_toplevel` + + """ + self.dialect = dialect + self.callable_ = callable_ + self.state = state + + def _should_execute(self, target, bind, **kw): + if self.on is not None and \ + not self._should_execute_deprecated(None, target, bind, **kw): + return False + + if isinstance(self.dialect, util.string_types): + if self.dialect != bind.engine.name: + return False + elif isinstance(self.dialect, (tuple, list, set)): + if bind.engine.name not in self.dialect: + return False + if (self.callable_ is not None and + not self.callable_(self, target, bind, + state=self.state, **kw)): + return False + + return True + + def _should_execute_deprecated(self, event, target, bind, **kw): + if self.on is None: + return True + elif isinstance(self.on, util.string_types): + return self.on == bind.engine.name + elif isinstance(self.on, (tuple, list, set)): + return bind.engine.name in self.on + else: + return self.on(self, event, target, bind, **kw) + + def __call__(self, target, bind, **kw): + """Execute the DDL as a ddl_listener.""" + + if self._should_execute(target, bind, **kw): + return bind.execute(self.against(target)) + + def _check_ddl_on(self, on): + if (on is not None and + (not isinstance(on, util.string_types + (tuple, list, set)) and + not util.callable(on))): + raise exc.ArgumentError( + "Expected the name of a database dialect, a tuple " + "of names, or a callable for " + "'on' criteria, got type '%s'." % type(on).__name__) + + def bind(self): + if self._bind: + return self._bind + + def _set_bind(self, bind): + self._bind = bind + bind = property(bind, _set_bind) + + def _generate(self): + s = self.__class__.__new__(self.__class__) + s.__dict__ = self.__dict__.copy() + return s + + +class DDL(DDLElement): + """A literal DDL statement. + + Specifies literal SQL DDL to be executed by the database. DDL objects + function as DDL event listeners, and can be subscribed to those events + listed in :class:`.DDLEvents`, using either :class:`.Table` or + :class:`.MetaData` objects as targets. Basic templating support allows + a single DDL instance to handle repetitive tasks for multiple tables. 
+
+    Examples::
+
+      from sqlalchemy import event, DDL
+
+      tbl = Table('users', metadata, Column('uid', Integer))
+      event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))
+
+      spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE')
+      event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb'))
+
+      drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE')
+      connection.execute(drop_spow)
+
+    When operating on Table events, the following ``statement``
+    string substitutions are available::
+
+      %(table)s  - the Table name, with any required quoting applied
+      %(schema)s - the schema name, with any required quoting applied
+      %(fullname)s - the Table name including schema, quoted if needed
+
+    The DDL's "context", if any, will be combined with the standard
+    substitutions noted above. Keys present in the context will override
+    the standard substitutions.
+
+    """
+
+    __visit_name__ = "ddl"
+
+    def __init__(self, statement, on=None, context=None, bind=None):
+        """Create a DDL statement.
+
+        :param statement:
+          A string or unicode string to be executed. Statements will be
+          processed with Python's string formatting operator. See the
+          ``context`` argument and the ``execute_at`` method.
+
+          A literal '%' in a statement must be escaped as '%%'.
+
+          SQL bind parameters are not available in DDL statements.
+
+        :param on:
+          .. deprecated:: 0.7
+            See :meth:`.DDLElement.execute_if`.
+
+          Optional filtering criteria. May be a string, tuple or a callable
+          predicate. If a string, it will be compared to the name of the
+          executing database dialect::
+
+            DDL('something', on='postgresql')
+
+          If a tuple, specifies multiple dialect names::
+
+            DDL('something', on=('postgresql', 'mysql'))
+
+          If a callable, it will be invoked with four positional arguments
+          as well as optional keyword arguments:
+
+            :ddl:
+              This DDL element.
+
+            :event:
+              The name of the event that has triggered this DDL, such as
+              'after-create'. Will be None if the DDL is executed explicitly.
+
+            :target:
+              The ``Table`` or ``MetaData`` object which is the target of
+              this event. May be None if the DDL is executed explicitly.
+
+            :connection:
+              The ``Connection`` being used for DDL execution
+
+            :tables:
+              Optional keyword argument - a list of Table objects which are to
+              be created/dropped within a MetaData.create_all() or drop_all()
+              method call.
+
+
+          If the callable returns a true value, the DDL statement will be
+          executed.
+
+        :param context:
+          Optional dictionary, defaults to None. These values will be
+          available for use in string substitutions on the DDL statement.
+
+        :param bind:
+          Optional. A :class:`.Connectable`, used by
+          default when ``execute()`` is invoked without a bind argument.
+
+
+        .. seealso::
+
+            :class:`.DDLEvents`
+
+            :ref:`event_toplevel`
+
+        """
+
+        if not isinstance(statement, util.string_types):
+            raise exc.ArgumentError(
+                "Expected a string or unicode SQL statement, got '%r'" %
+                statement)
+
+        self.statement = statement
+        self.context = context or {}
+
+        self._check_ddl_on(on)
+        self.on = on
+        self._bind = bind
+
+    def __repr__(self):
+        return '<%s@%s; %s>' % (
+            type(self).__name__, id(self),
+            ', '.join([repr(self.statement)] +
+                      ['%s=%r' % (key, getattr(self, key))
+                       for key in ('on', 'context')
+                       if getattr(self, key)]))
+
+
+class _CreateDropBase(DDLElement):
+    """Base class for DDL constructs that represent CREATE and DROP or
+    equivalents.
+
+    The common theme of _CreateDropBase is a single
+    ``element`` attribute which refers to the element
+    to be created or dropped.
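+
+    For example (a sketch, assuming a :class:`.Table` named ``my_table``
+    is already defined), a construct such as :class:`.CreateTable` can be
+    stringified directly to inspect the DDL it would emit for the default
+    dialect::
+
+        from sqlalchemy.schema import CreateTable
+
+        print(CreateTable(my_table))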
+ + """ + + def __init__(self, element, on=None, bind=None): + self.element = element + self._check_ddl_on(on) + self.on = on + self.bind = bind + + def _create_rule_disable(self, compiler): + """Allow disable of _create_rule using a callable. + + Pass to _create_rule using + util.portable_instancemethod(self._create_rule_disable) + to retain serializability. + + """ + return False + + +class CreateSchema(_CreateDropBase): + """Represent a CREATE SCHEMA statement. + + .. versionadded:: 0.7.4 + + The argument here is the string name of the schema. + + """ + + __visit_name__ = "create_schema" + + def __init__(self, name, quote=None, **kw): + """Create a new :class:`.CreateSchema` construct.""" + + self.quote = quote + super(CreateSchema, self).__init__(name, **kw) + + +class DropSchema(_CreateDropBase): + """Represent a DROP SCHEMA statement. + + The argument here is the string name of the schema. + + .. versionadded:: 0.7.4 + + """ + + __visit_name__ = "drop_schema" + + def __init__(self, name, quote=None, cascade=False, **kw): + """Create a new :class:`.DropSchema` construct.""" + + self.quote = quote + self.cascade = cascade + super(DropSchema, self).__init__(name, **kw) + + +class CreateTable(_CreateDropBase): + """Represent a CREATE TABLE statement.""" + + __visit_name__ = "create_table" + + def __init__( + self, element, on=None, bind=None, + include_foreign_key_constraints=None): + """Create a :class:`.CreateTable` construct. + + :param element: a :class:`.Table` that's the subject + of the CREATE + :param on: See the description for 'on' in :class:`.DDL`. + :param bind: See the description for 'bind' in :class:`.DDL`. + :param include_foreign_key_constraints: optional sequence of + :class:`.ForeignKeyConstraint` objects that will be included + inline within the CREATE construct; if omitted, all foreign key + constraints that do not specify use_alter=True are included. + + .. versionadded:: 1.0.0 + + """ + super(CreateTable, self).__init__(element, on=on, bind=bind) + self.columns = [CreateColumn(column) + for column in element.columns + ] + self.include_foreign_key_constraints = include_foreign_key_constraints + + +class _DropView(_CreateDropBase): + """Semi-public 'DROP VIEW' construct. + + Used by the test suite for dialect-agnostic drops of views. + This object will eventually be part of a public "view" API. + + """ + __visit_name__ = "drop_view" + + +class CreateColumn(_DDLCompiles): + """Represent a :class:`.Column` as rendered in a CREATE TABLE statement, + via the :class:`.CreateTable` construct. + + This is provided to support custom column DDL within the generation + of CREATE TABLE statements, by using the + compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel` + to extend :class:`.CreateColumn`. 
+
+    Typical integration is to examine the incoming :class:`.Column`
+    object, and to redirect compilation if a particular flag or condition
+    is found::
+
+        from sqlalchemy import schema
+        from sqlalchemy.ext.compiler import compiles
+
+        @compiles(schema.CreateColumn)
+        def compile(element, compiler, **kw):
+            column = element.element
+
+            if "special" not in column.info:
+                return compiler.visit_create_column(element, **kw)
+
+            text = "%s SPECIAL DIRECTIVE %s" % (
+                column.name,
+                compiler.type_compiler.process(column.type)
+            )
+            default = compiler.get_column_default_string(column)
+            if default is not None:
+                text += " DEFAULT " + default
+
+            if not column.nullable:
+                text += " NOT NULL"
+
+            if column.constraints:
+                text += " ".join(
+                    compiler.process(const)
+                    for const in column.constraints)
+            return text
+
+    The above construct can be applied to a :class:`.Table` as follows::
+
+        from sqlalchemy import Table, MetaData, Column, Integer, String
+        from sqlalchemy import schema
+
+        metadata = MetaData()
+
+        table = Table('mytable', metadata,
+                Column('x', Integer, info={"special":True}, primary_key=True),
+                Column('y', String(50)),
+                Column('z', String(20), info={"special":True})
+            )
+
+        metadata.create_all(conn)
+
+    Above, the directives we've added to the :attr:`.Column.info` collection
+    will be detected by our custom compilation scheme::
+
+        CREATE TABLE mytable (
+                x SPECIAL DIRECTIVE INTEGER NOT NULL,
+                y VARCHAR(50),
+                z SPECIAL DIRECTIVE VARCHAR(20),
+            PRIMARY KEY (x)
+        )
+
+    The :class:`.CreateColumn` construct can also be used to skip certain
+    columns when producing a ``CREATE TABLE``.  This is accomplished by
+    creating a compilation rule that conditionally returns ``None``.
+    This is essentially how to produce the same effect as using the
+    ``system=True`` argument on :class:`.Column`, which marks a column
+    as an implicitly-present "system" column.
+
+    For example, suppose we wish to produce a :class:`.Table` which skips
+    rendering of the Postgresql ``xmin`` column against the Postgresql
+    backend, but on other backends does render it, in anticipation of a
+    triggered rule.  A conditional compilation rule could skip this name only
+    on Postgresql::
+
+        from sqlalchemy.schema import CreateColumn
+
+        @compiles(CreateColumn, "postgresql")
+        def skip_xmin(element, compiler, **kw):
+            if element.element.name == 'xmin':
+                return None
+            else:
+                return compiler.visit_create_column(element, **kw)
+
+
+        my_table = Table('mytable', metadata,
+                    Column('id', Integer, primary_key=True),
+                    Column('xmin', Integer)
+                )
+
+    Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE``
+    which only includes the ``id`` column in the string; the ``xmin`` column
+    will be omitted, but only against the Postgresql backend.
+
+    .. versionadded:: 0.8.3 The :class:`.CreateColumn` construct supports
+       skipping of columns by returning ``None`` from a custom compilation
+       rule.
+
+    .. versionadded:: 0.8 The :class:`.CreateColumn` construct was added
+       to support custom column creation styles.
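+
+    To preview what a rule like the above will emit, without connecting to a
+    database (a sketch, reusing ``my_table`` from the example above), the
+    construct can be compiled against a specific dialect::
+
+        from sqlalchemy.dialects import postgresql
+        from sqlalchemy.schema import CreateTable
+
+        print(CreateTable(my_table).compile(dialect=postgresql.dialect()))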
+ + """ + __visit_name__ = 'create_column' + + def __init__(self, element): + self.element = element + + +class DropTable(_CreateDropBase): + """Represent a DROP TABLE statement.""" + + __visit_name__ = "drop_table" + + +class CreateSequence(_CreateDropBase): + """Represent a CREATE SEQUENCE statement.""" + + __visit_name__ = "create_sequence" + + +class DropSequence(_CreateDropBase): + """Represent a DROP SEQUENCE statement.""" + + __visit_name__ = "drop_sequence" + + +class CreateIndex(_CreateDropBase): + """Represent a CREATE INDEX statement.""" + + __visit_name__ = "create_index" + + +class DropIndex(_CreateDropBase): + """Represent a DROP INDEX statement.""" + + __visit_name__ = "drop_index" + + +class AddConstraint(_CreateDropBase): + """Represent an ALTER TABLE ADD CONSTRAINT statement.""" + + __visit_name__ = "add_constraint" + + def __init__(self, element, *args, **kw): + super(AddConstraint, self).__init__(element, *args, **kw) + element._create_rule = util.portable_instancemethod( + self._create_rule_disable) + + +class DropConstraint(_CreateDropBase): + """Represent an ALTER TABLE DROP CONSTRAINT statement.""" + + __visit_name__ = "drop_constraint" + + def __init__(self, element, cascade=False, **kw): + self.cascade = cascade + super(DropConstraint, self).__init__(element, **kw) + element._create_rule = util.portable_instancemethod( + self._create_rule_disable) + + +class DDLBase(SchemaVisitor): + def __init__(self, connection): + self.connection = connection + + +class SchemaGenerator(DDLBase): + + def __init__(self, dialect, connection, checkfirst=False, + tables=None, **kwargs): + super(SchemaGenerator, self).__init__(connection, **kwargs) + self.checkfirst = checkfirst + self.tables = tables + self.preparer = dialect.identifier_preparer + self.dialect = dialect + self.memo = {} + + def _can_create_table(self, table): + self.dialect.validate_identifier(table.name) + if table.schema: + self.dialect.validate_identifier(table.schema) + return not self.checkfirst or \ + not self.dialect.has_table(self.connection, + table.name, schema=table.schema) + + def _can_create_sequence(self, sequence): + return self.dialect.supports_sequences and \ + ( + (not self.dialect.sequences_optional or + not sequence.optional) and + ( + not self.checkfirst or + not self.dialect.has_sequence( + self.connection, + sequence.name, + schema=sequence.schema) + ) + ) + + def visit_metadata(self, metadata): + if self.tables is not None: + tables = self.tables + else: + tables = list(metadata.tables.values()) + + collection = sort_tables_and_constraints( + [t for t in tables if self._can_create_table(t)]) + + seq_coll = [s for s in metadata._sequences.values() + if s.column is None and self._can_create_sequence(s)] + + event_collection = [ + t for (t, fks) in collection if t is not None + ] + metadata.dispatch.before_create(metadata, self.connection, + tables=event_collection, + checkfirst=self.checkfirst, + _ddl_runner=self) + + for seq in seq_coll: + self.traverse_single(seq, create_ok=True) + + for table, fkcs in collection: + if table is not None: + self.traverse_single( + table, create_ok=True, + include_foreign_key_constraints=fkcs, + _is_metadata_operation=True) + else: + for fkc in fkcs: + self.traverse_single(fkc) + + metadata.dispatch.after_create(metadata, self.connection, + tables=event_collection, + checkfirst=self.checkfirst, + _ddl_runner=self) + + def visit_table( + self, table, create_ok=False, + include_foreign_key_constraints=None, + _is_metadata_operation=False): + if not create_ok and 
not self._can_create_table(table): + return + + table.dispatch.before_create( + table, self.connection, + checkfirst=self.checkfirst, + _ddl_runner=self, + _is_metadata_operation=_is_metadata_operation) + + for column in table.columns: + if column.default is not None: + self.traverse_single(column.default) + + if not self.dialect.supports_alter: + # e.g., don't omit any foreign key constraints + include_foreign_key_constraints = None + + self.connection.execute( + CreateTable( + table, + include_foreign_key_constraints=include_foreign_key_constraints + )) + + if hasattr(table, 'indexes'): + for index in table.indexes: + self.traverse_single(index) + + table.dispatch.after_create( + table, self.connection, + checkfirst=self.checkfirst, + _ddl_runner=self, + _is_metadata_operation=_is_metadata_operation) + + def visit_foreign_key_constraint(self, constraint): + if not self.dialect.supports_alter: + return + self.connection.execute(AddConstraint(constraint)) + + def visit_sequence(self, sequence, create_ok=False): + if not create_ok and not self._can_create_sequence(sequence): + return + self.connection.execute(CreateSequence(sequence)) + + def visit_index(self, index): + self.connection.execute(CreateIndex(index)) + + +class SchemaDropper(DDLBase): + + def __init__(self, dialect, connection, checkfirst=False, + tables=None, **kwargs): + super(SchemaDropper, self).__init__(connection, **kwargs) + self.checkfirst = checkfirst + self.tables = tables + self.preparer = dialect.identifier_preparer + self.dialect = dialect + self.memo = {} + + def visit_metadata(self, metadata): + if self.tables is not None: + tables = self.tables + else: + tables = list(metadata.tables.values()) + + try: + unsorted_tables = [t for t in tables if self._can_drop_table(t)] + collection = list(reversed( + sort_tables_and_constraints( + unsorted_tables, + filter_fn=lambda constraint: False + if not self.dialect.supports_alter + or constraint.name is None + else None + ) + )) + except exc.CircularDependencyError as err2: + if not self.dialect.supports_alter: + util.warn( + "Can't sort tables for DROP; an " + "unresolvable foreign key " + "dependency exists between tables: %s, and backend does " + "not support ALTER. To restore at least a partial sort, " + "apply use_alter=True to ForeignKey and " + "ForeignKeyConstraint " + "objects involved in the cycle to mark these as known " + "cycles that will be ignored." + % ( + ", ".join(sorted([t.fullname for t in err2.cycles])) + ) + ) + collection = [(t, ()) for t in unsorted_tables] + else: + util.raise_from_cause( + exc.CircularDependencyError( + err2.args[0], + err2.cycles, err2.edges, + msg="Can't sort tables for DROP; an " + "unresolvable foreign key " + "dependency exists between tables: %s. Please ensure " + "that the ForeignKey and ForeignKeyConstraint objects " + "involved in the cycle have " + "names so that they can be dropped using " + "DROP CONSTRAINT." 
+                        % (
+                            ", ".join(sorted([t.fullname for t in err2.cycles]))
+                        )
+
+                    )
+                )
+
+        seq_coll = [
+            s
+            for s in metadata._sequences.values()
+            if s.column is None and self._can_drop_sequence(s)
+        ]
+
+        event_collection = [
+            t for (t, fks) in collection if t is not None
+        ]
+
+        metadata.dispatch.before_drop(
+            metadata, self.connection, tables=event_collection,
+            checkfirst=self.checkfirst, _ddl_runner=self)
+
+        for table, fkcs in collection:
+            if table is not None:
+                self.traverse_single(
+                    table, drop_ok=True, _is_metadata_operation=True)
+            else:
+                for fkc in fkcs:
+                    self.traverse_single(fkc)
+
+        for seq in seq_coll:
+            self.traverse_single(seq, drop_ok=True)
+
+        metadata.dispatch.after_drop(
+            metadata, self.connection, tables=event_collection,
+            checkfirst=self.checkfirst, _ddl_runner=self)
+
+    def _can_drop_table(self, table):
+        self.dialect.validate_identifier(table.name)
+        if table.schema:
+            self.dialect.validate_identifier(table.schema)
+        return not self.checkfirst or self.dialect.has_table(
+            self.connection, table.name, schema=table.schema)
+
+    def _can_drop_sequence(self, sequence):
+        return self.dialect.supports_sequences and \
+            ((not self.dialect.sequences_optional or
+              not sequence.optional) and
+             (not self.checkfirst or
+              self.dialect.has_sequence(
+                  self.connection,
+                  sequence.name,
+                  schema=sequence.schema))
+             )
+
+    def visit_index(self, index):
+        self.connection.execute(DropIndex(index))
+
+    def visit_table(self, table, drop_ok=False, _is_metadata_operation=False):
+        if not drop_ok and not self._can_drop_table(table):
+            return
+
+        table.dispatch.before_drop(
+            table, self.connection,
+            checkfirst=self.checkfirst,
+            _ddl_runner=self,
+            _is_metadata_operation=_is_metadata_operation)
+
+        for column in table.columns:
+            if column.default is not None:
+                self.traverse_single(column.default)
+
+        self.connection.execute(DropTable(table))
+
+        table.dispatch.after_drop(
+            table, self.connection,
+            checkfirst=self.checkfirst,
+            _ddl_runner=self,
+            _is_metadata_operation=_is_metadata_operation)
+
+    def visit_foreign_key_constraint(self, constraint):
+        if not self.dialect.supports_alter:
+            return
+        self.connection.execute(DropConstraint(constraint))
+
+    def visit_sequence(self, sequence, drop_ok=False):
+        if not drop_ok and not self._can_drop_sequence(sequence):
+            return
+        self.connection.execute(DropSequence(sequence))
+
+
+def sort_tables(tables, skip_fn=None, extra_dependencies=None):
+    """sort a collection of :class:`.Table` objects based on dependency.
+
+    This is a dependency-ordered sort which will emit :class:`.Table`
+    objects such that they will follow their dependent :class:`.Table`
+    objects.
+    Tables are dependent on one another based on the presence of
+    :class:`.ForeignKeyConstraint` objects as well as explicit dependencies
+    added by :meth:`.Table.add_is_dependent_on`.
+
+    .. warning::
+
+        The :func:`.sort_tables` function cannot by itself accommodate
+        automatic resolution of dependency cycles between tables, which
+        are usually caused by mutually dependent foreign key constraints.
+        To resolve these cycles, either the
+        :paramref:`.ForeignKeyConstraint.use_alter` parameter may be applied
+        to those constraints, or use the
+        :func:`.sql.sort_tables_and_constraints` function which will break
+        out foreign key constraints involved in cycles separately.
+
+    :param tables: a sequence of :class:`.Table` objects.
+
+    :param skip_fn: optional callable which will be passed a
+     :class:`.ForeignKey` object; if it returns True, this
+     constraint will not be considered as a dependency.
Note this is
+     **different** from the same parameter in
+     :func:`.sort_tables_and_constraints`, which is
+     instead passed the owning :class:`.ForeignKeyConstraint` object.
+
+    :param extra_dependencies: a sequence of 2-tuples of tables which will
+     also be considered as dependent on each other.
+
+    .. seealso::
+
+        :func:`.sort_tables_and_constraints`
+
+        :meth:`.MetaData.sorted_tables` - uses this function to sort
+
+
+    """
+
+    if skip_fn is not None:
+        def _skip_fn(fkc):
+            for fk in fkc.elements:
+                if skip_fn(fk):
+                    return True
+            else:
+                return None
+    else:
+        _skip_fn = None
+
+    return [
+        t for (t, fkcs) in
+        sort_tables_and_constraints(
+            tables, filter_fn=_skip_fn, extra_dependencies=extra_dependencies)
+        if t is not None
+    ]
+
+
+def sort_tables_and_constraints(
+        tables, filter_fn=None, extra_dependencies=None):
+    """sort a collection of :class:`.Table` / :class:`.ForeignKeyConstraint`
+    objects.
+
+    This is a dependency-ordered sort which will emit tuples of
+    ``(Table, [ForeignKeyConstraint, ...])`` such that each
+    :class:`.Table` follows its dependent :class:`.Table` objects.
+    Remaining :class:`.ForeignKeyConstraint` objects that are separate due to
+    dependency rules not satisfied by the sort are emitted afterwards
+    as ``(None, [ForeignKeyConstraint ...])``.
+
+    Tables are dependent on one another based on the presence of
+    :class:`.ForeignKeyConstraint` objects, explicit dependencies
+    added by :meth:`.Table.add_is_dependent_on`, as well as dependencies
+    stated here using the :paramref:`~.sort_tables_and_constraints.filter_fn`
+    and/or :paramref:`~.sort_tables_and_constraints.extra_dependencies`
+    parameters.
+
+    :param tables: a sequence of :class:`.Table` objects.
+
+    :param filter_fn: optional callable which will be passed a
+     :class:`.ForeignKeyConstraint` object, and returns a value based on
+     whether this constraint should definitely be included or excluded as
+     an inline constraint, or neither.  If it returns False, the constraint
+     will definitely be included as a dependency that cannot be subject
+     to ALTER; if True, it will **only** be included as an ALTER result at
+     the end.  Returning None means the constraint is included in the
+     table-based result unless it is detected as part of a dependency cycle.
+
+    :param extra_dependencies: a sequence of 2-tuples of tables which will
+     also be considered as dependent on each other.
+
+    .. versionadded:: 1.0.0
+
+    ..
seealso:: + + :func:`.sort_tables` + + + """ + + fixed_dependencies = set() + mutable_dependencies = set() + + if extra_dependencies is not None: + fixed_dependencies.update(extra_dependencies) + + remaining_fkcs = set() + for table in tables: + for fkc in table.foreign_key_constraints: + if fkc.use_alter is True: + remaining_fkcs.add(fkc) + continue + + if filter_fn: + filtered = filter_fn(fkc) + + if filtered is True: + remaining_fkcs.add(fkc) + continue + + dependent_on = fkc.referred_table + if dependent_on is not table: + mutable_dependencies.add((dependent_on, table)) + + fixed_dependencies.update( + (parent, table) for parent in table._extra_dependencies + ) + + try: + candidate_sort = list( + topological.sort( + fixed_dependencies.union(mutable_dependencies), tables, + deterministic_order=True + ) + ) + except exc.CircularDependencyError as err: + for edge in err.edges: + if edge in mutable_dependencies: + table = edge[1] + can_remove = [ + fkc for fkc in table.foreign_key_constraints + if filter_fn is None or filter_fn(fkc) is not False] + remaining_fkcs.update(can_remove) + for fkc in can_remove: + dependent_on = fkc.referred_table + if dependent_on is not table: + mutable_dependencies.discard((dependent_on, table)) + candidate_sort = list( + topological.sort( + fixed_dependencies.union(mutable_dependencies), tables, + deterministic_order=True + ) + ) + + return [ + (table, table.foreign_key_constraints.difference(remaining_fkcs)) + for table in candidate_sort + ] + [(None, list(remaining_fkcs))] diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/default_comparator.py b/lib/python3.4/site-packages/sqlalchemy/sql/default_comparator.py new file mode 100644 index 0000000..d180dbc --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/default_comparator.py @@ -0,0 +1,288 @@ +# sql/default_comparator.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Default implementation of SQL comparison operations. +""" + +from .. import exc, util +from . import type_api +from . import operators +from .elements import BindParameter, True_, False_, BinaryExpression, \ + Null, _const_expr, _clause_element_as_expr, \ + ClauseList, ColumnElement, TextClause, UnaryExpression, \ + collate, _is_literal, _literal_as_text, ClauseElement, and_, or_ +from .selectable import SelectBase, Alias, Selectable, ScalarSelect + + +def _boolean_compare(expr, op, obj, negate=None, reverse=False, + _python_is_types=(util.NoneType, bool), + result_type = None, + **kwargs): + + if result_type is None: + result_type = type_api.BOOLEANTYPE + + if isinstance(obj, _python_is_types + (Null, True_, False_)): + + # allow x ==/!= True/False to be treated as a literal. 
+ # this comes out to "== / != true/false" or "1/0" if those + # constants aren't supported and works on all platforms + if op in (operators.eq, operators.ne) and \ + isinstance(obj, (bool, True_, False_)): + return BinaryExpression(expr, + _literal_as_text(obj), + op, + type_=result_type, + negate=negate, modifiers=kwargs) + else: + # all other None/True/False uses IS, IS NOT + if op in (operators.eq, operators.is_): + return BinaryExpression(expr, _const_expr(obj), + operators.is_, + negate=operators.isnot) + elif op in (operators.ne, operators.isnot): + return BinaryExpression(expr, _const_expr(obj), + operators.isnot, + negate=operators.is_) + else: + raise exc.ArgumentError( + "Only '=', '!=', 'is_()', 'isnot()' operators can " + "be used with None/True/False") + else: + obj = _check_literal(expr, op, obj) + + if reverse: + return BinaryExpression(obj, + expr, + op, + type_=result_type, + negate=negate, modifiers=kwargs) + else: + return BinaryExpression(expr, + obj, + op, + type_=result_type, + negate=negate, modifiers=kwargs) + + +def _binary_operate(expr, op, obj, reverse=False, result_type=None, + **kw): + obj = _check_literal(expr, op, obj) + + if reverse: + left, right = obj, expr + else: + left, right = expr, obj + + if result_type is None: + op, result_type = left.comparator._adapt_expression( + op, right.comparator) + + return BinaryExpression( + left, right, op, type_=result_type, modifiers=kw) + + +def _conjunction_operate(expr, op, other, **kw): + if op is operators.and_: + return and_(expr, other) + elif op is operators.or_: + return or_(expr, other) + else: + raise NotImplementedError() + + +def _scalar(expr, op, fn, **kw): + return fn(expr) + + +def _in_impl(expr, op, seq_or_selectable, negate_op, **kw): + seq_or_selectable = _clause_element_as_expr(seq_or_selectable) + + if isinstance(seq_or_selectable, ScalarSelect): + return _boolean_compare(expr, op, seq_or_selectable, + negate=negate_op) + elif isinstance(seq_or_selectable, SelectBase): + + # TODO: if we ever want to support (x, y, z) IN (select x, + # y, z from table), we would need a multi-column version of + # as_scalar() to produce a multi- column selectable that + # does not export itself as a FROM clause + + return _boolean_compare( + expr, op, seq_or_selectable.as_scalar(), + negate=negate_op, **kw) + elif isinstance(seq_or_selectable, (Selectable, TextClause)): + return _boolean_compare(expr, op, seq_or_selectable, + negate=negate_op, **kw) + elif isinstance(seq_or_selectable, ClauseElement): + raise exc.InvalidRequestError( + 'in_() accepts' + ' either a list of expressions ' + 'or a selectable: %r' % seq_or_selectable) + + # Handle non selectable arguments as sequences + args = [] + for o in seq_or_selectable: + if not _is_literal(o): + if not isinstance(o, operators.ColumnOperators): + raise exc.InvalidRequestError( + 'in_() accepts' + ' either a list of expressions ' + 'or a selectable: %r' % o) + elif o is None: + o = Null() + else: + o = expr._bind_param(op, o) + args.append(o) + if len(args) == 0: + + # Special case handling for empty IN's, behave like + # comparison against zero row selectable. We use != to + # build the contradiction as it handles NULL values + # appropriately, i.e. "not (x IN ())" should not return NULL + # values for x. + + util.warn('The IN-predicate on "%s" was invoked with an ' + 'empty sequence. This results in a ' + 'contradiction, which nonetheless can be ' + 'expensive to evaluate. Consider alternative ' + 'strategies for improved performance.' 
% expr) + if op is operators.in_op: + return expr != expr + else: + return expr == expr + + return _boolean_compare(expr, op, + ClauseList(*args).self_group(against=op), + negate=negate_op) + + +def _unsupported_impl(expr, op, *arg, **kw): + raise NotImplementedError("Operator '%s' is not supported on " + "this expression" % op.__name__) + + +def _inv_impl(expr, op, **kw): + """See :meth:`.ColumnOperators.__inv__`.""" + if hasattr(expr, 'negation_clause'): + return expr.negation_clause + else: + return expr._negate() + + +def _neg_impl(expr, op, **kw): + """See :meth:`.ColumnOperators.__neg__`.""" + return UnaryExpression(expr, operator=operators.neg) + + +def _match_impl(expr, op, other, **kw): + """See :meth:`.ColumnOperators.match`.""" + + return _boolean_compare( + expr, operators.match_op, + _check_literal( + expr, operators.match_op, other), + result_type=type_api.MATCHTYPE, + negate=operators.notmatch_op + if op is operators.match_op else operators.match_op, + **kw + ) + + +def _distinct_impl(expr, op, **kw): + """See :meth:`.ColumnOperators.distinct`.""" + return UnaryExpression(expr, operator=operators.distinct_op, + type_=expr.type) + + +def _between_impl(expr, op, cleft, cright, **kw): + """See :meth:`.ColumnOperators.between`.""" + return BinaryExpression( + expr, + ClauseList( + _check_literal(expr, operators.and_, cleft), + _check_literal(expr, operators.and_, cright), + operator=operators.and_, + group=False, group_contents=False), + op, + negate=operators.notbetween_op + if op is operators.between_op + else operators.between_op, + modifiers=kw) + + +def _collate_impl(expr, op, other, **kw): + return collate(expr, other) + +# a mapping of operators with the method they use, along with +# their negated operator for comparison operators +operator_lookup = { + "and_": (_conjunction_operate,), + "or_": (_conjunction_operate,), + "inv": (_inv_impl,), + "add": (_binary_operate,), + "mul": (_binary_operate,), + "sub": (_binary_operate,), + "div": (_binary_operate,), + "mod": (_binary_operate,), + "truediv": (_binary_operate,), + "custom_op": (_binary_operate,), + "concat_op": (_binary_operate,), + "lt": (_boolean_compare, operators.ge), + "le": (_boolean_compare, operators.gt), + "ne": (_boolean_compare, operators.eq), + "gt": (_boolean_compare, operators.le), + "ge": (_boolean_compare, operators.lt), + "eq": (_boolean_compare, operators.ne), + "like_op": (_boolean_compare, operators.notlike_op), + "ilike_op": (_boolean_compare, operators.notilike_op), + "notlike_op": (_boolean_compare, operators.like_op), + "notilike_op": (_boolean_compare, operators.ilike_op), + "contains_op": (_boolean_compare, operators.notcontains_op), + "startswith_op": (_boolean_compare, operators.notstartswith_op), + "endswith_op": (_boolean_compare, operators.notendswith_op), + "desc_op": (_scalar, UnaryExpression._create_desc), + "asc_op": (_scalar, UnaryExpression._create_asc), + "nullsfirst_op": (_scalar, UnaryExpression._create_nullsfirst), + "nullslast_op": (_scalar, UnaryExpression._create_nullslast), + "in_op": (_in_impl, operators.notin_op), + "notin_op": (_in_impl, operators.in_op), + "is_": (_boolean_compare, operators.is_), + "isnot": (_boolean_compare, operators.isnot), + "collate": (_collate_impl,), + "match_op": (_match_impl,), + "notmatch_op": (_match_impl,), + "distinct_op": (_distinct_impl,), + "between_op": (_between_impl, ), + "notbetween_op": (_between_impl, ), + "neg": (_neg_impl,), + "getitem": (_unsupported_impl,), + "lshift": (_unsupported_impl,), + "rshift": (_unsupported_impl,), + 
"contains": (_unsupported_impl,), +} + + +def _check_literal(expr, operator, other): + if isinstance(other, (ColumnElement, TextClause)): + if isinstance(other, BindParameter) and \ + other.type._isnull: + other = other._clone() + other.type = expr.type + return other + elif hasattr(other, '__clause_element__'): + other = other.__clause_element__() + elif isinstance(other, type_api.TypeEngine.Comparator): + other = other.expr + + if isinstance(other, (SelectBase, Alias)): + return other.as_scalar() + elif not isinstance(other, (ColumnElement, TextClause)): + return expr._bind_param(operator, other) + else: + return other + diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/dml.py b/lib/python3.4/site-packages/sqlalchemy/sql/dml.py new file mode 100644 index 0000000..7b506f9 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/dml.py @@ -0,0 +1,846 @@ +# sql/dml.py +# Copyright (C) 2009-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +""" +Provide :class:`.Insert`, :class:`.Update` and :class:`.Delete`. + +""" + +from .base import Executable, _generative, _from_objects, DialectKWArgs +from .elements import ClauseElement, _literal_as_text, Null, and_, _clone, \ + _column_as_key +from .selectable import _interpret_as_from, _interpret_as_select, HasPrefixes +from .. import util +from .. import exc + + +class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement): + """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements. + + """ + + __visit_name__ = 'update_base' + + _execution_options = \ + Executable._execution_options.union({'autocommit': True}) + _hints = util.immutabledict() + _parameter_ordering = None + _prefixes = () + + def _process_colparams(self, parameters): + def process_single(p): + if isinstance(p, (list, tuple)): + return dict( + (c.key, pval) + for c, pval in zip(self.table.c, p) + ) + else: + return p + + if self._preserve_parameter_order and parameters is not None: + if not isinstance(parameters, list) or \ + (parameters and not isinstance(parameters[0], tuple)): + raise ValueError( + "When preserve_parameter_order is True, " + "values() only accepts a list of 2-tuples") + self._parameter_ordering = [key for key, value in parameters] + + return dict(parameters), False + + if (isinstance(parameters, (list, tuple)) and parameters and + isinstance(parameters[0], (list, tuple, dict))): + + if not self._supports_multi_parameters: + raise exc.InvalidRequestError( + "This construct does not support " + "multiple parameter sets.") + + return [process_single(p) for p in parameters], True + else: + return process_single(parameters), False + + def params(self, *arg, **kw): + """Set the parameters for the statement. + + This method raises ``NotImplementedError`` on the base class, + and is overridden by :class:`.ValuesBase` to provide the + SET/VALUES clause of UPDATE and INSERT. + + """ + raise NotImplementedError( + "params() is not supported for INSERT/UPDATE/DELETE statements." + " To set the values for an INSERT or UPDATE statement, use" + " stmt.values(**parameters).") + + def bind(self): + """Return a 'bind' linked to this :class:`.UpdateBase` + or a :class:`.Table` associated with it. 
+ + """ + return self._bind or self.table.bind + + def _set_bind(self, bind): + self._bind = bind + bind = property(bind, _set_bind) + + @_generative + def returning(self, *cols): + """Add a :term:`RETURNING` or equivalent clause to this statement. + + e.g.:: + + stmt = table.update().\\ + where(table.c.data == 'value').\\ + values(status='X').\\ + returning(table.c.server_flag, + table.c.updated_timestamp) + + for server_flag, updated_timestamp in connection.execute(stmt): + print(server_flag, updated_timestamp) + + The given collection of column expressions should be derived from + the table that is + the target of the INSERT, UPDATE, or DELETE. While :class:`.Column` + objects are typical, the elements can also be expressions:: + + stmt = table.insert().returning( + (table.c.first_name + " " + table.c.last_name). + label('fullname')) + + Upon compilation, a RETURNING clause, or database equivalent, + will be rendered within the statement. For INSERT and UPDATE, + the values are the newly inserted/updated values. For DELETE, + the values are those of the rows which were deleted. + + Upon execution, the values of the columns to be returned are made + available via the result set and can be iterated using + :meth:`.ResultProxy.fetchone` and similar. For DBAPIs which do not + natively support returning values (i.e. cx_oracle), SQLAlchemy will + approximate this behavior at the result level so that a reasonable + amount of behavioral neutrality is provided. + + Note that not all databases/DBAPIs + support RETURNING. For those backends with no support, + an exception is raised upon compilation and/or execution. + For those who do support it, the functionality across backends + varies greatly, including restrictions on executemany() + and other statements which return multiple rows. Please + read the documentation notes for the database in use in + order to determine the availability of RETURNING. + + .. seealso:: + + :meth:`.ValuesBase.return_defaults` - an alternative method tailored + towards efficient fetching of server-side defaults and triggers + for single-row INSERTs or UPDATEs. + + + """ + self._returning = cols + + @_generative + def with_hint(self, text, selectable=None, dialect_name="*"): + """Add a table hint for a single table to this + INSERT/UPDATE/DELETE statement. + + .. note:: + + :meth:`.UpdateBase.with_hint` currently applies only to + Microsoft SQL Server. For MySQL INSERT/UPDATE/DELETE hints, use + :meth:`.UpdateBase.prefix_with`. + + The text of the hint is rendered in the appropriate + location for the database backend in use, relative + to the :class:`.Table` that is the subject of this + statement, or optionally to that of the given + :class:`.Table` passed as the ``selectable`` argument. + + The ``dialect_name`` option will limit the rendering of a particular + hint to a particular backend. Such as, to add a hint + that only takes effect for SQL Server:: + + mytable.insert().with_hint("WITH (PAGLOCK)", dialect_name="mssql") + + .. versionadded:: 0.7.6 + + :param text: Text of the hint. + :param selectable: optional :class:`.Table` that specifies + an element of the FROM clause within an UPDATE or DELETE + to be the subject of the hint - applies only to certain backends. + :param dialect_name: defaults to ``*``, if specified as the name + of a particular dialect, will apply these hints only when + that dialect is in use. 
+ """ + if selectable is None: + selectable = self.table + + self._hints = self._hints.union( + {(selectable, dialect_name): text}) + + +class ValuesBase(UpdateBase): + """Supplies support for :meth:`.ValuesBase.values` to + INSERT and UPDATE constructs.""" + + __visit_name__ = 'values_base' + + _supports_multi_parameters = False + _has_multi_parameters = False + _preserve_parameter_order = False + select = None + + def __init__(self, table, values, prefixes): + self.table = _interpret_as_from(table) + self.parameters, self._has_multi_parameters = \ + self._process_colparams(values) + if prefixes: + self._setup_prefixes(prefixes) + + @_generative + def values(self, *args, **kwargs): + """specify a fixed VALUES clause for an INSERT statement, or the SET + clause for an UPDATE. + + Note that the :class:`.Insert` and :class:`.Update` constructs support + per-execution time formatting of the VALUES and/or SET clauses, + based on the arguments passed to :meth:`.Connection.execute`. + However, the :meth:`.ValuesBase.values` method can be used to "fix" a + particular set of parameters into the statement. + + Multiple calls to :meth:`.ValuesBase.values` will produce a new + construct, each one with the parameter list modified to include + the new parameters sent. In the typical case of a single + dictionary of parameters, the newly passed keys will replace + the same keys in the previous construct. In the case of a list-based + "multiple values" construct, each new list of values is extended + onto the existing list of values. + + :param \**kwargs: key value pairs representing the string key + of a :class:`.Column` mapped to the value to be rendered into the + VALUES or SET clause:: + + users.insert().values(name="some name") + + users.update().where(users.c.id==5).values(name="some name") + + :param \*args: As an alternative to passing key/value parameters, + a dictionary, tuple, or list of dictionaries or tuples can be passed + as a single positional argument in order to form the VALUES or + SET clause of the statement. The forms that are accepted vary + based on whether this is an :class:`.Insert` or an :class:`.Update` + construct. + + For either an :class:`.Insert` or :class:`.Update` construct, a + single dictionary can be passed, which works the same as that of + the kwargs form:: + + users.insert().values({"name": "some name"}) + + users.update().values({"name": "some new name"}) + + Also for either form but more typically for the :class:`.Insert` + construct, a tuple that contains an entry for every column in the + table is also accepted:: + + users.insert().values((5, "some name")) + + The :class:`.Insert` construct also supports being passed a list + of dictionaries or full-table-tuples, which on the server will + render the less common SQL syntax of "multiple values" - this + syntax is supported on backends such as SQLite, Postgresql, MySQL, + but not necessarily others:: + + users.insert().values([ + {"name": "some name"}, + {"name": "some other name"}, + {"name": "yet another name"}, + ]) + + The above form would render a multiple VALUES statement similar to:: + + INSERT INTO users (name) VALUES + (:name_1), + (:name_2), + (:name_3) + + It is essential to note that **passing multiple values is + NOT the same as using traditional executemany() form**. The above + syntax is a **special** syntax not typically used. 
To emit an + INSERT statement against multiple rows, the normal method is + to pass a multiple values list to the :meth:`.Connection.execute` + method, which is supported by all database backends and is generally + more efficient for a very large number of parameters. + + .. seealso:: + + :ref:`execute_multiple` - an introduction to + the traditional Core method of multiple parameter set + invocation for INSERTs and other statements. + + .. versionchanged:: 1.0.0 an INSERT that uses a multiple-VALUES + clause, even a list of length one, + implies that the :paramref:`.Insert.inline` flag is set to + True, indicating that the statement will not attempt to fetch + the "last inserted primary key" or other defaults. The + statement deals with an arbitrary number of rows, so the + :attr:`.ResultProxy.inserted_primary_key` accessor does not + apply. + + .. versionchanged:: 1.0.0 A multiple-VALUES INSERT now supports + columns with Python side default values and callables in the + same way as that of an "executemany" style of invocation; the + callable is invoked for each row. See :ref:`bug_3288` + for other details. + + The :class:`.Update` construct supports a special form which is a + list of 2-tuples, which when provided must be passed in conjunction + with the + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` + parameter. + This form causes the UPDATE statement to render the SET clauses + using the order of parameters given to :meth:`.Update.values`, rather + than the ordering of columns given in the :class:`.Table`. + + .. versionadded:: 1.0.10 - added support for parameter-ordered + UPDATE statements via the + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` + flag. + + .. seealso:: + + :ref:`updates_order_parameters` - full example of the + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` + flag + + .. 
seealso::
+
+            :ref:`inserts_and_updates` - SQL Expression
+            Language Tutorial
+
+            :func:`~.expression.insert` - produce an ``INSERT`` statement
+
+            :func:`~.expression.update` - produce an ``UPDATE`` statement
+
+        """
+        if self.select is not None:
+            raise exc.InvalidRequestError(
+                "This construct already inserts from a SELECT")
+        if self._has_multi_parameters and kwargs:
+            raise exc.InvalidRequestError(
+                "This construct already has multiple parameter sets.")
+
+        if args:
+            if len(args) > 1:
+                raise exc.ArgumentError(
+                    "Only a single dictionary/tuple or list of "
+                    "dictionaries/tuples is accepted positionally.")
+            v = args[0]
+        else:
+            v = {}
+
+        if self.parameters is None:
+            self.parameters, self._has_multi_parameters = \
+                self._process_colparams(v)
+        else:
+            if self._has_multi_parameters:
+                self.parameters = list(self.parameters)
+                p, self._has_multi_parameters = self._process_colparams(v)
+                if not self._has_multi_parameters:
+                    raise exc.ArgumentError(
+                        "Can't mix single-values and multiple values "
+                        "formats in one statement")
+
+                self.parameters.extend(p)
+            else:
+                self.parameters = self.parameters.copy()
+                p, self._has_multi_parameters = self._process_colparams(v)
+                if self._has_multi_parameters:
+                    raise exc.ArgumentError(
+                        "Can't mix single-values and multiple values "
+                        "formats in one statement")
+                self.parameters.update(p)
+
+        if kwargs:
+            if self._has_multi_parameters:
+                raise exc.ArgumentError(
+                    "Can't pass kwargs and multiple parameter sets "
+                    "simultaneously")
+            else:
+                self.parameters.update(kwargs)
+
+    @_generative
+    def return_defaults(self, *cols):
+        """Make use of a :term:`RETURNING` clause for the purpose
+        of fetching server-side expressions and defaults.
+
+        E.g.::
+
+            stmt = table.insert().values(data='newdata').return_defaults()
+
+            result = connection.execute(stmt)
+
+            server_created_at = result.returned_defaults['created_at']
+
+        When used against a backend that supports RETURNING, all column
+        values generated by SQL expression or server-side-default will be
+        added to any existing RETURNING clause, provided that
+        :meth:`.UpdateBase.returning` is not used simultaneously.  The column
+        values will then be available on the result using the
+        :attr:`.ResultProxy.returned_defaults` accessor as a dictionary,
+        referring to values keyed to the :class:`.Column` object as well as
+        its ``.key``.
+
+        This method differs from :meth:`.UpdateBase.returning` in these ways:
+
+        1. :meth:`.ValuesBase.return_defaults` is only intended for use with
+           an INSERT or an UPDATE statement that matches exactly one row.
+           While the RETURNING construct in the general sense supports
+           multiple rows for a multi-row UPDATE or DELETE statement, or for
+           special cases of INSERT that return multiple rows (e.g. INSERT from
+           SELECT, multi-valued VALUES clause),
+           :meth:`.ValuesBase.return_defaults` is intended only for an
+           "ORM-style" single-row INSERT/UPDATE statement.  The row returned
+           by the statement is also consumed implicitly when
+           :meth:`.ValuesBase.return_defaults` is used.  By contrast,
+           :meth:`.UpdateBase.returning` leaves the RETURNING result-set
+           intact with a collection of any number of rows.
+
+        2. It is compatible with the existing logic to fetch auto-generated
+           primary key values, also known as "implicit returning".
Backends + that support RETURNING will automatically make use of RETURNING in + order to fetch the value of newly generated primary keys; while the + :meth:`.UpdateBase.returning` method circumvents this behavior, + :meth:`.ValuesBase.return_defaults` leaves it intact. + + 3. It can be called against any backend. Backends that don't support + RETURNING will skip the usage of the feature, rather than raising + an exception. The return value of + :attr:`.ResultProxy.returned_defaults` will be ``None`` + + :meth:`.ValuesBase.return_defaults` is used by the ORM to provide + an efficient implementation for the ``eager_defaults`` feature of + :func:`.mapper`. + + :param cols: optional list of column key names or :class:`.Column` + objects. If omitted, all column expressions evaluated on the server + are added to the returning list. + + .. versionadded:: 0.9.0 + + .. seealso:: + + :meth:`.UpdateBase.returning` + + :attr:`.ResultProxy.returned_defaults` + + """ + self._return_defaults = cols or True + + +class Insert(ValuesBase): + """Represent an INSERT construct. + + The :class:`.Insert` object is created using the + :func:`~.expression.insert()` function. + + .. seealso:: + + :ref:`coretutorial_insert_expressions` + + """ + __visit_name__ = 'insert' + + _supports_multi_parameters = True + + def __init__(self, + table, + values=None, + inline=False, + bind=None, + prefixes=None, + returning=None, + return_defaults=False, + **dialect_kw): + """Construct an :class:`.Insert` object. + + Similar functionality is available via the + :meth:`~.TableClause.insert` method on + :class:`~.schema.Table`. + + :param table: :class:`.TableClause` which is the subject of the + insert. + + :param values: collection of values to be inserted; see + :meth:`.Insert.values` for a description of allowed formats here. + Can be omitted entirely; a :class:`.Insert` construct will also + dynamically render the VALUES clause at execution time based on + the parameters passed to :meth:`.Connection.execute`. + + :param inline: if True, no attempt will be made to retrieve the + SQL-generated default values to be provided within the statement; + in particular, + this allows SQL expressions to be rendered 'inline' within the + statement without the need to pre-execute them beforehand; for + backends that support "returning", this turns off the "implicit + returning" feature for the statement. + + If both `values` and compile-time bind parameters are present, the + compile-time bind parameters override the information specified + within `values` on a per-key basis. + + The keys within `values` can be either + :class:`~sqlalchemy.schema.Column` objects or their string + identifiers. Each key may reference one of: + + * a literal data value (i.e. string, number, etc.); + * a Column object; + * a SELECT statement. + + If a ``SELECT`` statement is specified which references this + ``INSERT`` statement's table, the statement will be correlated + against the ``INSERT`` statement. + + .. 
seealso:: + + :ref:`coretutorial_insert_expressions` - SQL Expression Tutorial + + :ref:`inserts_and_updates` - SQL Expression Tutorial + + """ + ValuesBase.__init__(self, table, values, prefixes) + self._bind = bind + self.select = self.select_names = None + self.include_insert_from_select_defaults = False + self.inline = inline + self._returning = returning + self._validate_dialect_kwargs(dialect_kw) + self._return_defaults = return_defaults + + def get_children(self, **kwargs): + if self.select is not None: + return self.select, + else: + return () + + @_generative + def from_select(self, names, select, include_defaults=True): + """Return a new :class:`.Insert` construct which represents + an ``INSERT...FROM SELECT`` statement. + + e.g.:: + + sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5) + ins = table2.insert().from_select(['a', 'b'], sel) + + :param names: a sequence of string column names or :class:`.Column` + objects representing the target columns. + :param select: a :func:`.select` construct, :class:`.FromClause` + or other construct which resolves into a :class:`.FromClause`, + such as an ORM :class:`.Query` object, etc. The order of + columns returned from this FROM clause should correspond to the + order of columns sent as the ``names`` parameter; while this + is not checked before passing along to the database, the database + would normally raise an exception if these column lists don't + correspond. + :param include_defaults: if True, non-server default values and + SQL expressions as specified on :class:`.Column` objects + (as documented in :ref:`metadata_defaults_toplevel`) not + otherwise specified in the list of names will be rendered + into the INSERT and SELECT statements, so that these values are also + included in the data to be inserted. + + .. note:: A Python-side default that uses a Python callable function + will only be invoked **once** for the whole statement, and **not + per row**. + + .. versionadded:: 1.0.0 - :meth:`.Insert.from_select` now renders + Python-side and SQL expression column defaults into the + SELECT statement for columns otherwise not included in the + list of column names. + + .. versionchanged:: 1.0.0 an INSERT that uses FROM SELECT + implies that the :paramref:`.insert.inline` flag is set to + True, indicating that the statement will not attempt to fetch + the "last inserted primary key" or other defaults. The statement + deals with an arbitrary number of rows, so the + :attr:`.ResultProxy.inserted_primary_key` accessor does not apply. + + .. versionadded:: 0.8.3 + + """ + if self.parameters: + raise exc.InvalidRequestError( + "This construct already inserts value expressions") + + self.parameters, self._has_multi_parameters = \ + self._process_colparams( + dict((_column_as_key(n), Null()) for n in names)) + + self.select_names = names + self.inline = True + self.include_insert_from_select_defaults = include_defaults + self.select = _interpret_as_select(select) + + def _copy_internals(self, clone=_clone, **kw): + # TODO: coverage + self.parameters = self.parameters.copy() + if self.select is not None: + self.select = _clone(self.select) + + +class Update(ValuesBase): + """Represent an Update construct. + + The :class:`.Update` object is created using the :func:`update()` + function. 
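+
+    E.g., a minimal sketch, assuming a ``users`` table and a
+    ``connection``::
+
+        stmt = users.update().where(users.c.id == 5).values(name='ed')
+        result = connection.execute(stmt)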
+ + """ + __visit_name__ = 'update' + + def __init__(self, + table, + whereclause=None, + values=None, + inline=False, + bind=None, + prefixes=None, + returning=None, + return_defaults=False, + preserve_parameter_order=False, + **dialect_kw): + """Construct an :class:`.Update` object. + + E.g.:: + + from sqlalchemy import update + + stmt = update(users).where(users.c.id==5).\\ + values(name='user #5') + + Similar functionality is available via the + :meth:`~.TableClause.update` method on + :class:`.Table`:: + + stmt = users.update().\\ + where(users.c.id==5).\\ + values(name='user #5') + + :param table: A :class:`.Table` object representing the database + table to be updated. + + :param whereclause: Optional SQL expression describing the ``WHERE`` + condition of the ``UPDATE`` statement. Modern applications + may prefer to use the generative :meth:`~Update.where()` + method to specify the ``WHERE`` clause. + + The WHERE clause can refer to multiple tables. + For databases which support this, an ``UPDATE FROM`` clause will + be generated, or on MySQL, a multi-table update. The statement + will fail on databases that don't have support for multi-table + update statements. A SQL-standard method of referring to + additional tables in the WHERE clause is to use a correlated + subquery:: + + users.update().values(name='ed').where( + users.c.name==select([addresses.c.email_address]).\\ + where(addresses.c.user_id==users.c.id).\\ + as_scalar() + ) + + .. versionchanged:: 0.7.4 + The WHERE clause can refer to multiple tables. + + :param values: + Optional dictionary which specifies the ``SET`` conditions of the + ``UPDATE``. If left as ``None``, the ``SET`` + conditions are determined from those parameters passed to the + statement during the execution and/or compilation of the + statement. When compiled standalone without any parameters, + the ``SET`` clause generates for all columns. + + Modern applications may prefer to use the generative + :meth:`.Update.values` method to set the values of the + UPDATE statement. + + :param inline: + if True, SQL defaults present on :class:`.Column` objects via + the ``default`` keyword will be compiled 'inline' into the statement + and not pre-executed. This means that their values will not + be available in the dictionary returned from + :meth:`.ResultProxy.last_updated_params`. + + :param preserve_parameter_order: if True, the update statement is + expected to receive parameters **only** via the :meth:`.Update.values` + method, and they must be passed as a Python ``list`` of 2-tuples. + The rendered UPDATE statement will emit the SET clause for each + referenced column maintaining this order. + + .. versionadded:: 1.0.10 + + .. seealso:: + + :ref:`updates_order_parameters` - full example of the + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` flag + + If both ``values`` and compile-time bind parameters are present, the + compile-time bind parameters override the information specified + within ``values`` on a per-key basis. + + The keys within ``values`` can be either :class:`.Column` + objects or their string identifiers (specifically the "key" of the + :class:`.Column`, normally but not necessarily equivalent to + its "name"). Normally, the + :class:`.Column` objects used here are expected to be + part of the target :class:`.Table` that is the table + to be updated. However when using MySQL, a multiple-table + UPDATE statement can refer to columns from any of + the tables referred to in the WHERE clause. 
+ + The values referred to in ``values`` are typically: + + * a literal data value (i.e. string, number, etc.) + * a SQL expression, such as a related :class:`.Column`, + a scalar-returning :func:`.select` construct, + etc. + + When combining :func:`.select` constructs within the values + clause of an :func:`.update` construct, + the subquery represented by the :func:`.select` should be + *correlated* to the parent table, that is, providing criterion + which links the table inside the subquery to the outer table + being updated:: + + users.update().values( + name=select([addresses.c.email_address]).\\ + where(addresses.c.user_id==users.c.id).\\ + as_scalar() + ) + + .. seealso:: + + :ref:`inserts_and_updates` - SQL Expression + Language Tutorial + + + """ + self._preserve_parameter_order = preserve_parameter_order + ValuesBase.__init__(self, table, values, prefixes) + self._bind = bind + self._returning = returning + if whereclause is not None: + self._whereclause = _literal_as_text(whereclause) + else: + self._whereclause = None + self.inline = inline + self._validate_dialect_kwargs(dialect_kw) + self._return_defaults = return_defaults + + def get_children(self, **kwargs): + if self._whereclause is not None: + return self._whereclause, + else: + return () + + def _copy_internals(self, clone=_clone, **kw): + # TODO: coverage + self._whereclause = clone(self._whereclause, **kw) + self.parameters = self.parameters.copy() + + @_generative + def where(self, whereclause): + """return a new update() construct with the given expression added to + its WHERE clause, joined to the existing clause via AND, if any. + + """ + if self._whereclause is not None: + self._whereclause = and_(self._whereclause, + _literal_as_text(whereclause)) + else: + self._whereclause = _literal_as_text(whereclause) + + @property + def _extra_froms(self): + # TODO: this could be made memoized + # if the memoization is reset on each generative call. + froms = [] + seen = set([self.table]) + + if self._whereclause is not None: + for item in _from_objects(self._whereclause): + if not seen.intersection(item._cloned_set): + froms.append(item) + seen.update(item._cloned_set) + + return froms + + +class Delete(UpdateBase): + """Represent a DELETE construct. + + The :class:`.Delete` object is created using the :func:`delete()` + function. + + """ + + __visit_name__ = 'delete' + + def __init__(self, + table, + whereclause=None, + bind=None, + returning=None, + prefixes=None, + **dialect_kw): + """Construct :class:`.Delete` object. + + Similar functionality is available via the + :meth:`~.TableClause.delete` method on + :class:`~.schema.Table`. + + :param table: The table to delete rows from. + + :param whereclause: A :class:`.ClauseElement` describing the ``WHERE`` + condition of the ``DELETE`` statement. Note that the + :meth:`~Delete.where()` generative method may be used instead. + + .. 
seealso:: + + :ref:`deletes` - SQL Expression Tutorial + + """ + self._bind = bind + self.table = _interpret_as_from(table) + self._returning = returning + + if prefixes: + self._setup_prefixes(prefixes) + + if whereclause is not None: + self._whereclause = _literal_as_text(whereclause) + else: + self._whereclause = None + + self._validate_dialect_kwargs(dialect_kw) + + def get_children(self, **kwargs): + if self._whereclause is not None: + return self._whereclause, + else: + return () + + @_generative + def where(self, whereclause): + """Add the given WHERE clause to a newly returned delete construct.""" + + if self._whereclause is not None: + self._whereclause = and_(self._whereclause, + _literal_as_text(whereclause)) + else: + self._whereclause = _literal_as_text(whereclause) + + def _copy_internals(self, clone=_clone, **kw): + # TODO: coverage + self._whereclause = clone(self._whereclause, **kw) diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/elements.py b/lib/python3.4/site-packages/sqlalchemy/sql/elements.py new file mode 100644 index 0000000..67957a1 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/elements.py @@ -0,0 +1,3951 @@ +# sql/elements.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Core SQL expression elements, including :class:`.ClauseElement`, +:class:`.ColumnElement`, and derived classes. + +""" + +from __future__ import unicode_literals + +from .. import util, exc, inspection +from . import type_api +from . import operators +from .visitors import Visitable, cloned_traverse, traverse +from .annotation import Annotated +import itertools +from .base import Executable, PARSE_AUTOCOMMIT, Immutable, NO_ARG +from .base import _generative +import numbers + +import re +import operator + + +def _clone(element, **kw): + return element._clone() + + +def collate(expression, collation): + """Return the clause ``expression COLLATE collation``. + + e.g.:: + + collate(mycolumn, 'utf8_bin') + + produces:: + + mycolumn COLLATE utf8_bin + + """ + + expr = _literal_as_binds(expression) + return BinaryExpression( + expr, + _literal_as_text(collation), + operators.collate, type_=expr.type) + + +def between(expr, lower_bound, upper_bound, symmetric=False): + """Produce a ``BETWEEN`` predicate clause. + + E.g.:: + + from sqlalchemy import between + stmt = select([users_table]).where(between(users_table.c.id, 5, 7)) + + Would produce SQL resembling:: + + SELECT id, name FROM user WHERE id BETWEEN :id_1 AND :id_2 + + The :func:`.between` function is a standalone version of the + :meth:`.ColumnElement.between` method available on all + SQL expressions, as in:: + + stmt = select([users_table]).where(users_table.c.id.between(5, 7)) + + All arguments passed to :func:`.between`, including the left side + column expression, are coerced from Python scalar values if a + the value is not a :class:`.ColumnElement` subclass. For example, + three fixed values can be compared as in:: + + print(between(5, 3, 7)) + + Which would produce:: + + :param_1 BETWEEN :param_2 AND :param_3 + + :param expr: a column expression, typically a :class:`.ColumnElement` + instance or alternatively a Python scalar expression to be coerced + into a column expression, serving as the left side of the ``BETWEEN`` + expression. 
+ + :param lower_bound: a column or Python scalar expression serving as the + lower bound of the right side of the ``BETWEEN`` expression. + + :param upper_bound: a column or Python scalar expression serving as the + upper bound of the right side of the ``BETWEEN`` expression. + + :param symmetric: if True, will render " BETWEEN SYMMETRIC ". Note + that not all databases support this syntax. + + .. versionadded:: 0.9.5 + + .. seealso:: + + :meth:`.ColumnElement.between` + + """ + expr = _literal_as_binds(expr) + return expr.between(lower_bound, upper_bound, symmetric=symmetric) + + +def literal(value, type_=None): + """Return a literal clause, bound to a bind parameter. + + Literal clauses are created automatically when non- + :class:`.ClauseElement` objects (such as strings, ints, dates, etc.) are + used in a comparison operation with a :class:`.ColumnElement` subclass, + such as a :class:`~sqlalchemy.schema.Column` object. Use this function + to force the generation of a literal clause, which will be created as a + :class:`BindParameter` with a bound value. + + :param value: the value to be bound. Can be any Python object supported by + the underlying DB-API, or is translatable via the given type argument. + + :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` which + will provide bind-parameter translation for this literal. + + """ + return BindParameter(None, value, type_=type_, unique=True) + + +def type_coerce(expression, type_): + """Associate a SQL expression with a particular type, without rendering + ``CAST``. + + E.g.:: + + from sqlalchemy import type_coerce + + stmt = select([type_coerce(log_table.date_string, StringDateTime())]) + + The above construct will produce SQL that is usually otherwise unaffected + by the :func:`.type_coerce` call:: + + SELECT date_string FROM log + + However, when result rows are fetched, the ``StringDateTime`` type + will be applied to result rows on behalf of the ``date_string`` column. + + A type that features bound-value handling will also have that behavior + take effect when literal values or :func:`.bindparam` constructs are + passed to :func:`.type_coerce` as targets. + For example, if a type implements the :meth:`.TypeEngine.bind_expression` + method or :meth:`.TypeEngine.bind_processor` method or equivalent, + these functions will take effect at statement compilation/execution time + when a literal value is passed, as in:: + + # bound-value handling of MyStringType will be applied to the + # literal value "some string" + stmt = select([type_coerce("some string", MyStringType)]) + + :func:`.type_coerce` is similar to the :func:`.cast` function, + except that it does not render the ``CAST`` expression in the resulting + statement. + + :param expression: A SQL expression, such as a :class:`.ColumnElement` + expression or a Python string which will be coerced into a bound literal + value. + + :param type_: A :class:`.TypeEngine` class or instance indicating + the type to which the expression is coerced. + + .. 
seealso:: + + :func:`.cast` + + """ + type_ = type_api.to_instance(type_) + + if hasattr(expression, '__clause_element__'): + return type_coerce(expression.__clause_element__(), type_) + elif isinstance(expression, BindParameter): + bp = expression._clone() + bp.type = type_ + return bp + elif not isinstance(expression, Visitable): + if expression is None: + return Null() + else: + return literal(expression, type_=type_) + else: + return Label(None, expression, type_=type_) + + +def outparam(key, type_=None): + """Create an 'OUT' parameter for usage in functions (stored procedures), + for databases which support them. + + The ``outparam`` can be used like a regular function parameter. + The "output" value will be available from the + :class:`~sqlalchemy.engine.ResultProxy` object via its ``out_parameters`` + attribute, which returns a dictionary containing the values. + + """ + return BindParameter( + key, None, type_=type_, unique=False, isoutparam=True) + + +def not_(clause): + """Return a negation of the given clause, i.e. ``NOT(clause)``. + + The ``~`` operator is also overloaded on all + :class:`.ColumnElement` subclasses to produce the + same result. + + """ + return operators.inv(_literal_as_binds(clause)) + + +@inspection._self_inspects +class ClauseElement(Visitable): + """Base class for elements of a programmatically constructed SQL + expression. + + """ + __visit_name__ = 'clause' + + _annotations = {} + supports_execution = False + _from_objects = [] + bind = None + _is_clone_of = None + is_selectable = False + is_clause_element = True + + description = None + _order_by_label_element = None + _is_from_container = False + + def _clone(self): + """Create a shallow copy of this ClauseElement. + + This method may be used by a generative API. Its also used as + part of the "deep" copy afforded by a traversal that combines + the _copy_internals() method. + + """ + c = self.__class__.__new__(self.__class__) + c.__dict__ = self.__dict__.copy() + ClauseElement._cloned_set._reset(c) + ColumnElement.comparator._reset(c) + + # this is a marker that helps to "equate" clauses to each other + # when a Select returns its list of FROM clauses. the cloning + # process leaves around a lot of remnants of the previous clause + # typically in the form of column expressions still attached to the + # old table. + c._is_clone_of = self + + return c + + @property + def _constructor(self): + """return the 'constructor' for this ClauseElement. + + This is for the purposes for creating a new object of + this type. Usually, its just the element's __class__. + However, the "Annotated" version of the object overrides + to return the class of its proxied element. + + """ + return self.__class__ + + @util.memoized_property + def _cloned_set(self): + """Return the set consisting all cloned ancestors of this + ClauseElement. + + Includes this ClauseElement. This accessor tends to be used for + FromClause objects to identify 'equivalent' FROM clauses, regardless + of transformative operations. + + """ + s = util.column_set() + f = self + while f is not None: + s.add(f) + f = f._is_clone_of + return s + + def __getstate__(self): + d = self.__dict__.copy() + d.pop('_is_clone_of', None) + return d + + def _annotate(self, values): + """return a copy of this ClauseElement with annotations + updated by the given dictionary. + + """ + return Annotated(self, values) + + def _with_annotations(self, values): + """return a copy of this ClauseElement with annotations + replaced by the given dictionary. 
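+
+        E.g., a minimal sketch of this internal API, assuming a simple
+        column expression::
+
+            from sqlalchemy.sql import column
+
+            annotated = column('x')._with_annotations({'tag': 'some_value'})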
+ + """ + return Annotated(self, values) + + def _deannotate(self, values=None, clone=False): + """return a copy of this :class:`.ClauseElement` with annotations + removed. + + :param values: optional tuple of individual values + to remove. + + """ + if clone: + # clone is used when we are also copying + # the expression for a deep deannotation + return self._clone() + else: + # if no clone, since we have no annotations we return + # self + return self + + def _execute_on_connection(self, connection, multiparams, params): + return connection._execute_clauseelement(self, multiparams, params) + + def unique_params(self, *optionaldict, **kwargs): + """Return a copy with :func:`bindparam()` elements replaced. + + Same functionality as ``params()``, except adds `unique=True` + to affected bind parameters so that multiple statements can be + used. + + """ + return self._params(True, optionaldict, kwargs) + + def params(self, *optionaldict, **kwargs): + """Return a copy with :func:`bindparam()` elements replaced. + + Returns a copy of this ClauseElement with :func:`bindparam()` + elements replaced with values taken from the given dictionary:: + + >>> clause = column('x') + bindparam('foo') + >>> print clause.compile().params + {'foo':None} + >>> print clause.params({'foo':7}).compile().params + {'foo':7} + + """ + return self._params(False, optionaldict, kwargs) + + def _params(self, unique, optionaldict, kwargs): + if len(optionaldict) == 1: + kwargs.update(optionaldict[0]) + elif len(optionaldict) > 1: + raise exc.ArgumentError( + "params() takes zero or one positional dictionary argument") + + def visit_bindparam(bind): + if bind.key in kwargs: + bind.value = kwargs[bind.key] + bind.required = False + if unique: + bind._convert_to_unique() + return cloned_traverse(self, {}, {'bindparam': visit_bindparam}) + + def compare(self, other, **kw): + """Compare this ClauseElement to the given ClauseElement. + + Subclasses should override the default behavior, which is a + straight identity comparison. + + \**kw are arguments consumed by subclass compare() methods and + may be used to modify the criteria for comparison. + (see :class:`.ColumnElement`) + + """ + return self is other + + def _copy_internals(self, clone=_clone, **kw): + """Reassign internal elements to be clones of themselves. + + Called during a copy-and-traverse operation on newly + shallow-copied elements to create a deep copy. + + The given clone function should be used, which may be applying + additional transformations to the element (i.e. replacement + traversal, cloned traversal, annotations). + + """ + pass + + def get_children(self, **kwargs): + """Return immediate child elements of this :class:`.ClauseElement`. + + This is used for visit traversal. + + \**kwargs may contain flags that change the collection that is + returned, for example to return a subset of items in order to + cut down on larger traversals, or to return child items from a + different context (such as schema-level collections instead of + clause-level). + + """ + return [] + + def self_group(self, against=None): + """Apply a 'grouping' to this :class:`.ClauseElement`. + + This method is overridden by subclasses to return a + "grouping" construct, i.e. parenthesis. In particular + it's used by "binary" expressions to provide a grouping + around themselves when placed into a larger expression, + as well as by :func:`.select` constructs when placed into + the FROM clause of another :func:`.select`. 
(Note that + subqueries should be normally created using the + :meth:`.Select.alias` method, as many platforms require + nested SELECT statements to be named). + + As expressions are composed together, the application of + :meth:`self_group` is automatic - end-user code should never + need to use this method directly. Note that SQLAlchemy's + clause constructs take operator precedence into account - + so parenthesis might not be needed, for example, in + an expression like ``x OR (y AND z)`` - AND takes precedence + over OR. + + The base :meth:`self_group` method of :class:`.ClauseElement` + just returns self. + """ + return self + + @util.dependencies("sqlalchemy.engine.default") + def compile(self, default, bind=None, dialect=None, **kw): + """Compile this SQL expression. + + The return value is a :class:`~.Compiled` object. + Calling ``str()`` or ``unicode()`` on the returned value will yield a + string representation of the result. The + :class:`~.Compiled` object also can return a + dictionary of bind parameter names and values + using the ``params`` accessor. + + :param bind: An ``Engine`` or ``Connection`` from which a + ``Compiled`` will be acquired. This argument takes precedence over + this :class:`.ClauseElement`'s bound engine, if any. + + :param column_keys: Used for INSERT and UPDATE statements, a list of + column names which should be present in the VALUES clause of the + compiled statement. If ``None``, all columns from the target table + object are rendered. + + :param dialect: A ``Dialect`` instance from which a ``Compiled`` + will be acquired. This argument takes precedence over the `bind` + argument as well as this :class:`.ClauseElement`'s bound engine, + if any. + + :param inline: Used for INSERT statements, for a dialect which does + not support inline retrieval of newly generated primary key + columns, will force the expression used to create the new primary + key value to be rendered inline within the INSERT statement's + VALUES clause. This typically refers to Sequence execution but may + also refer to any server-side default generation function + associated with a primary key `Column`. + + :param compile_kwargs: optional dictionary of additional parameters + that will be passed through to the compiler within all "visit" + methods. This allows any custom flag to be passed through to + a custom compilation construct, for example. It is also used + for the case of passing the ``literal_binds`` flag through:: + + from sqlalchemy.sql import table, column, select + + t = table('t', column('x')) + + s = select([t]).where(t.c.x == 5) + + print s.compile(compile_kwargs={"literal_binds": True}) + + .. versionadded:: 0.9.0 + + .. seealso:: + + :ref:`faq_sql_expression_string` + + """ + + if not dialect: + if bind: + dialect = bind.dialect + elif self.bind: + dialect = self.bind.dialect + bind = self.bind + else: + dialect = default.DefaultDialect() + return self._compiler(dialect, bind=bind, **kw) + + def _compiler(self, dialect, **kw): + """Return a compiler appropriate for this ClauseElement, given a + Dialect.""" + + return dialect.statement_compiler(dialect, self, **kw) + + def __str__(self): + if util.py3k: + return str(self.compile()) + else: + return unicode(self.compile()).encode('ascii', 'backslashreplace') + + def __and__(self, other): + """'and' at the ClauseElement level. + + .. deprecated:: 0.9.5 - conjunctions are intended to be + at the :class:`.ColumnElement`. 
level + + """ + return and_(self, other) + + def __or__(self, other): + """'or' at the ClauseElement level. + + .. deprecated:: 0.9.5 - conjunctions are intended to be + at the :class:`.ColumnElement`. level + + """ + return or_(self, other) + + def __invert__(self): + if hasattr(self, 'negation_clause'): + return self.negation_clause + else: + return self._negate() + + def _negate(self): + return UnaryExpression( + self.self_group(against=operators.inv), + operator=operators.inv, + negate=None) + + def __bool__(self): + raise TypeError("Boolean value of this clause is not defined") + + __nonzero__ = __bool__ + + def __repr__(self): + friendly = self.description + if friendly is None: + return object.__repr__(self) + else: + return '<%s.%s at 0x%x; %s>' % ( + self.__module__, self.__class__.__name__, id(self), friendly) + + +class ColumnElement(operators.ColumnOperators, ClauseElement): + """Represent a column-oriented SQL expression suitable for usage in the + "columns" clause, WHERE clause etc. of a statement. + + While the most familiar kind of :class:`.ColumnElement` is the + :class:`.Column` object, :class:`.ColumnElement` serves as the basis + for any unit that may be present in a SQL expression, including + the expressions themselves, SQL functions, bound parameters, + literal expressions, keywords such as ``NULL``, etc. + :class:`.ColumnElement` is the ultimate base class for all such elements. + + A wide variety of SQLAlchemy Core functions work at the SQL expression + level, and are intended to accept instances of :class:`.ColumnElement` as + arguments. These functions will typically document that they accept a + "SQL expression" as an argument. What this means in terms of SQLAlchemy + usually refers to an input which is either already in the form of a + :class:`.ColumnElement` object, or a value which can be **coerced** into + one. The coercion rules followed by most, but not all, SQLAlchemy Core + functions with regards to SQL expressions are as follows: + + * a literal Python value, such as a string, integer or floating + point value, boolean, datetime, ``Decimal`` object, or virtually + any other Python object, will be coerced into a "literal bound + value". This generally means that a :func:`.bindparam` will be + produced featuring the given value embedded into the construct; the + resulting :class:`.BindParameter` object is an instance of + :class:`.ColumnElement`. The Python value will ultimately be sent + to the DBAPI at execution time as a paramterized argument to the + ``execute()`` or ``executemany()`` methods, after SQLAlchemy + type-specific converters (e.g. those provided by any associated + :class:`.TypeEngine` objects) are applied to the value. + + * any special object value, typically ORM-level constructs, which + feature a method called ``__clause_element__()``. The Core + expression system looks for this method when an object of otherwise + unknown type is passed to a function that is looking to coerce the + argument into a :class:`.ColumnElement` expression. The + ``__clause_element__()`` method, if present, should return a + :class:`.ColumnElement` instance. The primary use of + ``__clause_element__()`` within SQLAlchemy is that of class-bound + attributes on ORM-mapped classes; a ``User`` class which contains a + mapped attribute named ``.name`` will have a method + ``User.name.__clause_element__()`` which when invoked returns the + :class:`.Column` called ``name`` associated with the mapped table. 
+ + * The Python ``None`` value is typically interpreted as ``NULL``, + which in SQLAlchemy Core produces an instance of :func:`.null`. + + A :class:`.ColumnElement` provides the ability to generate new + :class:`.ColumnElement` + objects using Python expressions. This means that Python operators + such as ``==``, ``!=`` and ``<`` are overloaded to mimic SQL operations, + and allow the instantiation of further :class:`.ColumnElement` instances + which are composed from other, more fundamental :class:`.ColumnElement` + objects. For example, two :class:`.ColumnClause` objects can be added + together with the addition operator ``+`` to produce + a :class:`.BinaryExpression`. + Both :class:`.ColumnClause` and :class:`.BinaryExpression` are subclasses + of :class:`.ColumnElement`:: + + >>> from sqlalchemy.sql import column + >>> column('a') + column('b') + + >>> print column('a') + column('b') + a + b + + .. seealso:: + + :class:`.Column` + + :func:`.expression.column` + + """ + + __visit_name__ = 'column' + primary_key = False + foreign_keys = [] + + _label = None + """The named label that can be used to target + this column in a result set. + + This label is almost always the label used when + rendering AS AS "; typically columns that don't have + any parent table and are named the same as what the label would be + in any case. + + """ + + _resolve_label = None + """The name that should be used to identify this ColumnElement in a + select() object when "label resolution" logic is used; this refers + to using a string name in an expression like order_by() or group_by() + that wishes to target a labeled expression in the columns clause. + + The name is distinct from that of .name or ._label to account for the case + where anonymizing logic may be used to change the name that's actually + rendered at compile time; this attribute should hold onto the original + name that was user-assigned when producing a .label() construct. 
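+
+    E.g., the kind of resolution this supports, as a sketch assuming a
+    ``users`` table::
+
+        stmt = select([users.c.name.label('n')]).order_by('n')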
+ + """ + + _allow_label_resolve = True + """A flag that can be flipped to prevent a column from being resolvable + by string label name.""" + + _alt_names = () + + def self_group(self, against=None): + if (against in (operators.and_, operators.or_, operators._asbool) and + self.type._type_affinity + is type_api.BOOLEANTYPE._type_affinity): + return AsBoolean(self, operators.istrue, operators.isfalse) + else: + return self + + def _negate(self): + if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity: + return AsBoolean(self, operators.isfalse, operators.istrue) + else: + return super(ColumnElement, self)._negate() + + @util.memoized_property + def type(self): + return type_api.NULLTYPE + + @util.memoized_property + def comparator(self): + try: + comparator_factory = self.type.comparator_factory + except AttributeError: + raise TypeError( + "Object %r associated with '.type' attribute " + "is not a TypeEngine class or object" % self.type) + else: + return comparator_factory(self) + + def __getattr__(self, key): + try: + return getattr(self.comparator, key) + except AttributeError: + raise AttributeError( + 'Neither %r object nor %r object has an attribute %r' % ( + type(self).__name__, + type(self.comparator).__name__, + key) + ) + + def operate(self, op, *other, **kwargs): + return op(self.comparator, *other, **kwargs) + + def reverse_operate(self, op, other, **kwargs): + return op(other, self.comparator, **kwargs) + + def _bind_param(self, operator, obj): + return BindParameter(None, obj, + _compared_to_operator=operator, + _compared_to_type=self.type, unique=True) + + @property + def expression(self): + """Return a column expression. + + Part of the inspection interface; returns self. + + """ + return self + + @property + def _select_iterable(self): + return (self, ) + + @util.memoized_property + def base_columns(self): + return util.column_set(c for c in self.proxy_set + if not hasattr(c, '_proxies')) + + @util.memoized_property + def proxy_set(self): + s = util.column_set([self]) + if hasattr(self, '_proxies'): + for c in self._proxies: + s.update(c.proxy_set) + return s + + def shares_lineage(self, othercolumn): + """Return True if the given :class:`.ColumnElement` + has a common ancestor to this :class:`.ColumnElement`.""" + + return bool(self.proxy_set.intersection(othercolumn.proxy_set)) + + def _compare_name_for_result(self, other): + """Return True if the given column element compares to this one + when targeting within a result row.""" + + return hasattr(other, 'name') and hasattr(self, 'name') and \ + other.name == self.name + + def _make_proxy( + self, selectable, name=None, name_is_truncatable=False, **kw): + """Create a new :class:`.ColumnElement` representing this + :class:`.ColumnElement` as it appears in the select list of a + descending selectable. + + """ + if name is None: + name = self.anon_label + if self.key: + key = self.key + else: + try: + key = str(self) + except exc.UnsupportedCompilationError: + key = self.anon_label + + else: + key = name + co = ColumnClause( + _as_truncated(name) if name_is_truncatable else name, + type_=getattr(self, 'type', None), + _selectable=selectable + ) + co._proxies = [self] + if selectable._is_clone_of is not None: + co._is_clone_of = \ + selectable._is_clone_of.columns.get(key) + selectable._columns[key] = co + return co + + def compare(self, other, use_proxies=False, equivalents=None, **kw): + """Compare this ColumnElement to another. 
+ + Special arguments understood: + + :param use_proxies: when True, consider two columns that + share a common base column as equivalent (i.e. shares_lineage()) + + :param equivalents: a dictionary of columns as keys mapped to sets + of columns. If the given "other" column is present in this + dictionary, if any of the columns in the corresponding set() pass + the comparison test, the result is True. This is used to expand the + comparison to other columns that may be known to be equivalent to + this one via foreign key or other criterion. + + """ + to_compare = (other, ) + if equivalents and other in equivalents: + to_compare = equivalents[other].union(to_compare) + + for oth in to_compare: + if use_proxies and self.shares_lineage(oth): + return True + elif hash(oth) == hash(self): + return True + else: + return False + + def cast(self, type_): + """Produce a type cast, i.e. ``CAST( AS )``. + + This is a shortcut to the :func:`~.expression.cast` function. + + .. versionadded:: 1.0.7 + + """ + return Cast(self, type_) + + def label(self, name): + """Produce a column label, i.e. `` AS ``. + + This is a shortcut to the :func:`~.expression.label` function. + + if 'name' is None, an anonymous label name will be generated. + + """ + return Label(name, self, self.type) + + @util.memoized_property + def anon_label(self): + """provides a constant 'anonymous label' for this ColumnElement. + + This is a label() expression which will be named at compile time. + The same label() is returned each time anon_label is called so + that expressions can reference anon_label multiple times, producing + the same label name at compile time. + + the compiler uses this function automatically at compile time + for expressions that are known to be 'unnamed' like binary + expressions and function calls. + + """ + while self._is_clone_of is not None: + self = self._is_clone_of + + return _anonymous_label( + '%%(%d %s)s' % (id(self), getattr(self, 'name', 'anon')) + ) + + +class BindParameter(ColumnElement): + """Represent a "bound expression". + + :class:`.BindParameter` is invoked explicitly using the + :func:`.bindparam` function, as in:: + + from sqlalchemy import bindparam + + stmt = select([users_table]).\\ + where(users_table.c.name == bindparam('username')) + + Detailed discussion of how :class:`.BindParameter` is used is + at :func:`.bindparam`. + + .. seealso:: + + :func:`.bindparam` + + """ + + __visit_name__ = 'bindparam' + + _is_crud = False + + def __init__(self, key, value=NO_ARG, type_=None, + unique=False, required=NO_ARG, + quote=None, callable_=None, + isoutparam=False, + _compared_to_operator=None, + _compared_to_type=None): + """Produce a "bound expression". + + The return value is an instance of :class:`.BindParameter`; this + is a :class:`.ColumnElement` subclass which represents a so-called + "placeholder" value in a SQL expression, the value of which is + supplied at the point at which the statement in executed against a + database connection. + + In SQLAlchemy, the :func:`.bindparam` construct has + the ability to carry along the actual value that will be ultimately + used at expression time. In this way, it serves not just as + a "placeholder" for eventual population, but also as a means of + representing so-called "unsafe" values which should not be rendered + directly in a SQL statement, but rather should be passed along + to the :term:`DBAPI` as values which need to be correctly escaped + and potentially handled for type-safety. 
+ + When using :func:`.bindparam` explicitly, the use case is typically + one of traditional deferment of parameters; the :func:`.bindparam` + construct accepts a name which can then be referred to at execution + time:: + + from sqlalchemy import bindparam + + stmt = select([users_table]).\\ + where(users_table.c.name == bindparam('username')) + + The above statement, when rendered, will produce SQL similar to:: + + SELECT id, name FROM user WHERE name = :username + + In order to populate the value of ``:username`` above, the value + would typically be applied at execution time to a method + like :meth:`.Connection.execute`:: + + result = connection.execute(stmt, username='wendy') + + Explicit use of :func:`.bindparam` is also common when producing + UPDATE or DELETE statements that are to be invoked multiple times, + where the WHERE criterion of the statement is to change on each + invocation, such as:: + + stmt = (users_table.update(). + where(user_table.c.name == bindparam('username')). + values(fullname=bindparam('fullname')) + ) + + connection.execute( + stmt, [{"username": "wendy", "fullname": "Wendy Smith"}, + {"username": "jack", "fullname": "Jack Jones"}, + ] + ) + + SQLAlchemy's Core expression system makes wide use of + :func:`.bindparam` in an implicit sense. It is typical that Python + literal values passed to virtually all SQL expression functions are + coerced into fixed :func:`.bindparam` constructs. For example, given + a comparison operation such as:: + + expr = users_table.c.name == 'Wendy' + + The above expression will produce a :class:`.BinaryExpression` + construct, where the left side is the :class:`.Column` object + representing the ``name`` column, and the right side is a + :class:`.BindParameter` representing the literal value:: + + print(repr(expr.right)) + BindParameter('%(4327771088 name)s', 'Wendy', type_=String()) + + The expression above will render SQL such as:: + + user.name = :name_1 + + Where the ``:name_1`` parameter name is an anonymous name. The + actual string ``Wendy`` is not in the rendered string, but is carried + along where it is later used within statement execution. If we + invoke a statement like the following:: + + stmt = select([users_table]).where(users_table.c.name == 'Wendy') + result = connection.execute(stmt) + + We would see SQL logging output as:: + + SELECT "user".id, "user".name + FROM "user" + WHERE "user".name = %(name_1)s + {'name_1': 'Wendy'} + + Above, we see that ``Wendy`` is passed as a parameter to the database, + while the placeholder ``:name_1`` is rendered in the appropriate form + for the target database, in this case the Postgresql database. + + Similarly, :func:`.bindparam` is invoked automatically + when working with :term:`CRUD` statements as far as the "VALUES" + portion is concerned. The :func:`.insert` construct produces an + ``INSERT`` expression which will, at statement execution time, + generate bound placeholders based on the arguments passed, as in:: + + stmt = users_table.insert() + result = connection.execute(stmt, name='Wendy') + + The above will produce SQL output as:: + + INSERT INTO "user" (name) VALUES (%(name)s) + {'name': 'Wendy'} + + The :class:`.Insert` construct, at compilation/execution time, + rendered a single :func:`.bindparam` mirroring the column + name ``name`` as a result of the single ``name`` parameter + we passed to the :meth:`.Connection.execute` method. + + :param key: + the key (e.g. the name) for this bind param. 
+ Will be used in the generated + SQL statement for dialects that use named parameters. This + value may be modified when part of a compilation operation, + if other :class:`BindParameter` objects exist with the same + key, or if its length is too long and truncation is + required. + + :param value: + Initial value for this bind param. Will be used at statement + execution time as the value for this parameter passed to the + DBAPI, if no other value is indicated to the statement execution + method for this particular parameter name. Defaults to ``None``. + + :param callable\_: + A callable function that takes the place of "value". The function + will be called at statement execution time to determine the + ultimate value. Used for scenarios where the actual bind + value cannot be determined at the point at which the clause + construct is created, but embedded bind values are still desirable. + + :param type\_: + A :class:`.TypeEngine` class or instance representing an optional + datatype for this :func:`.bindparam`. If not passed, a type + may be determined automatically for the bind, based on the given + value; for example, trivial Python types such as ``str``, + ``int``, ``bool`` + may result in the :class:`.String`, :class:`.Integer` or + :class:`.Boolean` types being autoamtically selected. + + The type of a :func:`.bindparam` is significant especially in that + the type will apply pre-processing to the value before it is + passed to the database. For example, a :func:`.bindparam` which + refers to a datetime value, and is specified as holding the + :class:`.DateTime` type, may apply conversion needed to the + value (such as stringification on SQLite) before passing the value + to the database. + + :param unique: + if True, the key name of this :class:`.BindParameter` will be + modified if another :class:`.BindParameter` of the same name + already has been located within the containing + expression. This flag is used generally by the internals + when producing so-called "anonymous" bound expressions, it + isn't generally applicable to explicitly-named :func:`.bindparam` + constructs. + + :param required: + If ``True``, a value is required at execution time. If not passed, + it defaults to ``True`` if neither :paramref:`.bindparam.value` + or :paramref:`.bindparam.callable` were passed. If either of these + parameters are present, then :paramref:`.bindparam.required` + defaults to ``False``. + + .. versionchanged:: 0.8 If the ``required`` flag is not specified, + it will be set automatically to ``True`` or ``False`` depending + on whether or not the ``value`` or ``callable`` parameters + were specified. + + :param quote: + True if this parameter name requires quoting and is not + currently known as a SQLAlchemy reserved word; this currently + only applies to the Oracle backend, where bound names must + sometimes be quoted. + + :param isoutparam: + if True, the parameter should be treated like a stored procedure + "OUT" parameter. This applies to backends such as Oracle which + support OUT parameters. + + .. 
seealso:: + + :ref:`coretutorial_bind_param` + + :ref:`coretutorial_insert_expressions` + + :func:`.outparam` + + """ + if isinstance(key, ColumnClause): + type_ = key.type + key = key.key + if required is NO_ARG: + required = (value is NO_ARG and callable_ is None) + if value is NO_ARG: + value = None + + if quote is not None: + key = quoted_name(key, quote) + + if unique: + self.key = _anonymous_label('%%(%d %s)s' % (id(self), key + or 'param')) + else: + self.key = key or _anonymous_label('%%(%d param)s' + % id(self)) + + # identifying key that won't change across + # clones, used to identify the bind's logical + # identity + self._identifying_key = self.key + + # key that was passed in the first place, used to + # generate new keys + self._orig_key = key or 'param' + + self.unique = unique + self.value = value + self.callable = callable_ + self.isoutparam = isoutparam + self.required = required + if type_ is None: + if _compared_to_type is not None: + self.type = \ + _compared_to_type.coerce_compared_value( + _compared_to_operator, value) + else: + self.type = type_api._type_map.get(type(value), + type_api.NULLTYPE) + elif isinstance(type_, type): + self.type = type_() + else: + self.type = type_ + + def _with_value(self, value): + """Return a copy of this :class:`.BindParameter` with the given value + set. + """ + cloned = self._clone() + cloned.value = value + cloned.callable = None + cloned.required = False + if cloned.type is type_api.NULLTYPE: + cloned.type = type_api._type_map.get(type(value), + type_api.NULLTYPE) + return cloned + + @property + def effective_value(self): + """Return the value of this bound parameter, + taking into account if the ``callable`` parameter + was set. + + The ``callable`` value will be evaluated + and returned if present, else ``value``. + + """ + if self.callable: + return self.callable() + else: + return self.value + + def _clone(self): + c = ClauseElement._clone(self) + if self.unique: + c.key = _anonymous_label('%%(%d %s)s' % (id(c), c._orig_key + or 'param')) + return c + + def _convert_to_unique(self): + if not self.unique: + self.unique = True + self.key = _anonymous_label( + '%%(%d %s)s' % (id(self), self._orig_key or 'param')) + + def compare(self, other, **kw): + """Compare this :class:`BindParameter` to the given + clause.""" + + return isinstance(other, BindParameter) \ + and self.type._compare_type_affinity(other.type) \ + and self.value == other.value + + def __getstate__(self): + """execute a deferred value for serialization purposes.""" + + d = self.__dict__.copy() + v = self.value + if self.callable: + v = self.callable() + d['callable'] = None + d['value'] = v + return d + + def __repr__(self): + return 'BindParameter(%r, %r, type_=%r)' % (self.key, + self.value, self.type) + + +class TypeClause(ClauseElement): + """Handle a type keyword in a SQL statement. + + Used by the ``Case`` statement. + + """ + + __visit_name__ = 'typeclause' + + def __init__(self, type): + self.type = type + + +class TextClause(Executable, ClauseElement): + """Represent a literal SQL text fragment. + + E.g.:: + + from sqlalchemy import text + + t = text("SELECT * FROM users") + result = connection.execute(t) + + + The :class:`.Text` construct is produced using the :func:`.text` + function; see that function for full documentation. + + .. seealso:: + + :func:`.text` + + """ + + __visit_name__ = 'textclause' + + _bind_params_regex = re.compile(r'(?`` + to specify bind parameters; they will be compiled to their + engine-specific format. 
+ + :param autocommit: + Deprecated. Use .execution_options(autocommit=) + to set the autocommit option. + + :param bind: + an optional connection or engine to be used for this text query. + + :param bindparams: + Deprecated. A list of :func:`.bindparam` instances used to + provide information about parameters embedded in the statement. + This argument now invokes the :meth:`.TextClause.bindparams` + method on the construct before returning it. E.g.:: + + stmt = text("SELECT * FROM table WHERE id=:id", + bindparams=[bindparam('id', value=5, type_=Integer)]) + + Is equivalent to:: + + stmt = text("SELECT * FROM table WHERE id=:id").\\ + bindparams(bindparam('id', value=5, type_=Integer)) + + .. deprecated:: 0.9.0 the :meth:`.TextClause.bindparams` method + supersedes the ``bindparams`` argument to :func:`.text`. + + :param typemap: + Deprecated. A dictionary mapping the names of columns + represented in the columns clause of a ``SELECT`` statement + to type objects, + which will be used to perform post-processing on columns within + the result set. This parameter now invokes the + :meth:`.TextClause.columns` method, which returns a + :class:`.TextAsFrom` construct that gains a ``.c`` collection and + can be embedded in other expressions. E.g.:: + + stmt = text("SELECT * FROM table", + typemap={'id': Integer, 'name': String}, + ) + + Is equivalent to:: + + stmt = text("SELECT * FROM table").columns(id=Integer, + name=String) + + Or alternatively:: + + from sqlalchemy.sql import column + stmt = text("SELECT * FROM table").columns( + column('id', Integer), + column('name', String) + ) + + .. deprecated:: 0.9.0 the :meth:`.TextClause.columns` method + supersedes the ``typemap`` argument to :func:`.text`. + + """ + stmt = TextClause(text, bind=bind) + if bindparams: + stmt = stmt.bindparams(*bindparams) + if typemap: + stmt = stmt.columns(**typemap) + if autocommit is not None: + util.warn_deprecated('autocommit on text() is deprecated. ' + 'Use .execution_options(autocommit=True)') + stmt = stmt.execution_options(autocommit=autocommit) + + return stmt + + @_generative + def bindparams(self, *binds, **names_to_values): + """Establish the values and/or types of bound parameters within + this :class:`.TextClause` construct. + + Given a text construct such as:: + + from sqlalchemy import text + stmt = text("SELECT id, name FROM user WHERE name=:name " + "AND timestamp=:timestamp") + + the :meth:`.TextClause.bindparams` method can be used to establish + the initial value of ``:name`` and ``:timestamp``, + using simple keyword arguments:: + + stmt = stmt.bindparams(name='jack', + timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5)) + + Where above, new :class:`.BindParameter` objects + will be generated with the names ``name`` and ``timestamp``, and + values of ``jack`` and ``datetime.datetime(2012, 10, 8, 15, 12, 5)``, + respectively. The types will be + inferred from the values given, in this case :class:`.String` and + :class:`.DateTime`. + + When specific typing behavior is needed, the positional ``*binds`` + argument can be used in which to specify :func:`.bindparam` constructs + directly. These constructs must include at least the ``key`` + argument, then an optional value and type:: + + from sqlalchemy import bindparam + stmt = stmt.bindparams( + bindparam('name', value='jack', type_=String), + bindparam('timestamp', type_=DateTime) + ) + + Above, we specified the type of :class:`.DateTime` for the + ``timestamp`` bind, and the type of :class:`.String` for the ``name`` + bind. 
In the case of ``name`` we also set the default value of + ``"jack"``. + + Additional bound parameters can be supplied at statement execution + time, e.g.:: + + result = connection.execute(stmt, + timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5)) + + The :meth:`.TextClause.bindparams` method can be called repeatedly, + where it will re-use existing :class:`.BindParameter` objects to add + new information. For example, we can call + :meth:`.TextClause.bindparams` first with typing information, and a + second time with value information, and it will be combined:: + + stmt = text("SELECT id, name FROM user WHERE name=:name " + "AND timestamp=:timestamp") + stmt = stmt.bindparams( + bindparam('name', type_=String), + bindparam('timestamp', type_=DateTime) + ) + stmt = stmt.bindparams( + name='jack', + timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5) + ) + + + .. versionadded:: 0.9.0 The :meth:`.TextClause.bindparams` method + supersedes the argument ``bindparams`` passed to + :func:`~.expression.text`. + + + """ + self._bindparams = new_params = self._bindparams.copy() + + for bind in binds: + try: + existing = new_params[bind.key] + except KeyError: + raise exc.ArgumentError( + "This text() construct doesn't define a " + "bound parameter named %r" % bind.key) + else: + new_params[existing.key] = bind + + for key, value in names_to_values.items(): + try: + existing = new_params[key] + except KeyError: + raise exc.ArgumentError( + "This text() construct doesn't define a " + "bound parameter named %r" % key) + else: + new_params[key] = existing._with_value(value) + + @util.dependencies('sqlalchemy.sql.selectable') + def columns(self, selectable, *cols, **types): + """Turn this :class:`.TextClause` object into a :class:`.TextAsFrom` + object that can be embedded into another statement. + + This function essentially bridges the gap between an entirely + textual SELECT statement and the SQL expression language concept + of a "selectable":: + + from sqlalchemy.sql import column, text + + stmt = text("SELECT id, name FROM some_table") + stmt = stmt.columns(column('id'), column('name')).alias('st') + + stmt = select([mytable]).\\ + select_from( + mytable.join(stmt, mytable.c.name == stmt.c.name) + ).where(stmt.c.id > 5) + + Above, we used untyped :func:`.column` elements. These can also have + types specified, which will impact how the column behaves in + expressions as well as determining result set behavior:: + + stmt = text("SELECT id, name, timestamp FROM some_table") + stmt = stmt.columns( + column('id', Integer), + column('name', Unicode), + column('timestamp', DateTime) + ) + + for id, name, timestamp in connection.execute(stmt): + print(id, name, timestamp) + + Keyword arguments allow just the names and types of columns to be + specified, where the :func:`.column` elements will be generated + automatically:: + + stmt = text("SELECT id, name, timestamp FROM some_table") + stmt = stmt.columns( + id=Integer, + name=Unicode, + timestamp=DateTime + ) + + for id, name, timestamp in connection.execute(stmt): + print(id, name, timestamp) + + The :meth:`.TextClause.columns` method provides a direct + route to calling :meth:`.FromClause.alias` as well as + :meth:`.SelectBase.cte` against a textual SELECT statement:: + + stmt = stmt.columns(id=Integer, name=String).cte('st') + + stmt = select([sometable]).where(sometable.c.id == stmt.c.id) + + .. versionadded:: 0.9.0 :func:`.text` can now be converted into a + fully featured "selectable" construct using the + :meth:`.TextClause.columns` method. 
This method supersedes the + ``typemap`` argument to :func:`.text`. + + """ + + input_cols = [ + ColumnClause(col.key, types.pop(col.key)) + if col.key in types + else col + for col in cols + ] + [ColumnClause(key, type_) for key, type_ in types.items()] + return selectable.TextAsFrom(self, input_cols) + + @property + def type(self): + return type_api.NULLTYPE + + @property + def comparator(self): + return self.type.comparator_factory(self) + + def self_group(self, against=None): + if against is operators.in_op: + return Grouping(self) + else: + return self + + def _copy_internals(self, clone=_clone, **kw): + self._bindparams = dict((b.key, clone(b, **kw)) + for b in self._bindparams.values()) + + def get_children(self, **kwargs): + return list(self._bindparams.values()) + + def compare(self, other): + return isinstance(other, TextClause) and other.text == self.text + + +class Null(ColumnElement): + """Represent the NULL keyword in a SQL statement. + + :class:`.Null` is accessed as a constant via the + :func:`.null` function. + + """ + + __visit_name__ = 'null' + + @util.memoized_property + def type(self): + return type_api.NULLTYPE + + @classmethod + def _instance(cls): + """Return a constant :class:`.Null` construct.""" + + return Null() + + def compare(self, other): + return isinstance(other, Null) + + +class False_(ColumnElement): + """Represent the ``false`` keyword, or equivalent, in a SQL statement. + + :class:`.False_` is accessed as a constant via the + :func:`.false` function. + + """ + + __visit_name__ = 'false' + + @util.memoized_property + def type(self): + return type_api.BOOLEANTYPE + + def _negate(self): + return True_() + + @classmethod + def _instance(cls): + """Return a :class:`.False_` construct. + + E.g.:: + + >>> from sqlalchemy import false + >>> print select([t.c.x]).where(false()) + SELECT x FROM t WHERE false + + A backend which does not support true/false constants will render as + an expression against 1 or 0:: + + >>> print select([t.c.x]).where(false()) + SELECT x FROM t WHERE 0 = 1 + + The :func:`.true` and :func:`.false` constants also feature + "short circuit" operation within an :func:`.and_` or :func:`.or_` + conjunction:: + + >>> print select([t.c.x]).where(or_(t.c.x > 5, true())) + SELECT x FROM t WHERE true + + >>> print select([t.c.x]).where(and_(t.c.x > 5, false())) + SELECT x FROM t WHERE false + + .. versionchanged:: 0.9 :func:`.true` and :func:`.false` feature + better integrated behavior within conjunctions and on dialects + that don't support true/false constants. + + .. seealso:: + + :func:`.true` + + """ + + return False_() + + def compare(self, other): + return isinstance(other, False_) + + +class True_(ColumnElement): + """Represent the ``true`` keyword, or equivalent, in a SQL statement. + + :class:`.True_` is accessed as a constant via the + :func:`.true` function. + + """ + + __visit_name__ = 'true' + + @util.memoized_property + def type(self): + return type_api.BOOLEANTYPE + + def _negate(self): + return False_() + + @classmethod + def _ifnone(cls, other): + if other is None: + return cls._instance() + else: + return other + + @classmethod + def _instance(cls): + """Return a constant :class:`.True_` construct. 
+ + E.g.:: + + >>> from sqlalchemy import true + >>> print select([t.c.x]).where(true()) + SELECT x FROM t WHERE true + + A backend which does not support true/false constants will render as + an expression against 1 or 0:: + + >>> print select([t.c.x]).where(true()) + SELECT x FROM t WHERE 1 = 1 + + The :func:`.true` and :func:`.false` constants also feature + "short circuit" operation within an :func:`.and_` or :func:`.or_` + conjunction:: + + >>> print select([t.c.x]).where(or_(t.c.x > 5, true())) + SELECT x FROM t WHERE true + + >>> print select([t.c.x]).where(and_(t.c.x > 5, false())) + SELECT x FROM t WHERE false + + .. versionchanged:: 0.9 :func:`.true` and :func:`.false` feature + better integrated behavior within conjunctions and on dialects + that don't support true/false constants. + + .. seealso:: + + :func:`.false` + + """ + + return True_() + + def compare(self, other): + return isinstance(other, True_) + + +class ClauseList(ClauseElement): + """Describe a list of clauses, separated by an operator. + + By default, is comma-separated, such as a column listing. + + """ + __visit_name__ = 'clauselist' + + def __init__(self, *clauses, **kwargs): + self.operator = kwargs.pop('operator', operators.comma_op) + self.group = kwargs.pop('group', True) + self.group_contents = kwargs.pop('group_contents', True) + text_converter = kwargs.pop( + '_literal_as_text', + _expression_literal_as_text) + if self.group_contents: + self.clauses = [ + text_converter(clause).self_group(against=self.operator) + for clause in clauses] + else: + self.clauses = [ + text_converter(clause) + for clause in clauses] + + def __iter__(self): + return iter(self.clauses) + + def __len__(self): + return len(self.clauses) + + @property + def _select_iterable(self): + return iter(self) + + def append(self, clause): + if self.group_contents: + self.clauses.append(_literal_as_text(clause). + self_group(against=self.operator)) + else: + self.clauses.append(_literal_as_text(clause)) + + def _copy_internals(self, clone=_clone, **kw): + self.clauses = [clone(clause, **kw) for clause in self.clauses] + + def get_children(self, **kwargs): + return self.clauses + + @property + def _from_objects(self): + return list(itertools.chain(*[c._from_objects for c in self.clauses])) + + def self_group(self, against=None): + if self.group and operators.is_precedent(self.operator, against): + return Grouping(self) + else: + return self + + def compare(self, other, **kw): + """Compare this :class:`.ClauseList` to the given :class:`.ClauseList`, + including a comparison of all the clause items. 
+ + """ + if not isinstance(other, ClauseList) and len(self.clauses) == 1: + return self.clauses[0].compare(other, **kw) + elif isinstance(other, ClauseList) and \ + len(self.clauses) == len(other.clauses): + for i in range(0, len(self.clauses)): + if not self.clauses[i].compare(other.clauses[i], **kw): + return False + else: + return self.operator == other.operator + else: + return False + + +class BooleanClauseList(ClauseList, ColumnElement): + __visit_name__ = 'clauselist' + + def __init__(self, *arg, **kw): + raise NotImplementedError( + "BooleanClauseList has a private constructor") + + @classmethod + def _construct(cls, operator, continue_on, skip_on, *clauses, **kw): + convert_clauses = [] + + clauses = [ + _expression_literal_as_text(clause) + for clause in + util.coerce_generator_arg(clauses) + ] + for clause in clauses: + + if isinstance(clause, continue_on): + continue + elif isinstance(clause, skip_on): + return clause.self_group(against=operators._asbool) + + convert_clauses.append(clause) + + if len(convert_clauses) == 1: + return convert_clauses[0].self_group(against=operators._asbool) + elif not convert_clauses and clauses: + return clauses[0].self_group(against=operators._asbool) + + convert_clauses = [c.self_group(against=operator) + for c in convert_clauses] + + self = cls.__new__(cls) + self.clauses = convert_clauses + self.group = True + self.operator = operator + self.group_contents = True + self.type = type_api.BOOLEANTYPE + return self + + @classmethod + def and_(cls, *clauses): + """Produce a conjunction of expressions joined by ``AND``. + + E.g.:: + + from sqlalchemy import and_ + + stmt = select([users_table]).where( + and_( + users_table.c.name == 'wendy', + users_table.c.enrolled == True + ) + ) + + The :func:`.and_` conjunction is also available using the + Python ``&`` operator (though note that compound expressions + need to be parenthesized in order to function with Python + operator precedence behavior):: + + stmt = select([users_table]).where( + (users_table.c.name == 'wendy') & + (users_table.c.enrolled == True) + ) + + The :func:`.and_` operation is also implicit in some cases; + the :meth:`.Select.where` method for example can be invoked multiple + times against a statement, which will have the effect of each + clause being combined using :func:`.and_`:: + + stmt = select([users_table]).\\ + where(users_table.c.name == 'wendy').\\ + where(users_table.c.enrolled == True) + + .. seealso:: + + :func:`.or_` + + """ + return cls._construct(operators.and_, True_, False_, *clauses) + + @classmethod + def or_(cls, *clauses): + """Produce a conjunction of expressions joined by ``OR``. + + E.g.:: + + from sqlalchemy import or_ + + stmt = select([users_table]).where( + or_( + users_table.c.name == 'wendy', + users_table.c.name == 'jack' + ) + ) + + The :func:`.or_` conjunction is also available using the + Python ``|`` operator (though note that compound expressions + need to be parenthesized in order to function with Python + operator precedence behavior):: + + stmt = select([users_table]).where( + (users_table.c.name == 'wendy') | + (users_table.c.name == 'jack') + ) + + .. 
seealso:: + + :func:`.and_` + + """ + return cls._construct(operators.or_, False_, True_, *clauses) + + @property + def _select_iterable(self): + return (self, ) + + def self_group(self, against=None): + if not self.clauses: + return self + else: + return super(BooleanClauseList, self).self_group(against=against) + + def _negate(self): + return ClauseList._negate(self) + + +and_ = BooleanClauseList.and_ +or_ = BooleanClauseList.or_ + + +class Tuple(ClauseList, ColumnElement): + """Represent a SQL tuple.""" + + def __init__(self, *clauses, **kw): + """Return a :class:`.Tuple`. + + Main usage is to produce a composite IN construct:: + + from sqlalchemy import tuple_ + + tuple_(table.c.col1, table.c.col2).in_( + [(1, 2), (5, 12), (10, 19)] + ) + + .. warning:: + + The composite IN construct is not supported by all backends, + and is currently known to work on Postgresql and MySQL, + but not SQLite. Unsupported backends will raise + a subclass of :class:`~sqlalchemy.exc.DBAPIError` when such + an expression is invoked. + + """ + + clauses = [_literal_as_binds(c) for c in clauses] + self._type_tuple = [arg.type for arg in clauses] + self.type = kw.pop('type_', self._type_tuple[0] + if self._type_tuple else type_api.NULLTYPE) + + super(Tuple, self).__init__(*clauses, **kw) + + @property + def _select_iterable(self): + return (self, ) + + def _bind_param(self, operator, obj): + return Tuple(*[ + BindParameter(None, o, _compared_to_operator=operator, + _compared_to_type=type_, unique=True) + for o, type_ in zip(obj, self._type_tuple) + ]).self_group() + + +class Case(ColumnElement): + """Represent a ``CASE`` expression. + + :class:`.Case` is produced using the :func:`.case` factory function, + as in:: + + from sqlalchemy import case + + stmt = select([users_table]).\\ + where( + case( + [ + (users_table.c.name == 'wendy', 'W'), + (users_table.c.name == 'jack', 'J') + ], + else_='E' + ) + ) + + Details on :class:`.Case` usage is at :func:`.case`. + + .. seealso:: + + :func:`.case` + + """ + + __visit_name__ = 'case' + + def __init__(self, whens, value=None, else_=None): + """Produce a ``CASE`` expression. + + The ``CASE`` construct in SQL is a conditional object that + acts somewhat analogously to an "if/then" construct in other + languages. It returns an instance of :class:`.Case`. + + :func:`.case` in its usual form is passed a list of "when" + constructs, that is, a list of conditions and results as tuples:: + + from sqlalchemy import case + + stmt = select([users_table]).\\ + where( + case( + [ + (users_table.c.name == 'wendy', 'W'), + (users_table.c.name == 'jack', 'J') + ], + else_='E' + ) + ) + + The above statement will produce SQL resembling:: + + SELECT id, name FROM user + WHERE CASE + WHEN (name = :name_1) THEN :param_1 + WHEN (name = :name_2) THEN :param_2 + ELSE :param_3 + END + + When simple equality expressions of several values against a single + parent column are needed, :func:`.case` also has a "shorthand" format + used via the + :paramref:`.case.value` parameter, which is passed a column + expression to be compared. In this form, the :paramref:`.case.whens` + parameter is passed as a dictionary containing expressions to be + compared against keyed to result expressions. 
The statement below is
+        equivalent to the preceding statement::
+
+            stmt = select([users_table]).\\
+                        where(
+                            case(
+                                {"wendy": "W", "jack": "J"},
+                                value=users_table.c.name,
+                                else_='E'
+                            )
+                        )
+
+        The values which are accepted as result values in
+        :paramref:`.case.whens` as well as with :paramref:`.case.else_` are
+        coerced from Python literals into :func:`.bindparam` constructs.
+        SQL expressions, e.g. :class:`.ColumnElement` constructs, are accepted
+        as well.  To coerce a literal string expression into a constant
+        expression rendered inline, use the :func:`.literal_column` construct,
+        as in::
+
+            from sqlalchemy import case, literal_column
+
+            case(
+                [
+                    (
+                        orderline.c.qty > 100,
+                        literal_column("'greaterthan100'")
+                    ),
+                    (
+                        orderline.c.qty > 10,
+                        literal_column("'greaterthan10'")
+                    )
+                ],
+                else_=literal_column("'lessthan10'")
+            )
+
+        The above will render the given constants without using bound
+        parameters for the result values (but still for the comparison
+        values), as in::
+
+            CASE
+                WHEN (orderline.qty > :qty_1) THEN 'greaterthan100'
+                WHEN (orderline.qty > :qty_2) THEN 'greaterthan10'
+                ELSE 'lessthan10'
+            END
+
+        :param whens: The criteria to be compared against,
+         :paramref:`.case.whens` accepts two different forms, based on
+         whether or not :paramref:`.case.value` is used.
+
+         In the first form, it accepts a list of 2-tuples; each 2-tuple
+         consists of ``(<sql expression>, <value>)``, where the SQL
+         expression is a boolean expression and "value" is a resulting value,
+         e.g.::
+
+            case([
+                (users_table.c.name == 'wendy', 'W'),
+                (users_table.c.name == 'jack', 'J')
+            ])
+
+         In the second form, it accepts a Python dictionary of comparison
+         values mapped to a resulting value; this form requires
+         :paramref:`.case.value` to be present, and values will be compared
+         using the ``==`` operator, e.g.::
+
+            case(
+                {"wendy": "W", "jack": "J"},
+                value=users_table.c.name
+            )
+
+        :param value: An optional SQL expression which will be used as a
+          fixed "comparison point" for candidate values within a dictionary
+          passed to :paramref:`.case.whens`.
+
+        :param else\_: An optional SQL expression which will be the evaluated
+          result of the ``CASE`` construct if all expressions within
+          :paramref:`.case.whens` evaluate to false.  When omitted, most
+          databases will produce a result of NULL if none of the "when"
+          expressions evaluate to true.
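+
+        For example, the dictionary shorthand can be combined with
+        :func:`.select` (an illustrative sketch; ``users_table`` is
+        assumed)::
+
+            from sqlalchemy import case, select
+
+            stmt = select([
+                users_table.c.name,
+                case(
+                    {"wendy": "W", "jack": "J"},
+                    value=users_table.c.name,
+                    else_='E'
+                ).label('initial')
+            ])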
+ + + """ + + try: + whens = util.dictlike_iteritems(whens) + except TypeError: + pass + + if value is not None: + whenlist = [ + (_literal_as_binds(c).self_group(), + _literal_as_binds(r)) for (c, r) in whens + ] + else: + whenlist = [ + (_no_literals(c).self_group(), + _literal_as_binds(r)) for (c, r) in whens + ] + + if whenlist: + type_ = list(whenlist[-1])[-1].type + else: + type_ = None + + if value is None: + self.value = None + else: + self.value = _literal_as_binds(value) + + self.type = type_ + self.whens = whenlist + if else_ is not None: + self.else_ = _literal_as_binds(else_) + else: + self.else_ = None + + def _copy_internals(self, clone=_clone, **kw): + if self.value is not None: + self.value = clone(self.value, **kw) + self.whens = [(clone(x, **kw), clone(y, **kw)) + for x, y in self.whens] + if self.else_ is not None: + self.else_ = clone(self.else_, **kw) + + def get_children(self, **kwargs): + if self.value is not None: + yield self.value + for x, y in self.whens: + yield x + yield y + if self.else_ is not None: + yield self.else_ + + @property + def _from_objects(self): + return list(itertools.chain(*[x._from_objects for x in + self.get_children()])) + + +def literal_column(text, type_=None): + """Produce a :class:`.ColumnClause` object that has the + :paramref:`.column.is_literal` flag set to True. + + :func:`.literal_column` is similar to :func:`.column`, except that + it is more often used as a "standalone" column expression that renders + exactly as stated; while :func:`.column` stores a string name that + will be assumed to be part of a table and may be quoted as such, + :func:`.literal_column` can be that, or any other arbitrary column-oriented + expression. + + :param text: the text of the expression; can be any SQL expression. + Quoting rules will not be applied. To specify a column-name expression + which should be subject to quoting rules, use the :func:`column` + function. + + :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` + object which will + provide result-set translation and additional expression semantics for + this column. If left as None the type will be NullType. + + .. seealso:: + + :func:`.column` + + :func:`.text` + + :ref:`sqlexpression_literal_column` + + """ + return ColumnClause(text, type_=type_, is_literal=True) + + +class Cast(ColumnElement): + """Represent a ``CAST`` expression. + + :class:`.Cast` is produced using the :func:`.cast` factory function, + as in:: + + from sqlalchemy import cast, Numeric + + stmt = select([ + cast(product_table.c.unit_price, Numeric(10, 4)) + ]) + + Details on :class:`.Cast` usage is at :func:`.cast`. + + .. seealso:: + + :func:`.cast` + + """ + + __visit_name__ = 'cast' + + def __init__(self, expression, type_): + """Produce a ``CAST`` expression. + + :func:`.cast` returns an instance of :class:`.Cast`. + + E.g.:: + + from sqlalchemy import cast, Numeric + + stmt = select([ + cast(product_table.c.unit_price, Numeric(10, 4)) + ]) + + The above statement will produce SQL resembling:: + + SELECT CAST(unit_price AS NUMERIC(10, 4)) FROM product + + The :func:`.cast` function performs two distinct functions when + used. The first is that it renders the ``CAST`` expression within + the resulting SQL string. The second is that it associates the given + type (e.g. 
:class:`.TypeEngine` class or instance) with the column + expression on the Python side, which means the expression will take + on the expression operator behavior associated with that type, + as well as the bound-value handling and result-row-handling behavior + of the type. + + .. versionchanged:: 0.9.0 :func:`.cast` now applies the given type + to the expression such that it takes effect on the bound-value, + e.g. the Python-to-database direction, in addition to the + result handling, e.g. database-to-Python, direction. + + An alternative to :func:`.cast` is the :func:`.type_coerce` function. + This function performs the second task of associating an expression + with a specific type, but does not render the ``CAST`` expression + in SQL. + + :param expression: A SQL expression, such as a :class:`.ColumnElement` + expression or a Python string which will be coerced into a bound + literal value. + + :param type_: A :class:`.TypeEngine` class or instance indicating + the type to which the ``CAST`` should apply. + + .. seealso:: + + :func:`.type_coerce` - Python-side type coercion without emitting + CAST. + + """ + self.type = type_api.to_instance(type_) + self.clause = _literal_as_binds(expression, type_=self.type) + self.typeclause = TypeClause(self.type) + + def _copy_internals(self, clone=_clone, **kw): + self.clause = clone(self.clause, **kw) + self.typeclause = clone(self.typeclause, **kw) + + def get_children(self, **kwargs): + return self.clause, self.typeclause + + @property + def _from_objects(self): + return self.clause._from_objects + + +class Extract(ColumnElement): + """Represent a SQL EXTRACT clause, ``extract(field FROM expr)``.""" + + __visit_name__ = 'extract' + + def __init__(self, field, expr, **kwargs): + """Return a :class:`.Extract` construct. + + This is typically available as :func:`.extract` + as well as ``func.extract`` from the + :data:`.func` namespace. + + """ + self.type = type_api.INTEGERTYPE + self.field = field + self.expr = _literal_as_binds(expr, None) + + def _copy_internals(self, clone=_clone, **kw): + self.expr = clone(self.expr, **kw) + + def get_children(self, **kwargs): + return self.expr, + + @property + def _from_objects(self): + return self.expr._from_objects + + +class _label_reference(ColumnElement): + """Wrap a column expression as it appears in a 'reference' context. + + This expression is any that inclues an _order_by_label_element, + which is a Label, or a DESC / ASC construct wrapping a Label. + + The production of _label_reference() should occur when an expression + is added to this context; this includes the ORDER BY or GROUP BY of a + SELECT statement, as well as a few other places, such as the ORDER BY + within an OVER clause. + + """ + __visit_name__ = 'label_reference' + + def __init__(self, element): + self.element = element + + def _copy_internals(self, clone=_clone, **kw): + self.element = clone(self.element, **kw) + + @property + def _from_objects(self): + return () + + +class _textual_label_reference(ColumnElement): + __visit_name__ = 'textual_label_reference' + + def __init__(self, element): + self.element = element + + @util.memoized_property + def _text_clause(self): + return TextClause._create_text(self.element) + + +class UnaryExpression(ColumnElement): + """Define a 'unary' expression. + + A unary expression has a single column expression + and an operator. The operator can be placed on the left + (where it is called the 'operator') or right (where it is called the + 'modifier') of the column expression. 
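+
+    For example, :func:`.desc` wraps a column expression in a
+    :class:`.UnaryExpression` whose right-side 'modifier' is the ``DESC``
+    keyword (an illustrative sketch; ``users_table`` is assumed)::
+
+        from sqlalchemy import desc
+
+        # compiles to "name DESC"
+        expr = desc(users_table.c.name)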
+ + :class:`.UnaryExpression` is the basis for several unary operators + including those used by :func:`.desc`, :func:`.asc`, :func:`.distinct`, + :func:`.nullsfirst` and :func:`.nullslast`. + + """ + __visit_name__ = 'unary' + + def __init__(self, element, operator=None, modifier=None, + type_=None, negate=None, wraps_column_expression=False): + self.operator = operator + self.modifier = modifier + self.element = element.self_group( + against=self.operator or self.modifier) + self.type = type_api.to_instance(type_) + self.negate = negate + self.wraps_column_expression = wraps_column_expression + + @classmethod + def _create_nullsfirst(cls, column): + """Produce the ``NULLS FIRST`` modifier for an ``ORDER BY`` expression. + + :func:`.nullsfirst` is intended to modify the expression produced + by :func:`.asc` or :func:`.desc`, and indicates how NULL values + should be handled when they are encountered during ordering:: + + + from sqlalchemy import desc, nullsfirst + + stmt = select([users_table]).\\ + order_by(nullsfirst(desc(users_table.c.name))) + + The SQL expression from the above would resemble:: + + SELECT id, name FROM user ORDER BY name DESC NULLS FIRST + + Like :func:`.asc` and :func:`.desc`, :func:`.nullsfirst` is typically + invoked from the column expression itself using + :meth:`.ColumnElement.nullsfirst`, rather than as its standalone + function version, as in:: + + stmt = (select([users_table]). + order_by(users_table.c.name.desc().nullsfirst()) + ) + + .. seealso:: + + :func:`.asc` + + :func:`.desc` + + :func:`.nullslast` + + :meth:`.Select.order_by` + + """ + return UnaryExpression( + _literal_as_label_reference(column), + modifier=operators.nullsfirst_op, + wraps_column_expression=False) + + @classmethod + def _create_nullslast(cls, column): + """Produce the ``NULLS LAST`` modifier for an ``ORDER BY`` expression. + + :func:`.nullslast` is intended to modify the expression produced + by :func:`.asc` or :func:`.desc`, and indicates how NULL values + should be handled when they are encountered during ordering:: + + + from sqlalchemy import desc, nullslast + + stmt = select([users_table]).\\ + order_by(nullslast(desc(users_table.c.name))) + + The SQL expression from the above would resemble:: + + SELECT id, name FROM user ORDER BY name DESC NULLS LAST + + Like :func:`.asc` and :func:`.desc`, :func:`.nullslast` is typically + invoked from the column expression itself using + :meth:`.ColumnElement.nullslast`, rather than as its standalone + function version, as in:: + + stmt = select([users_table]).\\ + order_by(users_table.c.name.desc().nullslast()) + + .. seealso:: + + :func:`.asc` + + :func:`.desc` + + :func:`.nullsfirst` + + :meth:`.Select.order_by` + + """ + return UnaryExpression( + _literal_as_label_reference(column), + modifier=operators.nullslast_op, + wraps_column_expression=False) + + @classmethod + def _create_desc(cls, column): + """Produce a descending ``ORDER BY`` clause element. + + e.g.:: + + from sqlalchemy import desc + + stmt = select([users_table]).order_by(desc(users_table.c.name)) + + will produce SQL as:: + + SELECT id, name FROM user ORDER BY name DESC + + The :func:`.desc` function is a standalone version of the + :meth:`.ColumnElement.desc` method available on all SQL expressions, + e.g.:: + + + stmt = select([users_table]).order_by(users_table.c.name.desc()) + + :param column: A :class:`.ColumnElement` (e.g. scalar SQL expression) + with which to apply the :func:`.desc` operation. + + .. 
seealso:: + + :func:`.asc` + + :func:`.nullsfirst` + + :func:`.nullslast` + + :meth:`.Select.order_by` + + """ + return UnaryExpression( + _literal_as_label_reference(column), + modifier=operators.desc_op, + wraps_column_expression=False) + + @classmethod + def _create_asc(cls, column): + """Produce an ascending ``ORDER BY`` clause element. + + e.g.:: + + from sqlalchemy import asc + stmt = select([users_table]).order_by(asc(users_table.c.name)) + + will produce SQL as:: + + SELECT id, name FROM user ORDER BY name ASC + + The :func:`.asc` function is a standalone version of the + :meth:`.ColumnElement.asc` method available on all SQL expressions, + e.g.:: + + + stmt = select([users_table]).order_by(users_table.c.name.asc()) + + :param column: A :class:`.ColumnElement` (e.g. scalar SQL expression) + with which to apply the :func:`.asc` operation. + + .. seealso:: + + :func:`.desc` + + :func:`.nullsfirst` + + :func:`.nullslast` + + :meth:`.Select.order_by` + + """ + return UnaryExpression( + _literal_as_label_reference(column), + modifier=operators.asc_op, + wraps_column_expression=False) + + @classmethod + def _create_distinct(cls, expr): + """Produce an column-expression-level unary ``DISTINCT`` clause. + + This applies the ``DISTINCT`` keyword to an individual column + expression, and is typically contained within an aggregate function, + as in:: + + from sqlalchemy import distinct, func + stmt = select([func.count(distinct(users_table.c.name))]) + + The above would produce an expression resembling:: + + SELECT COUNT(DISTINCT name) FROM user + + The :func:`.distinct` function is also available as a column-level + method, e.g. :meth:`.ColumnElement.distinct`, as in:: + + stmt = select([func.count(users_table.c.name.distinct())]) + + The :func:`.distinct` operator is different from the + :meth:`.Select.distinct` method of :class:`.Select`, + which produces a ``SELECT`` statement + with ``DISTINCT`` applied to the result set as a whole, + e.g. a ``SELECT DISTINCT`` expression. See that method for further + information. + + .. 
seealso::
+
+            :meth:`.ColumnElement.distinct`
+
+            :meth:`.Select.distinct`
+
+            :data:`.func`
+
+        """
+        expr = _literal_as_binds(expr)
+        return UnaryExpression(
+            expr, operator=operators.distinct_op,
+            type_=expr.type, wraps_column_expression=False)
+
+    @property
+    def _order_by_label_element(self):
+        if self.modifier in (operators.desc_op, operators.asc_op):
+            return self.element._order_by_label_element
+        else:
+            return None
+
+    @property
+    def _from_objects(self):
+        return self.element._from_objects
+
+    def _copy_internals(self, clone=_clone, **kw):
+        self.element = clone(self.element, **kw)
+
+    def get_children(self, **kwargs):
+        return self.element,
+
+    def compare(self, other, **kw):
+        """Compare this :class:`UnaryExpression` against the given
+        :class:`.ClauseElement`."""
+
+        return (
+            isinstance(other, UnaryExpression) and
+            self.operator == other.operator and
+            self.modifier == other.modifier and
+            self.element.compare(other.element, **kw)
+        )
+
+    def _negate(self):
+        if self.negate is not None:
+            return UnaryExpression(
+                self.element,
+                operator=self.negate,
+                negate=self.operator,
+                modifier=self.modifier,
+                type_=self.type,
+                wraps_column_expression=self.wraps_column_expression)
+        else:
+            return ClauseElement._negate(self)
+
+    def self_group(self, against=None):
+        if self.operator and operators.is_precedent(self.operator, against):
+            return Grouping(self)
+        else:
+            return self
+
+
+class AsBoolean(UnaryExpression):
+
+    def __init__(self, element, operator, negate):
+        self.element = element
+        self.type = type_api.BOOLEANTYPE
+        self.operator = operator
+        self.negate = negate
+        self.modifier = None
+        self.wraps_column_expression = True
+
+    def self_group(self, against=None):
+        return self
+
+    def _negate(self):
+        return self.element._negate()
+
+
+class BinaryExpression(ColumnElement):
+    """Represent an expression that is ``LEFT <operator> RIGHT``.
+ + A :class:`.BinaryExpression` is generated automatically + whenever two column expressions are used in a Python binary expression:: + + >>> from sqlalchemy.sql import column + >>> column('a') + column('b') + + >>> print column('a') + column('b') + a + b + + """ + + __visit_name__ = 'binary' + + def __init__(self, left, right, operator, type_=None, + negate=None, modifiers=None): + # allow compatibility with libraries that + # refer to BinaryExpression directly and pass strings + if isinstance(operator, util.string_types): + operator = operators.custom_op(operator) + self._orig = (left, right) + self.left = left.self_group(against=operator) + self.right = right.self_group(against=operator) + self.operator = operator + self.type = type_api.to_instance(type_) + self.negate = negate + + if modifiers is None: + self.modifiers = {} + else: + self.modifiers = modifiers + + def __bool__(self): + if self.operator in (operator.eq, operator.ne): + return self.operator(hash(self._orig[0]), hash(self._orig[1])) + else: + raise TypeError("Boolean value of this clause is not defined") + + __nonzero__ = __bool__ + + @property + def is_comparison(self): + return operators.is_comparison(self.operator) + + @property + def _from_objects(self): + return self.left._from_objects + self.right._from_objects + + def _copy_internals(self, clone=_clone, **kw): + self.left = clone(self.left, **kw) + self.right = clone(self.right, **kw) + + def get_children(self, **kwargs): + return self.left, self.right + + def compare(self, other, **kw): + """Compare this :class:`BinaryExpression` against the + given :class:`BinaryExpression`.""" + + return ( + isinstance(other, BinaryExpression) and + self.operator == other.operator and + ( + self.left.compare(other.left, **kw) and + self.right.compare(other.right, **kw) or + ( + operators.is_commutative(self.operator) and + self.left.compare(other.right, **kw) and + self.right.compare(other.left, **kw) + ) + ) + ) + + def self_group(self, against=None): + if operators.is_precedent(self.operator, against): + return Grouping(self) + else: + return self + + def _negate(self): + if self.negate is not None: + return BinaryExpression( + self.left, + self.right, + self.negate, + negate=self.operator, + type_=self.type, + modifiers=self.modifiers) + else: + return super(BinaryExpression, self)._negate() + + +class Grouping(ColumnElement): + """Represent a grouping within a column expression""" + + __visit_name__ = 'grouping' + + def __init__(self, element): + self.element = element + self.type = getattr(element, 'type', type_api.NULLTYPE) + + def self_group(self, against=None): + return self + + @property + def _key_label(self): + return self._label + + @property + def _label(self): + return getattr(self.element, '_label', None) or self.anon_label + + def _copy_internals(self, clone=_clone, **kw): + self.element = clone(self.element, **kw) + + def get_children(self, **kwargs): + return self.element, + + @property + def _from_objects(self): + return self.element._from_objects + + def __getattr__(self, attr): + return getattr(self.element, attr) + + def __getstate__(self): + return {'element': self.element, 'type': self.type} + + def __setstate__(self, state): + self.element = state['element'] + self.type = state['type'] + + def compare(self, other, **kw): + return isinstance(other, Grouping) and \ + self.element.compare(other.element) + + +class Over(ColumnElement): + """Represent an OVER clause. 
+ + This is a special operator against a so-called + "window" function, as well as any aggregate function, + which produces results relative to the result set + itself. It's supported only by certain database + backends. + + """ + __visit_name__ = 'over' + + order_by = None + partition_by = None + + def __init__(self, func, partition_by=None, order_by=None): + """Produce an :class:`.Over` object against a function. + + Used against aggregate or so-called "window" functions, + for database backends that support window functions. + + E.g.:: + + from sqlalchemy import over + over(func.row_number(), order_by='x') + + Would produce "ROW_NUMBER() OVER(ORDER BY x)". + + :param func: a :class:`.FunctionElement` construct, typically + generated by :data:`~.expression.func`. + :param partition_by: a column element or string, or a list + of such, that will be used as the PARTITION BY clause + of the OVER construct. + :param order_by: a column element or string, or a list + of such, that will be used as the ORDER BY clause + of the OVER construct. + + This function is also available from the :data:`~.expression.func` + construct itself via the :meth:`.FunctionElement.over` method. + + .. versionadded:: 0.7 + + """ + self.func = func + if order_by is not None: + self.order_by = ClauseList( + *util.to_list(order_by), + _literal_as_text=_literal_as_label_reference) + if partition_by is not None: + self.partition_by = ClauseList( + *util.to_list(partition_by), + _literal_as_text=_literal_as_label_reference) + + @util.memoized_property + def type(self): + return self.func.type + + def get_children(self, **kwargs): + return [c for c in + (self.func, self.partition_by, self.order_by) + if c is not None] + + def _copy_internals(self, clone=_clone, **kw): + self.func = clone(self.func, **kw) + if self.partition_by is not None: + self.partition_by = clone(self.partition_by, **kw) + if self.order_by is not None: + self.order_by = clone(self.order_by, **kw) + + @property + def _from_objects(self): + return list(itertools.chain( + *[c._from_objects for c in + (self.func, self.partition_by, self.order_by) + if c is not None] + )) + + +class FunctionFilter(ColumnElement): + """Represent a function FILTER clause. + + This is a special operator against aggregate and window functions, + which controls which rows are passed to it. + It's supported only by certain database backends. + + Invocation of :class:`.FunctionFilter` is via + :meth:`.FunctionElement.filter`:: + + func.count(1).filter(True) + + .. versionadded:: 1.0.0 + + .. seealso:: + + :meth:`.FunctionElement.filter` + + """ + __visit_name__ = 'funcfilter' + + criterion = None + + def __init__(self, func, *criterion): + """Produce a :class:`.FunctionFilter` object against a function. + + Used against aggregate and window functions, + for database backends that support the "FILTER" clause. + + E.g.:: + + from sqlalchemy import funcfilter + funcfilter(func.count(1), MyClass.name == 'some name') + + Would produce "COUNT(1) FILTER (WHERE myclass.name = 'some name')". + + This function is also available from the :data:`~.expression.func` + construct itself via the :meth:`.FunctionElement.filter` method. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :meth:`.FunctionElement.filter` + + + """ + self.func = func + self.filter(*criterion) + + def filter(self, *criterion): + """Produce an additional FILTER against the function. + + This method adds additional criteria to the initial criteria + set up by :meth:`.FunctionElement.filter`. 
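+
+        For example, the two calls below accumulate criteria onto the
+        same FILTER clause (an illustrative sketch; an ``orders`` table
+        is assumed)::
+
+            from sqlalchemy import func
+
+            fn = func.count(orders.c.id).filter(orders.c.qty > 10)
+            fn = fn.filter(orders.c.price > 100)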
+ + Multiple criteria are joined together at SQL render time + via ``AND``. + + + """ + + for criterion in list(criterion): + criterion = _expression_literal_as_text(criterion) + + if self.criterion is not None: + self.criterion = self.criterion & criterion + else: + self.criterion = criterion + + return self + + def over(self, partition_by=None, order_by=None): + """Produce an OVER clause against this filtered function. + + Used against aggregate or so-called "window" functions, + for database backends that support window functions. + + The expression:: + + func.rank().filter(MyClass.y > 5).over(order_by='x') + + is shorthand for:: + + from sqlalchemy import over, funcfilter + over(funcfilter(func.rank(), MyClass.y > 5), order_by='x') + + See :func:`~.expression.over` for a full description. + + """ + return Over(self, partition_by=partition_by, order_by=order_by) + + @util.memoized_property + def type(self): + return self.func.type + + def get_children(self, **kwargs): + return [c for c in + (self.func, self.criterion) + if c is not None] + + def _copy_internals(self, clone=_clone, **kw): + self.func = clone(self.func, **kw) + if self.criterion is not None: + self.criterion = clone(self.criterion, **kw) + + @property + def _from_objects(self): + return list(itertools.chain( + *[c._from_objects for c in (self.func, self.criterion) + if c is not None] + )) + + +class Label(ColumnElement): + """Represents a column label (AS). + + Represent a label, as typically applied to any column-level + element using the ``AS`` sql keyword. + + """ + + __visit_name__ = 'label' + + def __init__(self, name, element, type_=None): + """Return a :class:`Label` object for the + given :class:`.ColumnElement`. + + A label changes the name of an element in the columns clause of a + ``SELECT`` statement, typically via the ``AS`` SQL keyword. + + This functionality is more conveniently available via the + :meth:`.ColumnElement.label` method on :class:`.ColumnElement`. + + :param name: label name + + :param obj: a :class:`.ColumnElement`. 
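+
+        For example (an illustrative sketch; ``users_table`` is assumed)::
+
+            from sqlalchemy import select
+
+            # renders as "SELECT name AS username FROM user"
+            stmt = select([users_table.c.name.label('username')])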
+ + """ + + if isinstance(element, Label): + self._resolve_label = element._label + + while isinstance(element, Label): + element = element.element + + if name: + self.name = name + self._resolve_label = self.name + else: + self.name = _anonymous_label( + '%%(%d %s)s' % (id(self), getattr(element, 'name', 'anon')) + ) + + self.key = self._label = self._key_label = self.name + self._element = element + self._type = type_ + self._proxies = [element] + + def __reduce__(self): + return self.__class__, (self.name, self._element, self._type) + + @util.memoized_property + def _allow_label_resolve(self): + return self.element._allow_label_resolve + + @property + def _order_by_label_element(self): + return self + + @util.memoized_property + def type(self): + return type_api.to_instance( + self._type or getattr(self._element, 'type', None) + ) + + @util.memoized_property + def element(self): + return self._element.self_group(against=operators.as_) + + def self_group(self, against=None): + sub_element = self._element.self_group(against=against) + if sub_element is not self._element: + return Label(self.name, + sub_element, + type_=self._type) + else: + return self + + @property + def primary_key(self): + return self.element.primary_key + + @property + def foreign_keys(self): + return self.element.foreign_keys + + def get_children(self, **kwargs): + return self.element, + + def _copy_internals(self, clone=_clone, anonymize_labels=False, **kw): + self._element = clone(self._element, **kw) + self.__dict__.pop('element', None) + self.__dict__.pop('_allow_label_resolve', None) + if anonymize_labels: + self.name = self._resolve_label = _anonymous_label( + '%%(%d %s)s' % ( + id(self), getattr(self.element, 'name', 'anon')) + ) + self.key = self._label = self._key_label = self.name + + @property + def _from_objects(self): + return self.element._from_objects + + def _make_proxy(self, selectable, name=None, **kw): + e = self.element._make_proxy(selectable, + name=name if name else self.name) + e._proxies.append(self) + if self._type is not None: + e.type = self._type + return e + + +class ColumnClause(Immutable, ColumnElement): + """Represents a column expression from any textual string. + + The :class:`.ColumnClause`, a lightweight analogue to the + :class:`.Column` class, is typically invoked using the + :func:`.column` function, as in:: + + from sqlalchemy import column + + id, name = column("id"), column("name") + stmt = select([id, name]).select_from("user") + + The above statement would produce SQL like:: + + SELECT id, name FROM user + + :class:`.ColumnClause` is the immediate superclass of the schema-specific + :class:`.Column` object. While the :class:`.Column` class has all the + same capabilities as :class:`.ColumnClause`, the :class:`.ColumnClause` + class is usable by itself in those cases where behavioral requirements + are limited to simple SQL expression generation. The object has none of + the associations with schema-level metadata or with execution-time + behavior that :class:`.Column` does, so in that sense is a "lightweight" + version of :class:`.Column`. + + Full details on :class:`.ColumnClause` usage is at :func:`.column`. + + .. seealso:: + + :func:`.column` + + :class:`.Column` + + """ + __visit_name__ = 'column' + + onupdate = default = server_default = server_onupdate = None + + _memoized_property = util.group_expirable_memoized_property() + + def __init__(self, text, type_=None, is_literal=False, _selectable=None): + """Produce a :class:`.ColumnClause` object. 
+ + The :class:`.ColumnClause` is a lightweight analogue to the + :class:`.Column` class. The :func:`.column` function can + be invoked with just a name alone, as in:: + + from sqlalchemy import column + + id, name = column("id"), column("name") + stmt = select([id, name]).select_from("user") + + The above statement would produce SQL like:: + + SELECT id, name FROM user + + Once constructed, :func:`.column` may be used like any other SQL + expression element such as within :func:`.select` constructs:: + + from sqlalchemy.sql import column + + id, name = column("id"), column("name") + stmt = select([id, name]).select_from("user") + + The text handled by :func:`.column` is assumed to be handled + like the name of a database column; if the string contains mixed case, + special characters, or matches a known reserved word on the target + backend, the column expression will render using the quoting + behavior determined by the backend. To produce a textual SQL + expression that is rendered exactly without any quoting, + use :func:`.literal_column` instead, or pass ``True`` as the + value of :paramref:`.column.is_literal`. Additionally, full SQL + statements are best handled using the :func:`.text` construct. + + :func:`.column` can be used in a table-like + fashion by combining it with the :func:`.table` function + (which is the lightweight analogue to :class:`.Table`) to produce + a working table construct with minimal boilerplate:: + + from sqlalchemy import table, column, select + + user = table("user", + column("id"), + column("name"), + column("description"), + ) + + stmt = select([user.c.description]).where(user.c.name == 'wendy') + + A :func:`.column` / :func:`.table` construct like that illustrated + above can be created in an + ad-hoc fashion and is not associated with any + :class:`.schema.MetaData`, DDL, or events, unlike its + :class:`.Table` counterpart. + + .. versionchanged:: 1.0.0 :func:`.expression.column` can now + be imported from the plain ``sqlalchemy`` namespace like any + other SQL element. + + :param text: the text of the element. + + :param type: :class:`.types.TypeEngine` object which can associate + this :class:`.ColumnClause` with a type. + + :param is_literal: if True, the :class:`.ColumnClause` is assumed to + be an exact expression that will be delivered to the output with no + quoting rules applied regardless of case sensitive settings. the + :func:`.literal_column()` function essentially invokes + :func:`.column` while passing ``is_literal=True``. + + .. 
seealso:: + + :class:`.Column` + + :func:`.literal_column` + + :func:`.table` + + :func:`.text` + + :ref:`sqlexpression_literal_column` + + """ + + self.key = self.name = text + self.table = _selectable + self.type = type_api.to_instance(type_) + self.is_literal = is_literal + + def _compare_name_for_result(self, other): + if self.is_literal or \ + self.table is None or self.table._textual or \ + not hasattr(other, 'proxy_set') or ( + isinstance(other, ColumnClause) and + (other.is_literal or + other.table is None or + other.table._textual) + ): + return (hasattr(other, 'name') and self.name == other.name) or \ + (hasattr(other, '_label') and self._label == other._label) + else: + return other.proxy_set.intersection(self.proxy_set) + + def _get_table(self): + return self.__dict__['table'] + + def _set_table(self, table): + self._memoized_property.expire_instance(self) + self.__dict__['table'] = table + table = property(_get_table, _set_table) + + @_memoized_property + def _from_objects(self): + t = self.table + if t is not None: + return [t] + else: + return [] + + @util.memoized_property + def description(self): + if util.py3k: + return self.name + else: + return self.name.encode('ascii', 'backslashreplace') + + @_memoized_property + def _key_label(self): + if self.key != self.name: + return self._gen_label(self.key) + else: + return self._label + + @_memoized_property + def _label(self): + return self._gen_label(self.name) + + @_memoized_property + def _render_label_in_columns_clause(self): + return self.table is not None + + def _gen_label(self, name): + t = self.table + + if self.is_literal: + return None + + elif t is not None and t.named_with_column: + if getattr(t, 'schema', None): + label = t.schema.replace('.', '_') + "_" + \ + t.name + "_" + name + else: + label = t.name + "_" + name + + # propagate name quoting rules for labels. 
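+            # i.e. an explicit quote flag present on either the column
+            # name or the table name is carried onto the combined
+            # "tablename_columnname" label below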
+ if getattr(name, "quote", None) is not None: + if isinstance(label, quoted_name): + label.quote = name.quote + else: + label = quoted_name(label, name.quote) + elif getattr(t.name, "quote", None) is not None: + # can't get this situation to occur, so let's + # assert false on it for now + assert not isinstance(label, quoted_name) + label = quoted_name(label, t.name.quote) + + # ensure the label name doesn't conflict with that + # of an existing column + if label in t.c: + _label = label + counter = 1 + while _label in t.c: + _label = label + "_" + str(counter) + counter += 1 + label = _label + + return _as_truncated(label) + + else: + return name + + def _bind_param(self, operator, obj): + return BindParameter(self.key, obj, + _compared_to_operator=operator, + _compared_to_type=self.type, + unique=True) + + def _make_proxy(self, selectable, name=None, attach=True, + name_is_truncatable=False, **kw): + # propagate the "is_literal" flag only if we are keeping our name, + # otherwise its considered to be a label + is_literal = self.is_literal and (name is None or name == self.name) + c = self._constructor( + _as_truncated(name or self.name) if + name_is_truncatable else + (name or self.name), + type_=self.type, + _selectable=selectable, + is_literal=is_literal + ) + if name is None: + c.key = self.key + c._proxies = [self] + if selectable._is_clone_of is not None: + c._is_clone_of = \ + selectable._is_clone_of.columns.get(c.key) + + if attach: + selectable._columns[c.key] = c + return c + + +class _IdentifiedClause(Executable, ClauseElement): + + __visit_name__ = 'identified' + _execution_options = \ + Executable._execution_options.union({'autocommit': False}) + + def __init__(self, ident): + self.ident = ident + + +class SavepointClause(_IdentifiedClause): + __visit_name__ = 'savepoint' + + +class RollbackToSavepointClause(_IdentifiedClause): + __visit_name__ = 'rollback_to_savepoint' + + +class ReleaseSavepointClause(_IdentifiedClause): + __visit_name__ = 'release_savepoint' + + +class quoted_name(util.MemoizedSlots, util.text_type): + """Represent a SQL identifier combined with quoting preferences. + + :class:`.quoted_name` is a Python unicode/str subclass which + represents a particular identifier name along with a + ``quote`` flag. This ``quote`` flag, when set to + ``True`` or ``False``, overrides automatic quoting behavior + for this identifier in order to either unconditionally quote + or to not quote the name. If left at its default of ``None``, + quoting behavior is applied to the identifier on a per-backend basis + based on an examination of the token itself. + + A :class:`.quoted_name` object with ``quote=True`` is also + prevented from being modified in the case of a so-called + "name normalize" option. Certain database backends, such as + Oracle, Firebird, and DB2 "normalize" case-insensitive names + as uppercase. The SQLAlchemy dialects for these backends + convert from SQLAlchemy's lower-case-means-insensitive convention + to the upper-case-means-insensitive conventions of those backends. + The ``quote=True`` flag here will prevent this conversion from occurring + to support an identifier that's quoted as all lower case against + such a backend. + + The :class:`.quoted_name` object is normally created automatically + when specifying the name for key schema constructs such as + :class:`.Table`, :class:`.Column`, and others. The class can also be + passed explicitly as the name to any function that receives a name which + can be quoted. 
For example, to use the :meth:`.Engine.has_table` method with
+    an unconditionally quoted name::
+
+        from sqlalchemy import create_engine
+        from sqlalchemy.sql.elements import quoted_name
+
+        engine = create_engine("oracle+cx_oracle://some_dsn")
+        engine.has_table(quoted_name("some_table", True))
+
+    The above logic will run the "has table" logic against the Oracle backend,
+    passing the name exactly as ``"some_table"`` without converting to
+    upper case.
+
+    .. versionadded:: 0.9.0
+
+    """
+
+    __slots__ = 'quote', 'lower', 'upper'
+
+    def __new__(cls, value, quote):
+        if value is None:
+            return None
+        # experimental - don't bother with quoted_name
+        # if quote flag is None.  doesn't seem to make any dent
+        # in performance however
+        # elif not sprcls and quote is None:
+        #    return value
+        elif isinstance(value, cls) and (
+            quote is None or value.quote == quote
+        ):
+            return value
+        self = super(quoted_name, cls).__new__(cls, value)
+        self.quote = quote
+        return self
+
+    def __reduce__(self):
+        return quoted_name, (util.text_type(self), self.quote)
+
+    def _memoized_method_lower(self):
+        if self.quote:
+            return self
+        else:
+            return util.text_type(self).lower()
+
+    def _memoized_method_upper(self):
+        if self.quote:
+            return self
+        else:
+            return util.text_type(self).upper()
+
+    def __repr__(self):
+        backslashed = self.encode('ascii', 'backslashreplace')
+        if not util.py2k:
+            backslashed = backslashed.decode('ascii')
+        return "'%s'" % backslashed
+
+
+class _truncated_label(quoted_name):
+    """A unicode subclass used to identify symbolic "
+    "names that may require truncation."""
+
+    __slots__ = ()
+
+    def __new__(cls, value, quote=None):
+        quote = getattr(value, "quote", quote)
+        # return super(_truncated_label, cls).__new__(cls, value, quote, True)
+        return super(_truncated_label, cls).__new__(cls, value, quote)
+
+    def __reduce__(self):
+        return self.__class__, (util.text_type(self), self.quote)
+
+    def apply_map(self, map_):
+        return self
+
+
+class conv(_truncated_label):
+    """Mark a string indicating that a name has already been converted
+    by a naming convention.
+
+    This is a string subclass that indicates a name that should not be
+    subject to any further naming conventions.
+
+    E.g. when we create a :class:`.Constraint` using a naming convention
+    as follows::
+
+        m = MetaData(naming_convention={
+            "ck": "ck_%(table_name)s_%(constraint_name)s"
+        })
+        t = Table('t', m, Column('x', Integer),
+                  CheckConstraint('x > 5', name='x5'))
+
+    The name of the above constraint will be rendered as ``"ck_t_x5"``.
+    That is, the existing name ``x5`` is used in the naming convention as the
+    ``constraint_name`` token.
+
+    In some situations, such as in migration scripts, we may be rendering
+    the above :class:`.CheckConstraint` with a name that's already been
+    converted.  In order to make sure the name isn't double-modified, the
+    new name is applied using the :func:`.schema.conv` marker.  We can
+    use this explicitly as follows::
+
+
+        m = MetaData(naming_convention={
+            "ck": "ck_%(table_name)s_%(constraint_name)s"
+        })
+        t = Table('t', m, Column('x', Integer),
+                  CheckConstraint('x > 5', name=conv('ck_t_x5')))
+
+    Where above, the :func:`.schema.conv` marker indicates that the constraint
+    name here is final, and the name will render as ``"ck_t_x5"`` and not
+    ``"ck_t_ck_t_x5"``
+
+    .. versionadded:: 0.9.4
+
+    ..
seealso:: + + :ref:`constraint_naming_conventions` + + """ + __slots__ = () + + +class _defer_name(_truncated_label): + """mark a name as 'deferred' for the purposes of automated name + generation. + + """ + __slots__ = () + + def __new__(cls, value): + if value is None: + return _NONE_NAME + elif isinstance(value, conv): + return value + else: + return super(_defer_name, cls).__new__(cls, value) + + def __reduce__(self): + return self.__class__, (util.text_type(self), ) + + +class _defer_none_name(_defer_name): + """indicate a 'deferred' name that was ultimately the value None.""" + __slots__ = () + +_NONE_NAME = _defer_none_name("_unnamed_") + +# for backwards compatibility in case +# someone is re-implementing the +# _truncated_identifier() sequence in a custom +# compiler +_generated_label = _truncated_label + + +class _anonymous_label(_truncated_label): + """A unicode subclass used to identify anonymously + generated names.""" + + __slots__ = () + + def __add__(self, other): + return _anonymous_label( + quoted_name( + util.text_type.__add__(self, util.text_type(other)), + self.quote) + ) + + def __radd__(self, other): + return _anonymous_label( + quoted_name( + util.text_type.__add__(util.text_type(other), self), + self.quote) + ) + + def apply_map(self, map_): + if self.quote is not None: + # preserve quoting only if necessary + return quoted_name(self % map_, self.quote) + else: + # else skip the constructor call + return self % map_ + + +def _as_truncated(value): + """coerce the given value to :class:`._truncated_label`. + + Existing :class:`._truncated_label` and + :class:`._anonymous_label` objects are passed + unchanged. + """ + + if isinstance(value, _truncated_label): + return value + else: + return _truncated_label(value) + + +def _string_or_unprintable(element): + if isinstance(element, util.string_types): + return element + else: + try: + return str(element) + except Exception: + return "unprintable element %r" % element + + +def _expand_cloned(elements): + """expand the given set of ClauseElements to be the set of all 'cloned' + predecessors. + + """ + return itertools.chain(*[x._cloned_set for x in elements]) + + +def _select_iterables(elements): + """expand tables into individual columns in the + given list of column expressions. + + """ + return itertools.chain(*[c._select_iterable for c in elements]) + + +def _cloned_intersection(a, b): + """return the intersection of sets a and b, counting + any overlap between 'cloned' predecessors. + + The returned set is in terms of the entities present within 'a'. + + """ + all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b)) + return set(elem for elem in a + if all_overlap.intersection(elem._cloned_set)) + + +def _cloned_difference(a, b): + all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b)) + return set(elem for elem in a + if not all_overlap.intersection(elem._cloned_set)) + + +def _labeled(element): + if not hasattr(element, 'name'): + return element.label(None) + else: + return element + + +def _is_column(col): + """True if ``col`` is an instance of :class:`.ColumnElement`.""" + + return isinstance(col, ColumnElement) + + +def _find_columns(clause): + """locate Column objects within the given expression.""" + + cols = util.column_set() + traverse(clause, {}, {'column': cols.add}) + return cols + + +# there is some inconsistency here between the usage of +# inspect() vs. checking for Visitable and __clause_element__. 
+# Ideally all functions here would derive from inspect(), +# however the inspect() versions add significant callcount +# overhead for critical functions like _interpret_as_column_or_from(). +# Generally, the column-based functions are more performance critical +# and are fine just checking for __clause_element__(). It is only +# _interpret_as_from() where we'd like to be able to receive ORM entities +# that have no defined namespace, hence inspect() is needed there. + + +def _column_as_key(element): + if isinstance(element, util.string_types): + return element + if hasattr(element, '__clause_element__'): + element = element.__clause_element__() + try: + return element.key + except AttributeError: + return None + + +def _clause_element_as_expr(element): + if hasattr(element, '__clause_element__'): + return element.__clause_element__() + else: + return element + + +def _literal_as_label_reference(element): + if isinstance(element, util.string_types): + return _textual_label_reference(element) + + elif hasattr(element, '__clause_element__'): + element = element.__clause_element__() + + return _literal_as_text(element) + + +def _literal_and_labels_as_label_reference(element): + if isinstance(element, util.string_types): + return _textual_label_reference(element) + + elif hasattr(element, '__clause_element__'): + element = element.__clause_element__() + + if isinstance(element, ColumnElement) and \ + element._order_by_label_element is not None: + return _label_reference(element) + else: + return _literal_as_text(element) + + +def _expression_literal_as_text(element): + return _literal_as_text(element, warn=True) + + +def _literal_as_text(element, warn=False): + if isinstance(element, Visitable): + return element + elif hasattr(element, '__clause_element__'): + return element.__clause_element__() + elif isinstance(element, util.string_types): + if warn: + util.warn_limited( + "Textual SQL expression %(expr)r should be " + "explicitly declared as text(%(expr)r)", + {"expr": util.ellipses_string(element)}) + + return TextClause(util.text_type(element)) + elif isinstance(element, (util.NoneType, bool)): + return _const_expr(element) + else: + raise exc.ArgumentError( + "SQL expression object or string expected, got object of type %r " + "instead" % type(element) + ) + + +def _no_literals(element): + if hasattr(element, '__clause_element__'): + return element.__clause_element__() + elif not isinstance(element, Visitable): + raise exc.ArgumentError("Ambiguous literal: %r. Use the 'text()' " + "function to indicate a SQL expression " + "literal, or 'literal()' to indicate a " + "bound value." 
% element) + else: + return element + + +def _is_literal(element): + return not isinstance(element, Visitable) and \ + not hasattr(element, '__clause_element__') + + +def _only_column_elements_or_none(element, name): + if element is None: + return None + else: + return _only_column_elements(element, name) + + +def _only_column_elements(element, name): + if hasattr(element, '__clause_element__'): + element = element.__clause_element__() + if not isinstance(element, ColumnElement): + raise exc.ArgumentError( + "Column-based expression object expected for argument " + "'%s'; got: '%s', type %s" % (name, element, type(element))) + return element + + +def _literal_as_binds(element, name=None, type_=None): + if hasattr(element, '__clause_element__'): + return element.__clause_element__() + elif not isinstance(element, Visitable): + if element is None: + return Null() + else: + return BindParameter(name, element, type_=type_, unique=True) + else: + return element + +_guess_straight_column = re.compile(r'^\w\S*$', re.I) + + +def _interpret_as_column_or_from(element): + if isinstance(element, Visitable): + return element + elif hasattr(element, '__clause_element__'): + return element.__clause_element__() + + insp = inspection.inspect(element, raiseerr=False) + if insp is None: + if isinstance(element, (util.NoneType, bool)): + return _const_expr(element) + elif hasattr(insp, "selectable"): + return insp.selectable + + # be forgiving as this is an extremely common + # and known expression + if element == "*": + guess_is_literal = True + elif isinstance(element, (numbers.Number)): + return ColumnClause(str(element), is_literal=True) + else: + element = str(element) + # give into temptation, as this fact we are guessing about + # is not one we've previously ever needed our users tell us; + # but let them know we are not happy about it + guess_is_literal = not _guess_straight_column.match(element) + util.warn_limited( + "Textual column expression %(column)r should be " + "explicitly declared with text(%(column)r), " + "or use %(literal_column)s(%(column)r) " + "for more specificity", + { + "column": util.ellipses_string(element), + "literal_column": "literal_column" + if guess_is_literal else "column" + }) + return ColumnClause( + element, + is_literal=guess_is_literal) + + +def _const_expr(element): + if isinstance(element, (Null, False_, True_)): + return element + elif element is None: + return Null() + elif element is False: + return False_() + elif element is True: + return True_() + else: + raise exc.ArgumentError( + "Expected None, False, or True" + ) + + +def _type_from_args(args): + for a in args: + if not a.type._isnull: + return a.type + else: + return type_api.NULLTYPE + + +def _corresponding_column_or_error(fromclause, column, + require_embedded=False): + c = fromclause.corresponding_column(column, + require_embedded=require_embedded) + if c is None: + raise exc.InvalidRequestError( + "Given column '%s', attached to table '%s', " + "failed to locate a corresponding column from table '%s'" + % + (column, + getattr(column, 'table', None), + fromclause.description) + ) + return c + + +class AnnotatedColumnElement(Annotated): + def __init__(self, element, values): + Annotated.__init__(self, element, values) + ColumnElement.comparator._reset(self) + for attr in ('name', 'key', 'table'): + if self.__dict__.get(attr, False) is None: + self.__dict__.pop(attr) + + def _with_annotations(self, values): + clone = super(AnnotatedColumnElement, self)._with_annotations(values) + 
ColumnElement.comparator._reset(clone) + return clone + + @util.memoized_property + def name(self): + """pull 'name' from parent, if not present""" + return self._Annotated__element.name + + @util.memoized_property + def table(self): + """pull 'table' from parent, if not present""" + return self._Annotated__element.table + + @util.memoized_property + def key(self): + """pull 'key' from parent, if not present""" + return self._Annotated__element.key + + @util.memoized_property + def info(self): + return self._Annotated__element.info + + @util.memoized_property + def anon_label(self): + return self._Annotated__element.anon_label diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/expression.py b/lib/python3.4/site-packages/sqlalchemy/sql/expression.py new file mode 100644 index 0000000..832779c --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/expression.py @@ -0,0 +1,137 @@ +# sql/expression.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Defines the public namespace for SQL expression constructs. + +Prior to version 0.9, this module contained all of "elements", "dml", +"default_comparator" and "selectable". The module was broken up +and most "factory" functions were moved to be grouped with their associated +class. + +""" + +__all__ = [ + 'Alias', 'ClauseElement', 'ColumnCollection', 'ColumnElement', + 'CompoundSelect', 'Delete', 'FromClause', 'Insert', 'Join', 'Select', + 'Selectable', 'TableClause', 'Update', 'alias', 'and_', 'asc', 'between', + 'bindparam', 'case', 'cast', 'column', 'delete', 'desc', 'distinct', + 'except_', 'except_all', 'exists', 'extract', 'func', 'modifier', + 'collate', 'insert', 'intersect', 'intersect_all', 'join', 'label', + 'literal', 'literal_column', 'not_', 'null', 'nullsfirst', 'nullslast', + 'or_', 'outparam', 'outerjoin', 'over', 'select', 'subquery', + 'table', 'text', + 'tuple_', 'type_coerce', 'union', 'union_all', 'update'] + + +from .visitors import Visitable +from .functions import func, modifier, FunctionElement, Function +from ..util.langhelpers import public_factory +from .elements import ClauseElement, ColumnElement,\ + BindParameter, UnaryExpression, BooleanClauseList, \ + Label, Cast, Case, ColumnClause, TextClause, Over, Null, \ + True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \ + Grouping, not_, \ + collate, literal_column, between,\ + literal, outparam, type_coerce, ClauseList, FunctionFilter + +from .elements import SavepointClause, RollbackToSavepointClause, \ + ReleaseSavepointClause + +from .base import ColumnCollection, Generative, Executable, \ + PARSE_AUTOCOMMIT + +from .selectable import Alias, Join, Select, Selectable, TableClause, \ + CompoundSelect, CTE, FromClause, FromGrouping, SelectBase, \ + alias, GenerativeSelect, \ + subquery, HasPrefixes, HasSuffixes, Exists, ScalarSelect, TextAsFrom + + +from .dml import Insert, Update, Delete, UpdateBase, ValuesBase + +# factory functions - these pull class-bound constructors and classmethods +# from SQL elements and selectables into public functions. This allows +# the functions to be available in the sqlalchemy.sql.* namespace and +# to be auto-cross-documenting from the function to the class itself. 
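+#
+# An illustrative aside (added commentary, not part of the upstream
+# source): public_factory exposes a class-bound constructor as a plain
+# module-level function, so that, assuming column expressions ``a`` and
+# ``b``, the following two calls build the same construct:
+#
+#     and_(a == 1, b == 2)                    # module-level factory
+#     BooleanClauseList.and_(a == 1, b == 2)  # underlying classmethod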
+ +and_ = public_factory(BooleanClauseList.and_, ".expression.and_") +or_ = public_factory(BooleanClauseList.or_, ".expression.or_") +bindparam = public_factory(BindParameter, ".expression.bindparam") +select = public_factory(Select, ".expression.select") +text = public_factory(TextClause._create_text, ".expression.text") +table = public_factory(TableClause, ".expression.table") +column = public_factory(ColumnClause, ".expression.column") +over = public_factory(Over, ".expression.over") +label = public_factory(Label, ".expression.label") +case = public_factory(Case, ".expression.case") +cast = public_factory(Cast, ".expression.cast") +extract = public_factory(Extract, ".expression.extract") +tuple_ = public_factory(Tuple, ".expression.tuple_") +except_ = public_factory(CompoundSelect._create_except, ".expression.except_") +except_all = public_factory( + CompoundSelect._create_except_all, ".expression.except_all") +intersect = public_factory( + CompoundSelect._create_intersect, ".expression.intersect") +intersect_all = public_factory( + CompoundSelect._create_intersect_all, ".expression.intersect_all") +union = public_factory(CompoundSelect._create_union, ".expression.union") +union_all = public_factory( + CompoundSelect._create_union_all, ".expression.union_all") +exists = public_factory(Exists, ".expression.exists") +nullsfirst = public_factory( + UnaryExpression._create_nullsfirst, ".expression.nullsfirst") +nullslast = public_factory( + UnaryExpression._create_nullslast, ".expression.nullslast") +asc = public_factory(UnaryExpression._create_asc, ".expression.asc") +desc = public_factory(UnaryExpression._create_desc, ".expression.desc") +distinct = public_factory( + UnaryExpression._create_distinct, ".expression.distinct") +true = public_factory(True_._instance, ".expression.true") +false = public_factory(False_._instance, ".expression.false") +null = public_factory(Null._instance, ".expression.null") +join = public_factory(Join._create_join, ".expression.join") +outerjoin = public_factory(Join._create_outerjoin, ".expression.outerjoin") +insert = public_factory(Insert, ".expression.insert") +update = public_factory(Update, ".expression.update") +delete = public_factory(Delete, ".expression.delete") +funcfilter = public_factory( + FunctionFilter, ".expression.funcfilter") + + +# internal functions still being called from tests and the ORM, +# these might be better off in some other namespace +from .base import _from_objects +from .elements import _literal_as_text, _clause_element_as_expr,\ + _is_column, _labeled, _only_column_elements, _string_or_unprintable, \ + _truncated_label, _clone, _cloned_difference, _cloned_intersection,\ + _column_as_key, _literal_as_binds, _select_iterables, \ + _corresponding_column_or_error, _literal_as_label_reference, \ + _expression_literal_as_text +from .selectable import _interpret_as_from + + +# old names for compatibility +_Executable = Executable +_BindParamClause = BindParameter +_Label = Label +_SelectBase = SelectBase +_BinaryExpression = BinaryExpression +_Cast = Cast +_Null = Null +_False = False_ +_True = True_ +_TextClause = TextClause +_UnaryExpression = UnaryExpression +_Case = Case +_Tuple = Tuple +_Over = Over +_Generative = Generative +_TypeClause = TypeClause +_Extract = Extract +_Exists = Exists +_Grouping = Grouping +_FromGrouping = FromGrouping +_ScalarSelect = ScalarSelect diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/functions.py b/lib/python3.4/site-packages/sqlalchemy/sql/functions.py new file mode 100644 index 
0000000..50c1ef0 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/functions.py @@ -0,0 +1,618 @@ +# sql/functions.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""SQL function API, factories, and built-in functions. + +""" +from . import sqltypes, schema +from .base import Executable, ColumnCollection +from .elements import ClauseList, Cast, Extract, _literal_as_binds, \ + literal_column, _type_from_args, ColumnElement, _clone,\ + Over, BindParameter, FunctionFilter +from .selectable import FromClause, Select, Alias + +from . import operators +from .visitors import VisitableType +from .. import util +from . import annotation + +_registry = util.defaultdict(dict) + + +def register_function(identifier, fn, package="_default"): + """Associate a callable with a particular func. name. + + This is normally called by _GenericMeta, but is also + available by itself so that a non-Function construct + can be associated with the :data:`.func` accessor (i.e. + CAST, EXTRACT). + + """ + reg = _registry[package] + reg[identifier] = fn + + +class FunctionElement(Executable, ColumnElement, FromClause): + """Base for SQL function-oriented constructs. + + .. seealso:: + + :class:`.Function` - named SQL function. + + :data:`.func` - namespace which produces registered or ad-hoc + :class:`.Function` instances. + + :class:`.GenericFunction` - allows creation of registered function + types. + + """ + + packagenames = () + + def __init__(self, *clauses, **kwargs): + """Construct a :class:`.FunctionElement`. + """ + args = [_literal_as_binds(c, self.name) for c in clauses] + self.clause_expr = ClauseList( + operator=operators.comma_op, + group_contents=True, *args).\ + self_group() + + def _execute_on_connection(self, connection, multiparams, params): + return connection._execute_function(self, multiparams, params) + + @property + def columns(self): + """The set of columns exported by this :class:`.FunctionElement`. + + Function objects currently have no result column names built in; + this method returns a single-element column collection with + an anonymously named column. + + An interim approach to providing named columns for a function + as a FROM clause is to build a :func:`.select` with the + desired columns:: + + from sqlalchemy.sql import column + + stmt = select([column('x'), column('y')]).\ + select_from(func.myfunction()) + + + """ + return ColumnCollection(self.label(None)) + + @util.memoized_property + def clauses(self): + """Return the underlying :class:`.ClauseList` which contains + the arguments for this :class:`.FunctionElement`. + + """ + return self.clause_expr.element + + def over(self, partition_by=None, order_by=None): + """Produce an OVER clause against this function. + + Used against aggregate or so-called "window" functions, + for database backends that support window functions. + + The expression:: + + func.row_number().over(order_by='x') + + is shorthand for:: + + from sqlalchemy import over + over(func.row_number(), order_by='x') + + See :func:`~.expression.over` for a full description. + + .. versionadded:: 0.7 + + """ + return Over(self, partition_by=partition_by, order_by=order_by) + + def filter(self, *criterion): + """Produce a FILTER clause against this function. + + Used against aggregate and window functions, + for database backends that support the "FILTER" clause. 
+ + The expression:: + + func.count(1).filter(True) + + is shorthand for:: + + from sqlalchemy import funcfilter + funcfilter(func.count(1), True) + + .. versionadded:: 1.0.0 + + .. seealso:: + + :class:`.FunctionFilter` + + :func:`.funcfilter` + + + """ + if not criterion: + return self + return FunctionFilter(self, *criterion) + + @property + def _from_objects(self): + return self.clauses._from_objects + + def get_children(self, **kwargs): + return self.clause_expr, + + def _copy_internals(self, clone=_clone, **kw): + self.clause_expr = clone(self.clause_expr, **kw) + self._reset_exported() + FunctionElement.clauses._reset(self) + + def alias(self, name=None, flat=False): + """Produce a :class:`.Alias` construct against this + :class:`.FunctionElement`. + + This construct wraps the function in a named alias which + is suitable for the FROM clause. + + e.g.:: + + from sqlalchemy.sql import column + + stmt = select([column('data')]).select_from( + func.unnest(Table.data).alias('data_view') + ) + + Would produce: + + .. sourcecode:: sql + + SELECT data + FROM unnest(sometable.data) AS data_view + + .. versionadded:: 0.9.8 The :meth:`.FunctionElement.alias` method + is now supported. Previously, this method's behavior was + undefined and did not behave consistently across versions. + + """ + + return Alias(self, name) + + def select(self): + """Produce a :func:`~.expression.select` construct + against this :class:`.FunctionElement`. + + This is shorthand for:: + + s = select([function_element]) + + """ + s = Select([self]) + if self._execution_options: + s = s.execution_options(**self._execution_options) + return s + + def scalar(self): + """Execute this :class:`.FunctionElement` against an embedded + 'bind' and return a scalar value. + + This first calls :meth:`~.FunctionElement.select` to + produce a SELECT construct. + + Note that :class:`.FunctionElement` can be passed to + the :meth:`.Connectable.scalar` method of :class:`.Connection` + or :class:`.Engine`. + + """ + return self.select().execute().scalar() + + def execute(self): + """Execute this :class:`.FunctionElement` against an embedded + 'bind'. + + This first calls :meth:`~.FunctionElement.select` to + produce a SELECT construct. + + Note that :class:`.FunctionElement` can be passed to + the :meth:`.Connectable.execute` method of :class:`.Connection` + or :class:`.Engine`. 
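+
+        A short sketch of that form (assuming an ``engine`` object is
+        available)::
+
+            value = engine.execute(func.current_timestamp()).scalar()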
+ + """ + return self.select().execute() + + def _bind_param(self, operator, obj): + return BindParameter(None, obj, _compared_to_operator=operator, + _compared_to_type=self.type, unique=True) + + +class _FunctionGenerator(object): + """Generate :class:`.Function` objects based on getattr calls.""" + + def __init__(self, **opts): + self.__names = [] + self.opts = opts + + def __getattr__(self, name): + # passthru __ attributes; fixes pydoc + if name.startswith('__'): + try: + return self.__dict__[name] + except KeyError: + raise AttributeError(name) + + elif name.endswith('_'): + name = name[0:-1] + f = _FunctionGenerator(**self.opts) + f.__names = list(self.__names) + [name] + return f + + def __call__(self, *c, **kwargs): + o = self.opts.copy() + o.update(kwargs) + + tokens = len(self.__names) + + if tokens == 2: + package, fname = self.__names + elif tokens == 1: + package, fname = "_default", self.__names[0] + else: + package = None + + if package is not None: + func = _registry[package].get(fname) + if func is not None: + return func(*c, **o) + + return Function(self.__names[-1], + packagenames=self.__names[0:-1], *c, **o) + + +func = _FunctionGenerator() +"""Generate SQL function expressions. + + :data:`.func` is a special object instance which generates SQL + functions based on name-based attributes, e.g.:: + + >>> print func.count(1) + count(:param_1) + + The element is a column-oriented SQL element like any other, and is + used in that way:: + + >>> print select([func.count(table.c.id)]) + SELECT count(sometable.id) FROM sometable + + Any name can be given to :data:`.func`. If the function name is unknown to + SQLAlchemy, it will be rendered exactly as is. For common SQL functions + which SQLAlchemy is aware of, the name may be interpreted as a *generic + function* which will be compiled appropriately to the target database:: + + >>> print func.current_timestamp() + CURRENT_TIMESTAMP + + To call functions which are present in dot-separated packages, + specify them in the same manner:: + + >>> print func.stats.yield_curve(5, 10) + stats.yield_curve(:yield_curve_1, :yield_curve_2) + + SQLAlchemy can be made aware of the return type of functions to enable + type-specific lexical and result-based behavior. For example, to ensure + that a string-based function returns a Unicode value and is similarly + treated as a string in expressions, specify + :class:`~sqlalchemy.types.Unicode` as the type: + + >>> print func.my_string(u'hi', type_=Unicode) + ' ' + \ + ... func.my_string(u'there', type_=Unicode) + my_string(:my_string_1) || :my_string_2 || my_string(:my_string_3) + + The object returned by a :data:`.func` call is usually an instance of + :class:`.Function`. + This object meets the "column" interface, including comparison and labeling + functions. The object can also be passed the :meth:`~.Connectable.execute` + method of a :class:`.Connection` or :class:`.Engine`, where it will be + wrapped inside of a SELECT statement first:: + + print connection.execute(func.current_timestamp()).scalar() + + In a few exception cases, the :data:`.func` accessor + will redirect a name to a built-in expression such as :func:`.cast` + or :func:`.extract`, as these names have well-known meaning + but are not exactly the same as "functions" from a SQLAlchemy + perspective. + + .. versionadded:: 0.8 :data:`.func` can return non-function expression + constructs for common quasi-functional names like :func:`.cast` + and :func:`.extract`. 
+ + Functions which are interpreted as "generic" functions know how to + calculate their return type automatically. For a listing of known generic + functions, see :ref:`generic_functions`. + + .. note:: + + The :data:`.func` construct has only limited support for calling + standalone "stored procedures", especially those with special + parameterization concerns. + + See the section :ref:`stored_procedures` for details on how to use + the DBAPI-level ``callproc()`` method for fully traditional stored + procedures. + +""" + +modifier = _FunctionGenerator(group=False) + + +class Function(FunctionElement): + """Describe a named SQL function. + + See the superclass :class:`.FunctionElement` for a description + of public methods. + + .. seealso:: + + :data:`.func` - namespace which produces registered or ad-hoc + :class:`.Function` instances. + + :class:`.GenericFunction` - allows creation of registered function + types. + + """ + + __visit_name__ = 'function' + + def __init__(self, name, *clauses, **kw): + """Construct a :class:`.Function`. + + The :data:`.func` construct is normally used to construct + new :class:`.Function` instances. + + """ + self.packagenames = kw.pop('packagenames', None) or [] + self.name = name + self._bind = kw.get('bind', None) + self.type = sqltypes.to_instance(kw.get('type_', None)) + + FunctionElement.__init__(self, *clauses, **kw) + + def _bind_param(self, operator, obj): + return BindParameter(self.name, obj, + _compared_to_operator=operator, + _compared_to_type=self.type, + unique=True) + + +class _GenericMeta(VisitableType): + def __init__(cls, clsname, bases, clsdict): + if annotation.Annotated not in cls.__mro__: + cls.name = name = clsdict.get('name', clsname) + cls.identifier = identifier = clsdict.get('identifier', name) + package = clsdict.pop('package', '_default') + # legacy + if '__return_type__' in clsdict: + cls.type = clsdict['__return_type__'] + register_function(identifier, cls, package) + super(_GenericMeta, cls).__init__(clsname, bases, clsdict) + + +class GenericFunction(util.with_metaclass(_GenericMeta, Function)): + """Define a 'generic' function. + + A generic function is a pre-established :class:`.Function` + class that is instantiated automatically when called + by name from the :data:`.func` attribute. Note that + calling any name from :data:`.func` has the effect that + a new :class:`.Function` instance is created automatically, + given that name. The primary use case for defining + a :class:`.GenericFunction` class is so that a function + of a particular name may be given a fixed return type. + It can also include custom argument parsing schemes as well + as additional methods. + + Subclasses of :class:`.GenericFunction` are automatically + registered under the name of the class. For + example, a user-defined function ``as_utc()`` would + be available immediately:: + + from sqlalchemy.sql.functions import GenericFunction + from sqlalchemy.types import DateTime + + class as_utc(GenericFunction): + type = DateTime + + print select([func.as_utc()]) + + User-defined generic functions can be organized into + packages by specifying the "package" attribute when defining + :class:`.GenericFunction`. Third party libraries + containing many functions may want to use this in order + to avoid name conflicts with other systems. 
For example, + if our ``as_utc()`` function were part of a package + "time":: + + class as_utc(GenericFunction): + type = DateTime + package = "time" + + The above function would be available from :data:`.func` + using the package name ``time``:: + + print select([func.time.as_utc()]) + + A final option is to allow the function to be accessed + from one name in :data:`.func` but to render as a different name. + The ``identifier`` attribute will override the name used to + access the function as loaded from :data:`.func`, but will retain + the usage of ``name`` as the rendered name:: + + class GeoBuffer(GenericFunction): + type = Geometry + package = "geo" + name = "ST_Buffer" + identifier = "buffer" + + The above function will render as follows:: + + >>> print func.geo.buffer() + ST_Buffer() + + .. versionadded:: 0.8 :class:`.GenericFunction` now supports + automatic registration of new functions as well as package + and custom naming support. + + .. versionchanged:: 0.8 The attribute name ``type`` is used + to specify the function's return type at the class level. + Previously, the name ``__return_type__`` was used. This + name is still recognized for backwards-compatibility. + + """ + + coerce_arguments = True + + def __init__(self, *args, **kwargs): + parsed_args = kwargs.pop('_parsed_args', None) + if parsed_args is None: + parsed_args = [_literal_as_binds(c) for c in args] + self.packagenames = [] + self._bind = kwargs.get('bind', None) + self.clause_expr = ClauseList( + operator=operators.comma_op, + group_contents=True, *parsed_args).self_group() + self.type = sqltypes.to_instance( + kwargs.pop("type_", None) or getattr(self, 'type', None)) + +register_function("cast", Cast) +register_function("extract", Extract) + + +class next_value(GenericFunction): + """Represent the 'next value', given a :class:`.Sequence` + as its single argument. + + Compiles into the appropriate function on each backend, + or will raise NotImplementedError if used on a backend + that does not provide support for sequences. + + """ + type = sqltypes.Integer() + name = "next_value" + + def __init__(self, seq, **kw): + assert isinstance(seq, schema.Sequence), \ + "next_value() accepts a Sequence object as input." + self._bind = kw.get('bind', None) + self.sequence = seq + + @property + def _from_objects(self): + return [] + + +class AnsiFunction(GenericFunction): + def __init__(self, **kwargs): + GenericFunction.__init__(self, **kwargs) + + +class ReturnTypeFromArgs(GenericFunction): + """Define a function whose return type is the same as its arguments.""" + + def __init__(self, *args, **kwargs): + args = [_literal_as_binds(c) for c in args] + kwargs.setdefault('type_', _type_from_args(args)) + kwargs['_parsed_args'] = args + GenericFunction.__init__(self, *args, **kwargs) + + +class coalesce(ReturnTypeFromArgs): + pass + + +class max(ReturnTypeFromArgs): + pass + + +class min(ReturnTypeFromArgs): + pass + + +class sum(ReturnTypeFromArgs): + pass + + +class now(GenericFunction): + type = sqltypes.DateTime + + +class concat(GenericFunction): + type = sqltypes.String + + +class char_length(GenericFunction): + type = sqltypes.Integer + + def __init__(self, arg, **kwargs): + GenericFunction.__init__(self, arg, **kwargs) + + +class random(GenericFunction): + pass + + +class count(GenericFunction): + """The ANSI COUNT aggregate function. With no arguments, + emits COUNT \*. 
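+
+    A usage sketch (assuming a ``users`` table)::
+
+        from sqlalchemy import func, select
+        stmt = select([func.count(users.c.id)])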
+ + """ + type = sqltypes.Integer + + def __init__(self, expression=None, **kwargs): + if expression is None: + expression = literal_column('*') + GenericFunction.__init__(self, expression, **kwargs) + + +class current_date(AnsiFunction): + type = sqltypes.Date + + +class current_time(AnsiFunction): + type = sqltypes.Time + + +class current_timestamp(AnsiFunction): + type = sqltypes.DateTime + + +class current_user(AnsiFunction): + type = sqltypes.String + + +class localtime(AnsiFunction): + type = sqltypes.DateTime + + +class localtimestamp(AnsiFunction): + type = sqltypes.DateTime + + +class session_user(AnsiFunction): + type = sqltypes.String + + +class sysdate(AnsiFunction): + type = sqltypes.DateTime + + +class user(AnsiFunction): + type = sqltypes.String diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/naming.py b/lib/python3.4/site-packages/sqlalchemy/sql/naming.py new file mode 100644 index 0000000..2a1a832 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/naming.py @@ -0,0 +1,146 @@ +# sqlalchemy/naming.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Establish constraint and index naming conventions. + + +""" + +from .schema import Constraint, ForeignKeyConstraint, PrimaryKeyConstraint, \ + UniqueConstraint, CheckConstraint, Index, Table, Column +from .. import event, events +from .. import exc +from .elements import _truncated_label, _defer_name, _defer_none_name, conv +import re + + +class ConventionDict(object): + + def __init__(self, const, table, convention): + self.const = const + self._is_fk = isinstance(const, ForeignKeyConstraint) + self.table = table + self.convention = convention + self._const_name = const.name + + def _key_table_name(self): + return self.table.name + + def _column_X(self, idx): + if self._is_fk: + fk = self.const.elements[idx] + return fk.parent + else: + return list(self.const.columns)[idx] + + def _key_constraint_name(self): + if isinstance(self._const_name, (type(None), _defer_none_name)): + raise exc.InvalidRequestError( + "Naming convention including " + "%(constraint_name)s token requires that " + "constraint is explicitly named." 
+ ) + if not isinstance(self._const_name, conv): + self.const.name = None + return self._const_name + + def _key_column_X_name(self, idx): + return self._column_X(idx).name + + def _key_column_X_label(self, idx): + return self._column_X(idx)._label + + def _key_referred_table_name(self): + fk = self.const.elements[0] + refs = fk.target_fullname.split(".") + if len(refs) == 3: + refschema, reftable, refcol = refs + else: + reftable, refcol = refs + return reftable + + def _key_referred_column_X_name(self, idx): + fk = self.const.elements[idx] + refs = fk.target_fullname.split(".") + if len(refs) == 3: + refschema, reftable, refcol = refs + else: + reftable, refcol = refs + return refcol + + def __getitem__(self, key): + if key in self.convention: + return self.convention[key](self.const, self.table) + elif hasattr(self, '_key_%s' % key): + return getattr(self, '_key_%s' % key)() + else: + col_template = re.match(r".*_?column_(\d+)_.+", key) + if col_template: + idx = col_template.group(1) + attr = "_key_" + key.replace(idx, "X") + idx = int(idx) + if hasattr(self, attr): + return getattr(self, attr)(idx) + raise KeyError(key) + +_prefix_dict = { + Index: "ix", + PrimaryKeyConstraint: "pk", + CheckConstraint: "ck", + UniqueConstraint: "uq", + ForeignKeyConstraint: "fk" +} + + +def _get_convention(dict_, key): + + for super_ in key.__mro__: + if super_ in _prefix_dict and _prefix_dict[super_] in dict_: + return dict_[_prefix_dict[super_]] + elif super_ in dict_: + return dict_[super_] + else: + return None + + +def _constraint_name_for_table(const, table): + metadata = table.metadata + convention = _get_convention(metadata.naming_convention, type(const)) + + if isinstance(const.name, conv): + return const.name + elif convention is not None and \ + not isinstance(const.name, conv) and \ + ( + const.name is None or + "constraint_name" in convention or + isinstance(const.name, _defer_name)): + return conv( + convention % ConventionDict(const, table, + metadata.naming_convention) + ) + elif isinstance(convention, _defer_none_name): + return None + + +@event.listens_for(Constraint, "after_parent_attach") +@event.listens_for(Index, "after_parent_attach") +def _constraint_name(const, table): + if isinstance(table, Column): + # for column-attached constraint, set another event + # to link the column attached to the table as this constraint + # associated with the table. + event.listen(table, "after_parent_attach", + lambda col, table: _constraint_name(const, table) + ) + elif isinstance(table, Table): + if isinstance(const.name, (conv, _defer_name)): + return + + newname = _constraint_name_for_table(const, table) + if newname is not None: + const.name = newname diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/operators.py b/lib/python3.4/site-packages/sqlalchemy/sql/operators.py new file mode 100644 index 0000000..5e2900d --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/operators.py @@ -0,0 +1,905 @@ +# sql/operators.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Defines operators used in SQL expressions.""" + +from .. 
import util + + +from operator import ( + and_, or_, inv, add, mul, sub, mod, truediv, lt, le, ne, gt, ge, eq, neg, + getitem, lshift, rshift, contains +) + +if util.py2k: + from operator import div +else: + div = truediv + + +class Operators(object): + """Base of comparison and logical operators. + + Implements base methods + :meth:`~sqlalchemy.sql.operators.Operators.operate` and + :meth:`~sqlalchemy.sql.operators.Operators.reverse_operate`, as well as + :meth:`~sqlalchemy.sql.operators.Operators.__and__`, + :meth:`~sqlalchemy.sql.operators.Operators.__or__`, + :meth:`~sqlalchemy.sql.operators.Operators.__invert__`. + + Usually is used via its most common subclass + :class:`.ColumnOperators`. + + """ + __slots__ = () + + def __and__(self, other): + """Implement the ``&`` operator. + + When used with SQL expressions, results in an + AND operation, equivalent to + :func:`~.expression.and_`, that is:: + + a & b + + is equivalent to:: + + from sqlalchemy import and_ + and_(a, b) + + Care should be taken when using ``&`` regarding + operator precedence; the ``&`` operator has the highest precedence. + The operands should be enclosed in parenthesis if they contain + further sub expressions:: + + (a == 2) & (b == 4) + + """ + return self.operate(and_, other) + + def __or__(self, other): + """Implement the ``|`` operator. + + When used with SQL expressions, results in an + OR operation, equivalent to + :func:`~.expression.or_`, that is:: + + a | b + + is equivalent to:: + + from sqlalchemy import or_ + or_(a, b) + + Care should be taken when using ``|`` regarding + operator precedence; the ``|`` operator has the highest precedence. + The operands should be enclosed in parenthesis if they contain + further sub expressions:: + + (a == 2) | (b == 4) + + """ + return self.operate(or_, other) + + def __invert__(self): + """Implement the ``~`` operator. + + When used with SQL expressions, results in a + NOT operation, equivalent to + :func:`~.expression.not_`, that is:: + + ~a + + is equivalent to:: + + from sqlalchemy import not_ + not_(a) + + """ + return self.operate(inv) + + def op(self, opstring, precedence=0, is_comparison=False): + """produce a generic operator function. + + e.g.:: + + somecolumn.op("*")(5) + + produces:: + + somecolumn * 5 + + This function can also be used to make bitwise operators explicit. For + example:: + + somecolumn.op('&')(0xff) + + is a bitwise AND of the value in ``somecolumn``. + + :param operator: a string which will be output as the infix operator + between this element and the expression passed to the + generated function. + + :param precedence: precedence to apply to the operator, when + parenthesizing expressions. A lower number will cause the expression + to be parenthesized when applied against another operator with + higher precedence. The default value of ``0`` is lower than all + operators except for the comma (``,``) and ``AS`` operators. + A value of 100 will be higher or equal to all operators, and -100 + will be lower than or equal to all operators. + + .. versionadded:: 0.8 - added the 'precedence' argument. + + :param is_comparison: if True, the operator will be considered as a + "comparison" operator, that is which evaluates to a boolean + true/false value, like ``==``, ``>``, etc. This flag should be set + so that ORM relationships can establish that the operator is a + comparison operator when used in a custom join condition. + + .. versionadded:: 0.9.2 - added the + :paramref:`.Operators.op.is_comparison` flag. + + .. 
seealso:: + + :ref:`types_operators` + + :ref:`relationship_custom_operator` + + """ + operator = custom_op(opstring, precedence, is_comparison) + + def against(other): + return operator(self, other) + return against + + def operate(self, op, *other, **kwargs): + """Operate on an argument. + + This is the lowest level of operation, raises + :class:`NotImplementedError` by default. + + Overriding this on a subclass can allow common + behavior to be applied to all operations. + For example, overriding :class:`.ColumnOperators` + to apply ``func.lower()`` to the left and right + side:: + + class MyComparator(ColumnOperators): + def operate(self, op, other): + return op(func.lower(self), func.lower(other)) + + :param op: Operator callable. + :param \*other: the 'other' side of the operation. Will + be a single scalar for most operations. + :param \**kwargs: modifiers. These may be passed by special + operators such as :meth:`ColumnOperators.contains`. + + + """ + raise NotImplementedError(str(op)) + + def reverse_operate(self, op, other, **kwargs): + """Reverse operate on an argument. + + Usage is the same as :meth:`operate`. + + """ + raise NotImplementedError(str(op)) + + +class custom_op(object): + """Represent a 'custom' operator. + + :class:`.custom_op` is normally instantitated when the + :meth:`.ColumnOperators.op` method is used to create a + custom operator callable. The class can also be used directly + when programmatically constructing expressions. E.g. + to represent the "factorial" operation:: + + from sqlalchemy.sql import UnaryExpression + from sqlalchemy.sql import operators + from sqlalchemy import Numeric + + unary = UnaryExpression(table.c.somecolumn, + modifier=operators.custom_op("!"), + type_=Numeric) + + """ + __name__ = 'custom_op' + + def __init__(self, opstring, precedence=0, is_comparison=False): + self.opstring = opstring + self.precedence = precedence + self.is_comparison = is_comparison + + def __eq__(self, other): + return isinstance(other, custom_op) and \ + other.opstring == self.opstring + + def __hash__(self): + return id(self) + + def __call__(self, left, right, **kw): + return left.operate(self, right, **kw) + + +class ColumnOperators(Operators): + """Defines boolean, comparison, and other operators for + :class:`.ColumnElement` expressions. + + By default, all methods call down to + :meth:`.operate` or :meth:`.reverse_operate`, + passing in the appropriate operator function from the + Python builtin ``operator`` module or + a SQLAlchemy-specific operator function from + :mod:`sqlalchemy.expression.operators`. For example + the ``__eq__`` function:: + + def __eq__(self, other): + return self.operate(operators.eq, other) + + Where ``operators.eq`` is essentially:: + + def eq(a, b): + return a == b + + The core column expression unit :class:`.ColumnElement` + overrides :meth:`.Operators.operate` and others + to return further :class:`.ColumnElement` constructs, + so that the ``==`` operation above is replaced by a clause + construct. + + See also: + + :ref:`types_operators` + + :attr:`.TypeEngine.comparator_factory` + + :class:`.ColumnOperators` + + :class:`.PropComparator` + + """ + + __slots__ = () + + timetuple = None + """Hack, allows datetime objects to be compared on the LHS.""" + + def __lt__(self, other): + """Implement the ``<`` operator. + + In a column context, produces the clause ``a < b``. + + """ + return self.operate(lt, other) + + def __le__(self, other): + """Implement the ``<=`` operator. 
+ + In a column context, produces the clause ``a <= b``. + + """ + return self.operate(le, other) + + __hash__ = Operators.__hash__ + + def __eq__(self, other): + """Implement the ``==`` operator. + + In a column context, produces the clause ``a = b``. + If the target is ``None``, produces ``a IS NULL``. + + """ + return self.operate(eq, other) + + def __ne__(self, other): + """Implement the ``!=`` operator. + + In a column context, produces the clause ``a != b``. + If the target is ``None``, produces ``a IS NOT NULL``. + + """ + return self.operate(ne, other) + + def __gt__(self, other): + """Implement the ``>`` operator. + + In a column context, produces the clause ``a > b``. + + """ + return self.operate(gt, other) + + def __ge__(self, other): + """Implement the ``>=`` operator. + + In a column context, produces the clause ``a >= b``. + + """ + return self.operate(ge, other) + + def __neg__(self): + """Implement the ``-`` operator. + + In a column context, produces the clause ``-a``. + + """ + return self.operate(neg) + + def __contains__(self, other): + return self.operate(contains, other) + + def __getitem__(self, index): + """Implement the [] operator. + + This can be used by some database-specific types + such as Postgresql ARRAY and HSTORE. + + """ + return self.operate(getitem, index) + + def __lshift__(self, other): + """implement the << operator. + + Not used by SQLAlchemy core, this is provided + for custom operator systems which want to use + << as an extension point. + """ + return self.operate(lshift, other) + + def __rshift__(self, other): + """implement the >> operator. + + Not used by SQLAlchemy core, this is provided + for custom operator systems which want to use + >> as an extension point. + """ + return self.operate(rshift, other) + + def concat(self, other): + """Implement the 'concat' operator. + + In a column context, produces the clause ``a || b``, + or uses the ``concat()`` operator on MySQL. + + """ + return self.operate(concat_op, other) + + def like(self, other, escape=None): + """Implement the ``like`` operator. + + In a column context, produces the clause ``a LIKE other``. + + E.g.:: + + select([sometable]).where(sometable.c.column.like("%foobar%")) + + :param other: expression to be compared + :param escape: optional escape character, renders the ``ESCAPE`` + keyword, e.g.:: + + somecolumn.like("foo/%bar", escape="/") + + .. seealso:: + + :meth:`.ColumnOperators.ilike` + + """ + return self.operate(like_op, other, escape=escape) + + def ilike(self, other, escape=None): + """Implement the ``ilike`` operator. + + In a column context, produces the clause ``a ILIKE other``. + + E.g.:: + + select([sometable]).where(sometable.c.column.ilike("%foobar%")) + + :param other: expression to be compared + :param escape: optional escape character, renders the ``ESCAPE`` + keyword, e.g.:: + + somecolumn.ilike("foo/%bar", escape="/") + + .. seealso:: + + :meth:`.ColumnOperators.like` + + """ + return self.operate(ilike_op, other, escape=escape) + + def in_(self, other): + """Implement the ``in`` operator. + + In a column context, produces the clause ``a IN other``. + "other" may be a tuple/list of column expressions, + or a :func:`~.expression.select` construct. + + """ + return self.operate(in_op, other) + + def notin_(self, other): + """implement the ``NOT IN`` operator. + + This is equivalent to using negation with + :meth:`.ColumnOperators.in_`, i.e. ``~x.in_(y)``. + + .. versionadded:: 0.8 + + .. 
seealso::
+
+            :meth:`.ColumnOperators.in_`
+
+        """
+        return self.operate(notin_op, other)
+
+    def notlike(self, other, escape=None):
+        """implement the ``NOT LIKE`` operator.
+
+        This is equivalent to using negation with
+        :meth:`.ColumnOperators.like`, i.e. ``~x.like(y)``.
+
+        .. versionadded:: 0.8
+
+        .. seealso::
+
+            :meth:`.ColumnOperators.like`
+
+        """
+        return self.operate(notlike_op, other, escape=escape)
+
+    def notilike(self, other, escape=None):
+        """implement the ``NOT ILIKE`` operator.
+
+        This is equivalent to using negation with
+        :meth:`.ColumnOperators.ilike`, i.e. ``~x.ilike(y)``.
+
+        .. versionadded:: 0.8
+
+        .. seealso::
+
+            :meth:`.ColumnOperators.ilike`
+
+        """
+        return self.operate(notilike_op, other, escape=escape)
+
+    def is_(self, other):
+        """Implement the ``IS`` operator.
+
+        Normally, ``IS`` is generated automatically when comparing to a
+        value of ``None``, which resolves to ``NULL``. However, explicit
+        usage of ``IS`` may be desirable if comparing to boolean values
+        on certain platforms.
+
+        .. versionadded:: 0.7.9
+
+        .. seealso:: :meth:`.ColumnOperators.isnot`
+
+        """
+        return self.operate(is_, other)
+
+    def isnot(self, other):
+        """Implement the ``IS NOT`` operator.
+
+        Normally, ``IS NOT`` is generated automatically when comparing to a
+        value of ``None``, which resolves to ``NULL``. However, explicit
+        usage of ``IS NOT`` may be desirable if comparing to boolean values
+        on certain platforms.
+
+        .. versionadded:: 0.7.9
+
+        .. seealso:: :meth:`.ColumnOperators.is_`
+
+        """
+        return self.operate(isnot, other)
+
+    def startswith(self, other, **kwargs):
+        """Implement the ``startswith`` operator.
+
+        In a column context, produces the clause ``LIKE '<other>%'``
+
+        """
+        return self.operate(startswith_op, other, **kwargs)
+
+    def endswith(self, other, **kwargs):
+        """Implement the 'endswith' operator.
+
+        In a column context, produces the clause ``LIKE '%<other>'``
+
+        """
+        return self.operate(endswith_op, other, **kwargs)
+
+    def contains(self, other, **kwargs):
+        """Implement the 'contains' operator.
+
+        In a column context, produces the clause ``LIKE '%<other>%'``
+
+        """
+        return self.operate(contains_op, other, **kwargs)
+
+    def match(self, other, **kwargs):
+        """Implements a database-specific 'match' operator.
+
+        :meth:`~.ColumnOperators.match` attempts to resolve to
+        a MATCH-like function or operator provided by the backend.
+        Examples include:
+
+        * Postgresql - renders ``x @@ to_tsquery(y)``
+        * MySQL - renders ``MATCH (x) AGAINST (y IN BOOLEAN MODE)``
+        * Oracle - renders ``CONTAINS(x, y)``
+        * other backends may provide special implementations.
+        * Backends without any special implementation will emit
+          the operator as "MATCH". This is compatible with SQLite, for
+          example.
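+
+        A usage sketch (rendering shown for the Postgresql backend;
+        other backends differ as listed above)::
+
+            stmt = select([sometable]).where(
+                sometable.c.description.match('sqlalchemy')
+            )
+            # e.g. Postgresql: ... WHERE description @@ to_tsquery(:description_1)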
+ + """ + return self.operate(match_op, other, **kwargs) + + def desc(self): + """Produce a :func:`~.expression.desc` clause against the + parent object.""" + return self.operate(desc_op) + + def asc(self): + """Produce a :func:`~.expression.asc` clause against the + parent object.""" + return self.operate(asc_op) + + def nullsfirst(self): + """Produce a :func:`~.expression.nullsfirst` clause against the + parent object.""" + return self.operate(nullsfirst_op) + + def nullslast(self): + """Produce a :func:`~.expression.nullslast` clause against the + parent object.""" + return self.operate(nullslast_op) + + def collate(self, collation): + """Produce a :func:`~.expression.collate` clause against + the parent object, given the collation string.""" + return self.operate(collate, collation) + + def __radd__(self, other): + """Implement the ``+`` operator in reverse. + + See :meth:`.ColumnOperators.__add__`. + + """ + return self.reverse_operate(add, other) + + def __rsub__(self, other): + """Implement the ``-`` operator in reverse. + + See :meth:`.ColumnOperators.__sub__`. + + """ + return self.reverse_operate(sub, other) + + def __rmul__(self, other): + """Implement the ``*`` operator in reverse. + + See :meth:`.ColumnOperators.__mul__`. + + """ + return self.reverse_operate(mul, other) + + def __rdiv__(self, other): + """Implement the ``/`` operator in reverse. + + See :meth:`.ColumnOperators.__div__`. + + """ + return self.reverse_operate(div, other) + + def __rmod__(self, other): + """Implement the ``%`` operator in reverse. + + See :meth:`.ColumnOperators.__mod__`. + + """ + return self.reverse_operate(mod, other) + + def between(self, cleft, cright, symmetric=False): + """Produce a :func:`~.expression.between` clause against + the parent object, given the lower and upper range. + + """ + return self.operate(between_op, cleft, cright, symmetric=symmetric) + + def distinct(self): + """Produce a :func:`~.expression.distinct` clause against the + parent object. + + """ + return self.operate(distinct_op) + + def __add__(self, other): + """Implement the ``+`` operator. + + In a column context, produces the clause ``a + b`` + if the parent object has non-string affinity. + If the parent object has a string affinity, + produces the concatenation operator, ``a || b`` - + see :meth:`.ColumnOperators.concat`. + + """ + return self.operate(add, other) + + def __sub__(self, other): + """Implement the ``-`` operator. + + In a column context, produces the clause ``a - b``. + + """ + return self.operate(sub, other) + + def __mul__(self, other): + """Implement the ``*`` operator. + + In a column context, produces the clause ``a * b``. + + """ + return self.operate(mul, other) + + def __div__(self, other): + """Implement the ``/`` operator. + + In a column context, produces the clause ``a / b``. + + """ + return self.operate(div, other) + + def __mod__(self, other): + """Implement the ``%`` operator. + + In a column context, produces the clause ``a % b``. + + """ + return self.operate(mod, other) + + def __truediv__(self, other): + """Implement the ``//`` operator. + + In a column context, produces the clause ``a / b``. + + """ + return self.operate(truediv, other) + + def __rtruediv__(self, other): + """Implement the ``//`` operator in reverse. + + See :meth:`.ColumnOperators.__truediv__`. 
+ + """ + return self.reverse_operate(truediv, other) + + +def from_(): + raise NotImplementedError() + + +def as_(): + raise NotImplementedError() + + +def exists(): + raise NotImplementedError() + + +def istrue(a): + raise NotImplementedError() + + +def isfalse(a): + raise NotImplementedError() + + +def is_(a, b): + return a.is_(b) + + +def isnot(a, b): + return a.isnot(b) + + +def collate(a, b): + return a.collate(b) + + +def op(a, opstring, b): + return a.op(opstring)(b) + + +def like_op(a, b, escape=None): + return a.like(b, escape=escape) + + +def notlike_op(a, b, escape=None): + return a.notlike(b, escape=escape) + + +def ilike_op(a, b, escape=None): + return a.ilike(b, escape=escape) + + +def notilike_op(a, b, escape=None): + return a.notilike(b, escape=escape) + + +def between_op(a, b, c, symmetric=False): + return a.between(b, c, symmetric=symmetric) + + +def notbetween_op(a, b, c, symmetric=False): + return a.notbetween(b, c, symmetric=symmetric) + + +def in_op(a, b): + return a.in_(b) + + +def notin_op(a, b): + return a.notin_(b) + + +def distinct_op(a): + return a.distinct() + + +def startswith_op(a, b, escape=None): + return a.startswith(b, escape=escape) + + +def notstartswith_op(a, b, escape=None): + return ~a.startswith(b, escape=escape) + + +def endswith_op(a, b, escape=None): + return a.endswith(b, escape=escape) + + +def notendswith_op(a, b, escape=None): + return ~a.endswith(b, escape=escape) + + +def contains_op(a, b, escape=None): + return a.contains(b, escape=escape) + + +def notcontains_op(a, b, escape=None): + return ~a.contains(b, escape=escape) + + +def match_op(a, b, **kw): + return a.match(b, **kw) + + +def notmatch_op(a, b, **kw): + return a.notmatch(b, **kw) + + +def comma_op(a, b): + raise NotImplementedError() + + +def concat_op(a, b): + return a.concat(b) + + +def desc_op(a): + return a.desc() + + +def asc_op(a): + return a.asc() + + +def nullsfirst_op(a): + return a.nullsfirst() + + +def nullslast_op(a): + return a.nullslast() + + +_commutative = set([eq, ne, add, mul]) + +_comparison = set([eq, ne, lt, gt, ge, le, between_op, like_op]) + + +def is_comparison(op): + return op in _comparison or \ + isinstance(op, custom_op) and op.is_comparison + + +def is_commutative(op): + return op in _commutative + + +def is_ordering_modifier(op): + return op in (asc_op, desc_op, + nullsfirst_op, nullslast_op) + +_associative = _commutative.union([concat_op, and_, or_]) + +_natural_self_precedent = _associative.union([getitem]) +"""Operators where if we have (a op b) op c, we don't want to +parenthesize (a op b). 
+ +""" + +_asbool = util.symbol('_asbool', canonical=-10) +_smallest = util.symbol('_smallest', canonical=-100) +_largest = util.symbol('_largest', canonical=100) + +_PRECEDENCE = { + from_: 15, + getitem: 15, + mul: 8, + truediv: 8, + div: 8, + mod: 8, + neg: 8, + add: 7, + sub: 7, + + concat_op: 6, + match_op: 6, + notmatch_op: 6, + + ilike_op: 6, + notilike_op: 6, + like_op: 6, + notlike_op: 6, + in_op: 6, + notin_op: 6, + + is_: 6, + isnot: 6, + + eq: 5, + ne: 5, + gt: 5, + lt: 5, + ge: 5, + le: 5, + + between_op: 5, + notbetween_op: 5, + distinct_op: 5, + inv: 5, + istrue: 5, + isfalse: 5, + and_: 3, + or_: 2, + comma_op: -1, + + desc_op: 3, + asc_op: 3, + collate: 4, + + as_: -1, + exists: 0, + _asbool: -10, + _smallest: _smallest, + _largest: _largest +} + + +def is_precedent(operator, against): + if operator is against and operator in _natural_self_precedent: + return False + else: + return (_PRECEDENCE.get(operator, + getattr(operator, 'precedence', _smallest)) <= + _PRECEDENCE.get(against, + getattr(against, 'precedence', _largest))) diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/schema.py b/lib/python3.4/site-packages/sqlalchemy/sql/schema.py new file mode 100644 index 0000000..c122ee4 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/schema.py @@ -0,0 +1,3787 @@ +# sql/schema.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""The schema module provides the building blocks for database metadata. + +Each element within this module describes a database entity which can be +created and dropped, or is otherwise part of such an entity. Examples include +tables, columns, sequences, and indexes. + +All entities are subclasses of :class:`~sqlalchemy.schema.SchemaItem`, and as +defined in this module they are intended to be agnostic of any vendor-specific +constructs. + +A collection of entities are grouped into a unit called +:class:`~sqlalchemy.schema.MetaData`. MetaData serves as a logical grouping of +schema elements, and can also be associated with an actual database connection +such that operations involving the contained elements can contact the database +as needed. + +Two of the elements here also build upon their "syntactic" counterparts, which +are defined in :class:`~sqlalchemy.sql.expression.`, specifically +:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.Column`. +Since these objects are part of the SQL expression language, they are usable +as components in SQL expressions. + +""" +from __future__ import absolute_import + +import inspect +from .. import exc, util, event, inspection +from .base import SchemaEventTarget, DialectKWArgs +from . import visitors +from . import type_api +from .base import _bind_or_error, ColumnCollection +from .elements import ClauseElement, ColumnClause, _truncated_label, \ + _as_truncated, TextClause, _literal_as_text,\ + ColumnElement, _find_columns, quoted_name +from .selectable import TableClause +import collections +import sqlalchemy +from . import ddl +import types + +RETAIN_SCHEMA = util.symbol('retain_schema') + + +def _get_table_key(name, schema): + if schema is None: + return name + else: + return schema + "." 
+ name + + +@inspection._self_inspects +class SchemaItem(SchemaEventTarget, visitors.Visitable): + """Base class for items that define a database schema.""" + + __visit_name__ = 'schema_item' + + def _execute_on_connection(self, connection, multiparams, params): + return connection._execute_default(self, multiparams, params) + + def _init_items(self, *args): + """Initialize the list of child items for this SchemaItem.""" + + for item in args: + if item is not None: + item._set_parent_with_dispatch(self) + + def get_children(self, **kwargs): + """used to allow SchemaVisitor access""" + return [] + + def __repr__(self): + return util.generic_repr(self, omit_kwarg=['info']) + + @property + @util.deprecated('0.9', 'Use ``.name.quote``') + def quote(self): + """Return the value of the ``quote`` flag passed + to this schema object, for those schema items which + have a ``name`` field. + + """ + + return self.name.quote + + @util.memoized_property + def info(self): + """Info dictionary associated with the object, allowing user-defined + data to be associated with this :class:`.SchemaItem`. + + The dictionary is automatically generated when first accessed. + It can also be specified in the constructor of some objects, + such as :class:`.Table` and :class:`.Column`. + + """ + return {} + + def _schema_item_copy(self, schema_item): + if 'info' in self.__dict__: + schema_item.info = self.info.copy() + schema_item.dispatch._update(self.dispatch) + return schema_item + + +class Table(DialectKWArgs, SchemaItem, TableClause): + """Represent a table in a database. + + e.g.:: + + mytable = Table("mytable", metadata, + Column('mytable_id', Integer, primary_key=True), + Column('value', String(50)) + ) + + The :class:`.Table` object constructs a unique instance of itself based + on its name and optional schema name within the given + :class:`.MetaData` object. Calling the :class:`.Table` + constructor with the same name and same :class:`.MetaData` argument + a second time will return the *same* :class:`.Table` object - in this way + the :class:`.Table` constructor acts as a registry function. + + .. seealso:: + + :ref:`metadata_describing` - Introduction to database metadata + + Constructor arguments are as follows: + + :param name: The name of this table as represented in the database. + + The table name, along with the value of the ``schema`` parameter, + forms a key which uniquely identifies this :class:`.Table` within + the owning :class:`.MetaData` collection. + Additional calls to :class:`.Table` with the same name, metadata, + and schema name will return the same :class:`.Table` object. + + Names which contain no upper case characters + will be treated as case insensitive names, and will not be quoted + unless they are a reserved word or contain special characters. + A name with any number of upper case characters is considered + to be case sensitive, and will be sent as quoted. + + To enable unconditional quoting for the table name, specify the flag + ``quote=True`` to the constructor, or use the :class:`.quoted_name` + construct to specify the name. + + :param metadata: a :class:`.MetaData` object which will contain this + table. The metadata is used as a point of association of this table + with other tables which are referenced via foreign key. It also + may be used to associate this table with a particular + :class:`.Connectable`. + + :param \*args: Additional positional arguments are used primarily + to add the list of :class:`.Column` objects contained within this + table. 
Similar to the style of a CREATE TABLE statement, other + :class:`.SchemaItem` constructs may be added here, including + :class:`.PrimaryKeyConstraint`, and :class:`.ForeignKeyConstraint`. + + :param autoload: Defaults to False, unless :paramref:`.Table.autoload_with` + is set in which case it defaults to True; :class:`.Column` objects + for this table should be reflected from the database, possibly + augmenting or replacing existing :class:`.Column` objects that were + expicitly specified. + + .. versionchanged:: 1.0.0 setting the :paramref:`.Table.autoload_with` + parameter implies that :paramref:`.Table.autoload` will default + to True. + + .. seealso:: + + :ref:`metadata_reflection_toplevel` + + :param autoload_replace: Defaults to ``True``; when using + :paramref:`.Table.autoload` + in conjunction with :paramref:`.Table.extend_existing`, indicates + that :class:`.Column` objects present in the already-existing + :class:`.Table` object should be replaced with columns of the same + name retrieved from the autoload process. When ``False``, columns + already present under existing names will be omitted from the + reflection process. + + Note that this setting does not impact :class:`.Column` objects + specified programmatically within the call to :class:`.Table` that + also is autoloading; those :class:`.Column` objects will always + replace existing columns of the same name when + :paramref:`.Table.extend_existing` is ``True``. + + .. versionadded:: 0.7.5 + + .. seealso:: + + :paramref:`.Table.autoload` + + :paramref:`.Table.extend_existing` + + :param autoload_with: An :class:`.Engine` or :class:`.Connection` object + with which this :class:`.Table` object will be reflected; when + set to a non-None value, it implies that :paramref:`.Table.autoload` + is ``True``. If left unset, but :paramref:`.Table.autoload` is + explicitly set to ``True``, an autoload operation will attempt to + proceed by locating an :class:`.Engine` or :class:`.Connection` bound + to the underlying :class:`.MetaData` object. + + .. seealso:: + + :paramref:`.Table.autoload` + + :param extend_existing: When ``True``, indicates that if this + :class:`.Table` is already present in the given :class:`.MetaData`, + apply further arguments within the constructor to the existing + :class:`.Table`. + + If :paramref:`.Table.extend_existing` or + :paramref:`.Table.keep_existing` are not set, and the given name + of the new :class:`.Table` refers to a :class:`.Table` that is + already present in the target :class:`.MetaData` collection, and + this :class:`.Table` specifies additional columns or other constructs + or flags that modify the table's state, an + error is raised. The purpose of these two mutually-exclusive flags + is to specify what action should be taken when a :class:`.Table` + is specified that matches an existing :class:`.Table`, yet specifies + additional constructs. + + :paramref:`.Table.extend_existing` will also work in conjunction + with :paramref:`.Table.autoload` to run a new reflection + operation against the database, even if a :class:`.Table` + of the same name is already present in the target + :class:`.MetaData`; newly reflected :class:`.Column` objects + and other options will be added into the state of the + :class:`.Table`, potentially overwriting existing columns + and options of the same name. + + .. versionchanged:: 0.7.4 :paramref:`.Table.extend_existing` will + invoke a new reflection operation when combined with + :paramref:`.Table.autoload` set to True. 
+
+        As is always the case with :paramref:`.Table.autoload`,
+        :class:`.Column` objects can be specified in the same :class:`.Table`
+        constructor, which will take precedence.  Below, the existing
+        table ``mytable`` will be augmented with :class:`.Column` objects
+        both reflected from the database, as well as the given :class:`.Column`
+        named "y"::
+
+            Table("mytable", metadata,
+                  Column('y', Integer),
+                  extend_existing=True,
+                  autoload=True,
+                  autoload_with=engine
+                  )
+
+        .. seealso::
+
+            :paramref:`.Table.autoload`
+
+            :paramref:`.Table.autoload_replace`
+
+            :paramref:`.Table.keep_existing`
+
+
+    :param implicit_returning: True by default - indicates that
+        RETURNING can be used by default to fetch newly inserted primary key
+        values, for backends which support this.  Note that
+        create_engine() also provides an implicit_returning flag.
+
+    :param include_columns: A list of strings indicating a subset of
+        columns to be loaded via the ``autoload`` operation; table columns
+        which aren't present in this list will not be represented on the
+        resulting ``Table`` object.  Defaults to ``None`` which indicates
+        all columns should be reflected.
+
+    :param info: Optional data dictionary which will be populated into the
+        :attr:`.SchemaItem.info` attribute of this object.
+
+    :param keep_existing: When ``True``, indicates that if this Table
+        is already present in the given :class:`.MetaData`, ignore
+        further arguments within the constructor to the existing
+        :class:`.Table`, and return the :class:`.Table` object as
+        originally created. This is to allow a function that wishes
+        to define a new :class:`.Table` on first call, but on
+        subsequent calls will return the same :class:`.Table`,
+        without any of the declarations (particularly constraints)
+        being applied a second time.
+
+        If :paramref:`.Table.extend_existing` or
+        :paramref:`.Table.keep_existing` are not set, and the given name
+        of the new :class:`.Table` refers to a :class:`.Table` that is
+        already present in the target :class:`.MetaData` collection, and
+        this :class:`.Table` specifies additional columns or other constructs
+        or flags that modify the table's state, an
+        error is raised.  The purpose of these two mutually-exclusive flags
+        is to specify what action should be taken when a :class:`.Table`
+        is specified that matches an existing :class:`.Table`, yet specifies
+        additional constructs.
+
+        .. seealso::
+
+            :paramref:`.Table.extend_existing`
+
+    :param listeners: A list of tuples of the form ``(<eventname>, <fn>)``
+        which will be passed to :func:`.event.listen` upon construction.
+        This alternate hook to :func:`.event.listen` allows the establishment
+        of a listener function specific to this :class:`.Table` before
+        the "autoload" process begins.  Particularly useful for
+        the :meth:`.DDLEvents.column_reflect` event::
+
+            def listen_for_reflect(table, column_info):
+                "handle the column reflection event"
+                # ...
+
+            t = Table(
+                'sometable',
+                autoload=True,
+                listeners=[
+                    ('column_reflect', listen_for_reflect)
+                ])
+
+    :param mustexist: When ``True``, indicates that this Table must already
+        be present in the given :class:`.MetaData` collection, else
+        an exception is raised.
+
+    :param prefixes:
+        A list of strings to insert after CREATE in the CREATE TABLE
+        statement.  They will be separated by spaces.
+
+    :param quote: Force quoting of this table's name on or off, corresponding
+        to ``True`` or ``False``.  When left at its default of ``None``,
+        the column identifier will be quoted according to whether the name is
+        case sensitive (identifiers with at least one upper case character are
+        treated as case sensitive), or if it's a reserved word.  This flag
+        is only needed to force quoting of a reserved word which is not known
+        by the SQLAlchemy dialect.
+
+    :param quote_schema: same as 'quote' but applies to the schema identifier.
+
+    :param schema: The schema name for this table, which is required if
+        the table resides in a schema other than the default selected schema
+        for the engine's database connection.  Defaults to ``None``.
+
+        The quoting rules for the schema name are the same as those for the
+        ``name`` parameter, in that quoting is applied for reserved words or
+        case-sensitive names; to enable unconditional quoting for the
+        schema name, specify the flag
+        ``quote_schema=True`` to the constructor, or use the
+        :class:`.quoted_name` construct to specify the name.
+
+    :param useexisting: Deprecated.  Use :paramref:`.Table.extend_existing`.
+
+    :param \**kw: Additional keyword arguments not mentioned above are
+        dialect specific, and passed in the form ``<dialectname>_<argname>``.
+        See the documentation regarding an individual dialect at
+        :ref:`dialect_toplevel` for detail on documented arguments.
+
+    """
+
+    __visit_name__ = 'table'
+
+    def __new__(cls, *args, **kw):
+        if not args:
+            # python3k pickle seems to call this
+            return object.__new__(cls)
+
+        try:
+            name, metadata, args = args[0], args[1], args[2:]
+        except IndexError:
+            raise TypeError("Table() takes at least two arguments")
+
+        schema = kw.get('schema', None)
+        if schema is None:
+            schema = metadata.schema
+        keep_existing = kw.pop('keep_existing', False)
+        extend_existing = kw.pop('extend_existing', False)
+        if 'useexisting' in kw:
+            msg = "useexisting is deprecated.  Use extend_existing."
+            util.warn_deprecated(msg)
+            if extend_existing:
+                msg = "useexisting is synonymous with extend_existing."
+                raise exc.ArgumentError(msg)
+            extend_existing = kw.pop('useexisting', False)
+
+        if keep_existing and extend_existing:
+            msg = "keep_existing and extend_existing are mutually exclusive."
+            raise exc.ArgumentError(msg)
+
+        mustexist = kw.pop('mustexist', False)
+        key = _get_table_key(name, schema)
+        if key in metadata.tables:
+            if not keep_existing and not extend_existing and bool(args):
+                raise exc.InvalidRequestError(
+                    "Table '%s' is already defined for this MetaData "
+                    "instance.  Specify 'extend_existing=True' "
+                    "to redefine "
+                    "options and columns on an "
+                    "existing Table object." % key)
+            table = metadata.tables[key]
+            if extend_existing:
+                table._init_existing(*args, **kw)
+            return table
+        else:
+            if mustexist:
+                raise exc.InvalidRequestError(
+                    "Table '%s' not defined" % (key))
+            table = object.__new__(cls)
+            table.dispatch.before_parent_attach(table, metadata)
+            metadata._add_table(name, schema, table)
+            try:
+                table._init(name, metadata, *args, **kw)
+                table.dispatch.after_parent_attach(table, metadata)
+                return table
+            except:
+                with util.safe_reraise():
+                    metadata._remove_table(name, schema)
+
+    @property
+    @util.deprecated('0.9', 'Use ``table.schema.quote``')
+    def quote_schema(self):
+        """Return the value of the ``quote_schema`` flag passed
+        to this :class:`.Table`.
+        """
+
+        return self.schema.quote
+
+    def __init__(self, *args, **kw):
+        """Constructor for :class:`~.schema.Table`.
+
+        This method is a no-op.   See the top-level
+        documentation for :class:`~.schema.Table`
+        for constructor arguments.
+ + """ + # __init__ is overridden to prevent __new__ from + # calling the superclass constructor. + + def _init(self, name, metadata, *args, **kwargs): + super(Table, self).__init__( + quoted_name(name, kwargs.pop('quote', None))) + self.metadata = metadata + + self.schema = kwargs.pop('schema', None) + if self.schema is None: + self.schema = metadata.schema + else: + quote_schema = kwargs.pop('quote_schema', None) + self.schema = quoted_name(self.schema, quote_schema) + + self.indexes = set() + self.constraints = set() + self._columns = ColumnCollection() + PrimaryKeyConstraint()._set_parent_with_dispatch(self) + self.foreign_keys = set() + self._extra_dependencies = set() + if self.schema is not None: + self.fullname = "%s.%s" % (self.schema, self.name) + else: + self.fullname = self.name + + autoload_with = kwargs.pop('autoload_with', None) + autoload = kwargs.pop('autoload', autoload_with is not None) + # this argument is only used with _init_existing() + kwargs.pop('autoload_replace', True) + include_columns = kwargs.pop('include_columns', None) + + self.implicit_returning = kwargs.pop('implicit_returning', True) + + if 'info' in kwargs: + self.info = kwargs.pop('info') + if 'listeners' in kwargs: + listeners = kwargs.pop('listeners') + for evt, fn in listeners: + event.listen(self, evt, fn) + + self._prefixes = kwargs.pop('prefixes', []) + + self._extra_kwargs(**kwargs) + + # load column definitions from the database if 'autoload' is defined + # we do it after the table is in the singleton dictionary to support + # circular foreign keys + if autoload: + self._autoload(metadata, autoload_with, include_columns) + + # initialize all the column, etc. objects. done after reflection to + # allow user-overrides + self._init_items(*args) + + def _autoload(self, metadata, autoload_with, include_columns, + exclude_columns=()): + + if autoload_with: + autoload_with.run_callable( + autoload_with.dialect.reflecttable, + self, include_columns, exclude_columns + ) + else: + bind = _bind_or_error( + metadata, + msg="No engine is bound to this Table's MetaData. " + "Pass an engine to the Table via " + "autoload_with=, " + "or associate the MetaData with an engine via " + "metadata.bind=") + bind.run_callable( + bind.dialect.reflecttable, + self, include_columns, exclude_columns + ) + + @property + def _sorted_constraints(self): + """Return the set of constraints as a list, sorted by creation + order. + + """ + return sorted(self.constraints, key=lambda c: c._creation_order) + + @property + def foreign_key_constraints(self): + """:class:`.ForeignKeyConstraint` objects referred to by this + :class:`.Table`. + + This list is produced from the collection of :class:`.ForeignKey` + objects currently associated. + + .. 
versionadded:: 1.0.0 + + """ + return set(fkc.constraint for fkc in self.foreign_keys) + + def _init_existing(self, *args, **kwargs): + autoload_with = kwargs.pop('autoload_with', None) + autoload = kwargs.pop('autoload', autoload_with is not None) + autoload_replace = kwargs.pop('autoload_replace', True) + schema = kwargs.pop('schema', None) + if schema and schema != self.schema: + raise exc.ArgumentError( + "Can't change schema of existing table from '%s' to '%s'", + (self.schema, schema)) + + include_columns = kwargs.pop('include_columns', None) + + if include_columns is not None: + for c in self.c: + if c.name not in include_columns: + self._columns.remove(c) + + for key in ('quote', 'quote_schema'): + if key in kwargs: + raise exc.ArgumentError( + "Can't redefine 'quote' or 'quote_schema' arguments") + + if 'info' in kwargs: + self.info = kwargs.pop('info') + + if autoload: + if not autoload_replace: + exclude_columns = [c.name for c in self.c] + else: + exclude_columns = () + self._autoload( + self.metadata, autoload_with, + include_columns, exclude_columns) + + self._extra_kwargs(**kwargs) + self._init_items(*args) + + def _extra_kwargs(self, **kwargs): + self._validate_dialect_kwargs(kwargs) + + def _init_collections(self): + pass + + @util.memoized_property + def _autoincrement_column(self): + for col in self.primary_key: + if (col.autoincrement and col.type._type_affinity is not None and + issubclass(col.type._type_affinity, + type_api.INTEGERTYPE._type_affinity) and + (not col.foreign_keys or + col.autoincrement == 'ignore_fk') and + isinstance(col.default, (type(None), Sequence)) and + (col.server_default is None or + col.server_default.reflected)): + return col + + @property + def key(self): + """Return the 'key' for this :class:`.Table`. + + This value is used as the dictionary key within the + :attr:`.MetaData.tables` collection. It is typically the same + as that of :attr:`.Table.name` for a table with no + :attr:`.Table.schema` set; otherwise it is typically of the form + ``schemaname.tablename``. + + """ + return _get_table_key(self.name, self.schema) + + def __repr__(self): + return "Table(%s)" % ', '.join( + [repr(self.name)] + [repr(self.metadata)] + + [repr(x) for x in self.columns] + + ["%s=%s" % (k, repr(getattr(self, k))) for k in ['schema']]) + + def __str__(self): + return _get_table_key(self.description, self.schema) + + @property + def bind(self): + """Return the connectable associated with this Table.""" + + return self.metadata and self.metadata.bind or None + + def add_is_dependent_on(self, table): + """Add a 'dependency' for this Table. + + This is another Table object which must be created + first before this one can, or dropped after this one. + + Usually, dependencies between tables are determined via + ForeignKey objects. However, for other situations that + create dependencies outside of foreign keys (rules, inheriting), + this method can manually establish such a link. + + """ + self._extra_dependencies.add(table) + + def append_column(self, column): + """Append a :class:`~.schema.Column` to this :class:`~.schema.Table`. + + The "key" of the newly added :class:`~.schema.Column`, i.e. the + value of its ``.key`` attribute, will then be available + in the ``.c`` collection of this :class:`~.schema.Table`, and the + column definition will be included in any CREATE TABLE, SELECT, + UPDATE, etc. statements generated from this :class:`~.schema.Table` + construct. 
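+
+        For example (an illustrative sketch; ``mytable`` is assumed to be
+        an existing :class:`~.schema.Table`)::
+
+            mytable.append_column(Column('z', Integer))
+            'z' in mytable.c  # True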
+
+        Note that this does **not** change the definition of the table
+        as it exists within any underlying database, assuming that
+        table has already been created in the database.  Relational
+        databases support the addition of columns to existing tables
+        using the SQL ALTER command, which would need to be
+        emitted for an already-existing table that doesn't contain
+        the newly added column.
+
+        """
+
+        column._set_parent_with_dispatch(self)
+
+    def append_constraint(self, constraint):
+        """Append a :class:`~.schema.Constraint` to this
+        :class:`~.schema.Table`.
+
+        This has the effect of the constraint being included in any
+        future CREATE TABLE statement, assuming specific DDL creation
+        events have not been associated with the given
+        :class:`~.schema.Constraint` object.
+
+        Note that this does **not** produce the constraint within the
+        relational database automatically, for a table that already exists
+        in the database.   To add a constraint to an
+        existing relational database table, the SQL ALTER command must
+        be used.  SQLAlchemy also provides the
+        :class:`.AddConstraint` construct which can produce this SQL when
+        invoked as an executable clause.
+
+        """
+
+        constraint._set_parent_with_dispatch(self)
+
+    def append_ddl_listener(self, event_name, listener):
+        """Append a DDL event listener to this ``Table``.
+
+        .. deprecated:: 0.7
+            See :class:`.DDLEvents`.
+
+        """
+
+        def adapt_listener(target, connection, **kw):
+            listener(event_name, target, connection)
+
+        event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
+
+    def _set_parent(self, metadata):
+        metadata._add_table(self.name, self.schema, self)
+        self.metadata = metadata
+
+    def get_children(self, column_collections=True,
+                     schema_visitor=False, **kw):
+        if not schema_visitor:
+            return TableClause.get_children(
+                self, column_collections=column_collections, **kw)
+        else:
+            if column_collections:
+                return list(self.columns)
+            else:
+                return []
+
+    def exists(self, bind=None):
+        """Return True if this table exists."""
+
+        if bind is None:
+            bind = _bind_or_error(self)
+
+        return bind.run_callable(bind.dialect.has_table,
+                                 self.name, schema=self.schema)
+
+    def create(self, bind=None, checkfirst=False):
+        """Issue a ``CREATE`` statement for this
+        :class:`.Table`, using the given :class:`.Connectable`
+        for connectivity.
+
+        .. seealso::
+
+            :meth:`.MetaData.create_all`.
+
+        """
+
+        if bind is None:
+            bind = _bind_or_error(self)
+        bind._run_visitor(ddl.SchemaGenerator,
+                          self,
+                          checkfirst=checkfirst)
+
+    def drop(self, bind=None, checkfirst=False):
+        """Issue a ``DROP`` statement for this
+        :class:`.Table`, using the given :class:`.Connectable`
+        for connectivity.
+
+        .. seealso::
+
+            :meth:`.MetaData.drop_all`.
+
+        """
+        if bind is None:
+            bind = _bind_or_error(self)
+        bind._run_visitor(ddl.SchemaDropper,
+                          self,
+                          checkfirst=checkfirst)
+
+    def tometadata(self, metadata, schema=RETAIN_SCHEMA,
+                   referred_schema_fn=None, name=None):
+        """Return a copy of this :class:`.Table` associated with a different
+        :class:`.MetaData`.
+
+        E.g.::
+
+            m1 = MetaData()
+
+            user = Table('user', m1, Column('id', Integer, primary_key=True))
+
+            m2 = MetaData()
+            user_copy = user.tometadata(m2)
+
+        :param metadata: Target :class:`.MetaData` object, into which the
+            new :class:`.Table` object will be created.
+
+        :param schema: optional string name indicating the target schema.
+            Defaults to the special symbol :attr:`.RETAIN_SCHEMA` which
+            indicates that no change to the schema name should be made in the
+            new :class:`.Table`.
If set to a string name, the new :class:`.Table` + will have this new name as the ``.schema``. If set to ``None``, the + schema will be set to that of the schema set on the target + :class:`.MetaData`, which is typically ``None`` as well, unless + set explicitly:: + + m2 = MetaData(schema='newschema') + + # user_copy_one will have "newschema" as the schema name + user_copy_one = user.tometadata(m2, schema=None) + + m3 = MetaData() # schema defaults to None + + # user_copy_two will have None as the schema name + user_copy_two = user.tometadata(m3, schema=None) + + :param referred_schema_fn: optional callable which can be supplied + in order to provide for the schema name that should be assigned + to the referenced table of a :class:`.ForeignKeyConstraint`. + The callable accepts this parent :class:`.Table`, the + target schema that we are changing to, the + :class:`.ForeignKeyConstraint` object, and the existing + "target schema" of that constraint. The function should return the + string schema name that should be applied. + E.g.:: + + def referred_schema_fn(table, to_schema, + constraint, referred_schema): + if referred_schema == 'base_tables': + return referred_schema + else: + return to_schema + + new_table = table.tometadata(m2, schema="alt_schema", + referred_schema_fn=referred_schema_fn) + + .. versionadded:: 0.9.2 + + :param name: optional string name indicating the target table name. + If not specified or None, the table name is retained. This allows + a :class:`.Table` to be copied to the same :class:`.MetaData` target + with a new name. + + .. versionadded:: 1.0.0 + + """ + if name is None: + name = self.name + if schema is RETAIN_SCHEMA: + schema = self.schema + elif schema is None: + schema = metadata.schema + key = _get_table_key(name, schema) + if key in metadata.tables: + util.warn("Table '%s' already exists within the given " + "MetaData - not copying." % self.description) + return metadata.tables[key] + + args = [] + for c in self.columns: + args.append(c.copy(schema=schema)) + table = Table( + name, metadata, schema=schema, + *args, **self.kwargs + ) + for c in self.constraints: + if isinstance(c, ForeignKeyConstraint): + referred_schema = c._referred_schema + if referred_schema_fn: + fk_constraint_schema = referred_schema_fn( + self, schema, c, referred_schema) + else: + fk_constraint_schema = ( + schema if referred_schema == self.schema else None) + table.append_constraint( + c.copy(schema=fk_constraint_schema, target_table=table)) + + elif not c._type_bound: + table.append_constraint( + c.copy(schema=schema, target_table=table)) + for index in self.indexes: + # skip indexes that would be generated + # by the 'index' flag on Column + if len(index.columns) == 1 and \ + list(index.columns)[0].index: + continue + Index(index.name, + unique=index.unique, + *[table.c[col] for col in index.columns.keys()], + **index.kwargs) + return self._schema_item_copy(table) + + +class Column(SchemaItem, ColumnClause): + """Represents a column in a database table.""" + + __visit_name__ = 'column' + + def __init__(self, *args, **kwargs): + """ + Construct a new ``Column`` object. + + :param name: The name of this column as represented in the database. + This argument may be the first positional argument, or specified + via keyword. + + Names which contain no upper case characters + will be treated as case insensitive names, and will not be quoted + unless they are a reserved word. Names with any number of upper + case characters will be quoted and sent exactly. 
Note that this
+            behavior applies even for databases which standardize upper
+            case names as case insensitive such as Oracle.
+
+            The name field may be omitted at construction time and applied
+            later, at any time before the Column is associated with a
+            :class:`.Table`.  This is to support convenient
+            usage within the :mod:`~sqlalchemy.ext.declarative` extension.
+
+        :param type\_: The column's type, indicated using an instance which
+            subclasses :class:`~sqlalchemy.types.TypeEngine`.  If no arguments
+            are required for the type, the class of the type can be sent
+            as well, e.g.::
+
+                # use a type with arguments
+                Column('data', String(50))
+
+                # use no arguments
+                Column('level', Integer)
+
+            The ``type`` argument may be the second positional argument
+            or specified by keyword.
+
+            If the ``type`` is ``None`` or is omitted, it will first default to
+            the special type :class:`.NullType`.  If and when this
+            :class:`.Column` is made to refer to another column using
+            :class:`.ForeignKey` and/or :class:`.ForeignKeyConstraint`, the
+            type of the remote-referenced column will be copied to this column
+            as well, at the moment that the foreign key is resolved against
+            that remote :class:`.Column` object.
+
+            .. versionchanged:: 0.9.0
+                Support for propagation of type to a :class:`.Column` from its
+                :class:`.ForeignKey` object has been improved and should be
+                more reliable and timely.
+
+        :param \*args: Additional positional arguments include various
+            :class:`.SchemaItem` derived constructs which will be applied
+            as options to the column.  These include instances of
+            :class:`.Constraint`, :class:`.ForeignKey`, :class:`.ColumnDefault`,
+            and :class:`.Sequence`.  In some cases an equivalent keyword
+            argument is available such as ``server_default``, ``default``
+            and ``unique``.
+
+        :param autoincrement: This flag may be set to ``False`` to
+            indicate an integer primary key column that should not be
+            considered to be the "autoincrement" column, that is
+            the integer primary key column which generates values
+            implicitly upon INSERT and whose value is usually returned
+            via the DBAPI cursor.lastrowid attribute.   It defaults
+            to ``True`` to satisfy the common use case of a table
+            with a single integer primary key column.  If the table
+            has a composite primary key consisting of more than one
+            integer column, set this flag to True only on the
+            column that should be considered "autoincrement".
+
+            The setting *only* has an effect for columns which are:
+
+            * Integer derived (i.e. INT, SMALLINT, BIGINT).
+
+            * Part of the primary key
+
+            * Not referring to another column via :class:`.ForeignKey`, unless
+              the value is specified as ``'ignore_fk'``::
+
+                  # turn on autoincrement for this column despite
+                  # the ForeignKey()
+                  Column('id', ForeignKey('other.id'),
+                         primary_key=True, autoincrement='ignore_fk')
+
+              It is typically not desirable to have "autoincrement" enabled
+              on such a column as its value intends to mirror that of a
+              primary key column elsewhere.
+
+            * Have no server side or client side defaults (with the exception
+              of Postgresql SERIAL).
+
+            The setting has these two effects on columns that meet the
+            above criteria:
+
+            * DDL issued for the column will include database-specific
+              keywords intended to signify this column as an
+              "autoincrement" column, such as AUTO INCREMENT on MySQL,
+              SERIAL on Postgresql, and IDENTITY on MS-SQL.  It does
+              *not* issue AUTOINCREMENT for SQLite since this is a
+              special SQLite flag that is not required for autoincrementing
+              behavior.
+
+              .. seealso::
+
+                  :ref:`sqlite_autoincrement`
+
+            * The column will be considered to be available as
+              cursor.lastrowid or equivalent, for those dialects which
+              "post fetch" newly inserted identifiers after a row has
+              been inserted (SQLite, MySQL, MS-SQL).  It does not have
+              any effect in this regard for databases that use sequences
+              to generate primary key identifiers (i.e. Firebird, Postgresql,
+              Oracle).
+
+            .. versionchanged:: 0.7.4
+                ``autoincrement`` accepts a special value ``'ignore_fk'``
+                to indicate autoincrementing status regardless of foreign
+                key references.  This applies to certain composite foreign key
+                setups, such as the one demonstrated in the ORM documentation
+                at :ref:`post_update`.
+
+        :param default: A scalar, Python callable, or
+            :class:`.ColumnElement` expression representing the
+            *default value* for this column, which will be invoked upon insert
+            if this column is otherwise not specified in the VALUES clause of
+            the insert.  This is a shortcut to using :class:`.ColumnDefault`
+            as a positional argument; see that class for full detail on the
+            structure of the argument.
+
+            Contrast this argument to :paramref:`.Column.server_default`
+            which creates a default generator on the database side.
+
+            .. seealso::
+
+                :ref:`metadata_defaults_toplevel`
+
+        :param doc: optional String that can be used by the ORM or similar
+            to document attributes.   This attribute does not render SQL
+            comments (a future attribute 'comment' will achieve that).
+
+        :param key: An optional string identifier which will identify this
+            ``Column`` object on the :class:`.Table`.  When a key is provided,
+            this is the only identifier referencing the ``Column`` within the
+            application, including ORM attribute mapping; the ``name`` field
+            is used only when rendering SQL.
+
+        :param index: When ``True``, indicates that the column is indexed.
+            This is a shortcut for using a :class:`.Index` construct on the
+            table.  To specify indexes with explicit names or indexes that
+            contain multiple columns, use the :class:`.Index` construct
+            instead.
+
+        :param info: Optional data dictionary which will be populated into the
+            :attr:`.SchemaItem.info` attribute of this object.
+
+        :param nullable: If set to the default of ``True``, indicates the
+            column will be rendered as allowing NULL, else it's rendered as
+            NOT NULL.  This parameter is only used when issuing CREATE TABLE
+            statements.
+
+        :param onupdate: A scalar, Python callable, or
+            :class:`~sqlalchemy.sql.expression.ClauseElement` representing a
+            default value to be applied to the column within UPDATE
+            statements, which will be invoked upon update if this column is
+            not present in the SET clause of the update.  This is a shortcut
+            to using :class:`.ColumnDefault` as a positional argument with
+            ``for_update=True``.
+
+        :param primary_key: If ``True``, marks this column as a primary key
+            column.  Multiple columns can have this flag set to specify
+            composite primary keys.  As an alternative, the primary key of a
+            :class:`.Table` can be specified via an explicit
+            :class:`.PrimaryKeyConstraint` object.
+
+        :param server_default: A :class:`.FetchedValue` instance, str, Unicode
+            or :func:`~sqlalchemy.sql.expression.text` construct representing
+            the DDL DEFAULT value for the column.
+ + String types will be emitted as-is, surrounded by single quotes:: + + Column('x', Text, server_default="val") + + x TEXT DEFAULT 'val' + + A :func:`~sqlalchemy.sql.expression.text` expression will be + rendered as-is, without quotes:: + + Column('y', DateTime, server_default=text('NOW()')) + + y DATETIME DEFAULT NOW() + + Strings and text() will be converted into a + :class:`.DefaultClause` object upon initialization. + + Use :class:`.FetchedValue` to indicate that an already-existing + column will generate a default value on the database side which + will be available to SQLAlchemy for post-fetch after inserts. This + construct does not specify any DDL and the implementation is left + to the database, such as via a trigger. + + .. seealso:: + + :ref:`server_defaults` + + :param server_onupdate: A :class:`.FetchedValue` instance + representing a database-side default generation function. This + indicates to SQLAlchemy that a newly generated value will be + available after updates. This construct does not specify any DDL + and the implementation is left to the database, such as via a + trigger. + + :param quote: Force quoting of this column's name on or off, + corresponding to ``True`` or ``False``. When left at its default + of ``None``, the column identifier will be quoted according to + whether the name is case sensitive (identifiers with at least one + upper case character are treated as case sensitive), or if it's a + reserved word. This flag is only needed to force quoting of a + reserved word which is not known by the SQLAlchemy dialect. + + :param unique: When ``True``, indicates that this column contains a + unique constraint, or if ``index`` is ``True`` as well, indicates + that the :class:`.Index` should be created with the unique flag. + To specify multiple columns in the constraint/index or to specify + an explicit name, use the :class:`.UniqueConstraint` or + :class:`.Index` constructs explicitly. + + :param system: When ``True``, indicates this is a "system" column, + that is a column which is automatically made available by the + database, and should not be included in the columns list for a + ``CREATE TABLE`` statement. + + For more elaborate scenarios where columns should be + conditionally rendered differently on different backends, + consider custom compilation rules for :class:`.CreateColumn`. + + .. versionadded:: 0.8.3 Added the ``system=True`` parameter to + :class:`.Column`. 
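+
+        A short example pulling several of the parameters above together
+        (an illustrative sketch only; the names and types are hypothetical)::
+
+            Column('email', String(120), nullable=False, unique=True)
+            Column('status', String(20), server_default='active', index=True)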
+ + """ + + name = kwargs.pop('name', None) + type_ = kwargs.pop('type_', None) + args = list(args) + if args: + if isinstance(args[0], util.string_types): + if name is not None: + raise exc.ArgumentError( + "May not pass name positionally and as a keyword.") + name = args.pop(0) + if args: + coltype = args[0] + + if hasattr(coltype, "_sqla_type"): + if type_ is not None: + raise exc.ArgumentError( + "May not pass type_ positionally and as a keyword.") + type_ = args.pop(0) + + if name is not None: + name = quoted_name(name, kwargs.pop('quote', None)) + elif "quote" in kwargs: + raise exc.ArgumentError("Explicit 'name' is required when " + "sending 'quote' argument") + + super(Column, self).__init__(name, type_) + self.key = kwargs.pop('key', name) + self.primary_key = kwargs.pop('primary_key', False) + self.nullable = kwargs.pop('nullable', not self.primary_key) + self.default = kwargs.pop('default', None) + self.server_default = kwargs.pop('server_default', None) + self.server_onupdate = kwargs.pop('server_onupdate', None) + + # these default to None because .index and .unique is *not* + # an informational flag about Column - there can still be an + # Index or UniqueConstraint referring to this Column. + self.index = kwargs.pop('index', None) + self.unique = kwargs.pop('unique', None) + + self.system = kwargs.pop('system', False) + self.doc = kwargs.pop('doc', None) + self.onupdate = kwargs.pop('onupdate', None) + self.autoincrement = kwargs.pop('autoincrement', True) + self.constraints = set() + self.foreign_keys = set() + + # check if this Column is proxying another column + if '_proxies' in kwargs: + self._proxies = kwargs.pop('_proxies') + # otherwise, add DDL-related events + elif isinstance(self.type, SchemaEventTarget): + self.type._set_parent_with_dispatch(self) + + if self.default is not None: + if isinstance(self.default, (ColumnDefault, Sequence)): + args.append(self.default) + else: + if getattr(self.type, '_warn_on_bytestring', False): + if isinstance(self.default, util.binary_type): + util.warn( + "Unicode column '%s' has non-unicode " + "default value %r specified." % ( + self.key, + self.default + )) + args.append(ColumnDefault(self.default)) + + if self.server_default is not None: + if isinstance(self.server_default, FetchedValue): + args.append(self.server_default._as_for_update(False)) + else: + args.append(DefaultClause(self.server_default)) + + if self.onupdate is not None: + if isinstance(self.onupdate, (ColumnDefault, Sequence)): + args.append(self.onupdate) + else: + args.append(ColumnDefault(self.onupdate, for_update=True)) + + if self.server_onupdate is not None: + if isinstance(self.server_onupdate, FetchedValue): + args.append(self.server_onupdate._as_for_update(True)) + else: + args.append(DefaultClause(self.server_onupdate, + for_update=True)) + self._init_items(*args) + + util.set_creation_order(self) + + if 'info' in kwargs: + self.info = kwargs.pop('info') + + if kwargs: + raise exc.ArgumentError( + "Unknown arguments passed to Column: " + repr(list(kwargs))) + +# @property +# def quote(self): +# return getattr(self.name, "quote", None) + + def __str__(self): + if self.name is None: + return "(no name)" + elif self.table is not None: + if self.table.named_with_column: + return (self.table.description + "." 
+ self.description) + else: + return self.description + else: + return self.description + + def references(self, column): + """Return True if this Column references the given column via foreign + key.""" + + for fk in self.foreign_keys: + if fk.column.proxy_set.intersection(column.proxy_set): + return True + else: + return False + + def append_foreign_key(self, fk): + fk._set_parent_with_dispatch(self) + + def __repr__(self): + kwarg = [] + if self.key != self.name: + kwarg.append('key') + if self.primary_key: + kwarg.append('primary_key') + if not self.nullable: + kwarg.append('nullable') + if self.onupdate: + kwarg.append('onupdate') + if self.default: + kwarg.append('default') + if self.server_default: + kwarg.append('server_default') + return "Column(%s)" % ', '.join( + [repr(self.name)] + [repr(self.type)] + + [repr(x) for x in self.foreign_keys if x is not None] + + [repr(x) for x in self.constraints] + + [(self.table is not None and "table=<%s>" % + self.table.description or "table=None")] + + ["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg]) + + def _set_parent(self, table): + if not self.name: + raise exc.ArgumentError( + "Column must be constructed with a non-blank name or " + "assign a non-blank .name before adding to a Table.") + if self.key is None: + self.key = self.name + + existing = getattr(self, 'table', None) + if existing is not None and existing is not table: + raise exc.ArgumentError( + "Column object '%s' already assigned to Table '%s'" % ( + self.key, + existing.description + )) + + if self.key in table._columns: + col = table._columns.get(self.key) + if col is not self: + for fk in col.foreign_keys: + table.foreign_keys.remove(fk) + if fk.constraint in table.constraints: + # this might have been removed + # already, if it's a composite constraint + # and more than one col being replaced + table.constraints.remove(fk.constraint) + + table._columns.replace(self) + + if self.primary_key: + table.primary_key._replace(self) + Table._autoincrement_column._reset(table) + elif self.key in table.primary_key: + raise exc.ArgumentError( + "Trying to redefine primary-key column '%s' as a " + "non-primary-key column on table '%s'" % ( + self.key, table.fullname)) + self.table = table + + if self.index: + if isinstance(self.index, util.string_types): + raise exc.ArgumentError( + "The 'index' keyword argument on Column is boolean only. " + "To create indexes with a specific name, create an " + "explicit Index object external to the Table.") + Index(None, self, unique=bool(self.unique)) + elif self.unique: + if isinstance(self.unique, util.string_types): + raise exc.ArgumentError( + "The 'unique' keyword argument on Column is boolean " + "only. 
To create unique constraints or indexes with a "
+                    "specific name, append an explicit UniqueConstraint to "
+                    "the Table's list of elements, or create an explicit "
+                    "Index object external to the Table.")
+            table.append_constraint(UniqueConstraint(self.key))
+
+        self._setup_on_memoized_fks(lambda fk: fk._set_remote_table(table))
+
+    def _setup_on_memoized_fks(self, fn):
+        fk_keys = [
+            ((self.table.key, self.key), False),
+            ((self.table.key, self.name), True),
+        ]
+        for fk_key, link_to_name in fk_keys:
+            if fk_key in self.table.metadata._fk_memos:
+                for fk in self.table.metadata._fk_memos[fk_key]:
+                    if fk.link_to_name is link_to_name:
+                        fn(fk)
+
+    def _on_table_attach(self, fn):
+        if self.table is not None:
+            fn(self, self.table)
+        else:
+            event.listen(self, 'after_parent_attach', fn)
+
+    def copy(self, **kw):
+        """Create a copy of this ``Column``, uninitialized.
+
+        This is used in ``Table.tometadata``.
+
+        """
+
+        # Constraint objects plus non-constraint-bound ForeignKey objects
+        args = \
+            [c.copy(**kw) for c in self.constraints if not c._type_bound] + \
+            [c.copy(**kw) for c in self.foreign_keys if not c.constraint]
+
+        type_ = self.type
+        if isinstance(type_, SchemaEventTarget):
+            type_ = type_.copy(**kw)
+
+        c = self._constructor(
+            name=self.name,
+            type_=type_,
+            key=self.key,
+            primary_key=self.primary_key,
+            nullable=self.nullable,
+            unique=self.unique,
+            system=self.system,
+            # quote=self.quote,
+            index=self.index,
+            autoincrement=self.autoincrement,
+            default=self.default,
+            server_default=self.server_default,
+            onupdate=self.onupdate,
+            server_onupdate=self.server_onupdate,
+            doc=self.doc,
+            *args
+        )
+        return self._schema_item_copy(c)
+
+    def _make_proxy(self, selectable, name=None, key=None,
+                    name_is_truncatable=False, **kw):
+        """Create a *proxy* for this column.
+
+        This is a copy of this ``Column`` referenced by a different parent
+        (such as an alias or select statement).   The column should
+        be used only in select scenarios, as its full DDL/default
+        information is not transferred.
+
+        """
+        fk = [ForeignKey(f.column, _constraint=f.constraint)
+              for f in self.foreign_keys]
+        if name is None and self.name is None:
+            raise exc.InvalidRequestError(
+                "Cannot initialize a sub-selectable"
+                " with this Column object until its 'name' has "
+                "been assigned.")
+        try:
+            c = self._constructor(
+                _as_truncated(name or self.name) if
+                name_is_truncatable else (name or self.name),
+                self.type,
+                key=key if key else name if name else self.key,
+                primary_key=self.primary_key,
+                nullable=self.nullable,
+                _proxies=[self], *fk)
+        except TypeError:
+            util.raise_from_cause(
+                TypeError(
+                    "Could not create a copy of this %r object.  "
+                    "Ensure the class includes a _constructor() "
+                    "attribute or method which accepts the "
+                    "standard Column constructor arguments, or "
+                    "references the Column class itself." % self.__class__)
+            )
+
+        c.table = selectable
+        selectable._columns.add(c)
+        if selectable._is_clone_of is not None:
+            c._is_clone_of = selectable._is_clone_of.columns[c.key]
+        if self.primary_key:
+            selectable.primary_key.add(c)
+        c.dispatch.after_parent_attach(c, selectable)
+        return c
+
+    def get_children(self, schema_visitor=False, **kwargs):
+        if schema_visitor:
+            return [x for x in (self.default, self.onupdate)
+                    if x is not None] + \
+                list(self.foreign_keys) + list(self.constraints)
+        else:
+            return ColumnClause.get_children(self, **kwargs)
+
+
+class ForeignKey(DialectKWArgs, SchemaItem):
+    """Defines a dependency between two columns.
+
+    ``ForeignKey`` is specified as an argument to a :class:`.Column` object,
+    e.g.::
+
+        t = Table("remote_table", metadata,
+            Column("remote_id", ForeignKey("main_table.id"))
+        )
+
+    Note that ``ForeignKey`` is only a marker object that defines
+    a dependency between two columns.  The actual constraint
+    is in all cases represented by the :class:`.ForeignKeyConstraint`
+    object.   This object will be generated automatically when
+    a ``ForeignKey`` is associated with a :class:`.Column` which
+    in turn is associated with a :class:`.Table`.   Conversely,
+    when :class:`.ForeignKeyConstraint` is applied to a :class:`.Table`,
+    ``ForeignKey`` markers are automatically generated to be
+    present on each associated :class:`.Column`, which are also
+    associated with the constraint object.
+
+    Note that you cannot define a "composite" foreign key constraint,
+    that is a constraint between a grouping of multiple parent/child
+    columns, using ``ForeignKey`` objects.   To define this grouping,
+    the :class:`.ForeignKeyConstraint` object must be used, and applied
+    to the :class:`.Table`.   The associated ``ForeignKey`` objects
+    are created automatically.
+
+    The ``ForeignKey`` objects associated with an individual
+    :class:`.Column` object are available in the `foreign_keys` collection
+    of that column.
+
+    Further examples of foreign key configuration are in
+    :ref:`metadata_foreignkeys`.
+
+    """
+
+    __visit_name__ = 'foreign_key'
+
+    def __init__(self, column, _constraint=None, use_alter=False, name=None,
+                 onupdate=None, ondelete=None, deferrable=None,
+                 initially=None, link_to_name=False, match=None,
+                 info=None,
+                 **dialect_kw):
+        """
+        Construct a column-level FOREIGN KEY.
+
+        The :class:`.ForeignKey` object when constructed generates a
+        :class:`.ForeignKeyConstraint` which is associated with the parent
+        :class:`.Table` object's collection of constraints.
+
+        :param column: A single target column for the key relationship. A
+            :class:`.Column` object or a column name as a string:
+            ``tablename.columnkey`` or ``schema.tablename.columnkey``.
+            ``columnkey`` is the ``key`` which has been assigned to the column
+            (defaults to the column name itself), unless ``link_to_name`` is
+            ``True`` in which case the rendered name of the column is used.
+
+            .. versionadded:: 0.7.4
+                Note that if the schema name is not included, and the
+                underlying :class:`.MetaData` has a "schema", that value will
+                be used.
+
+        :param name: Optional string. An in-database name for the key if
+            `constraint` is not provided.
+
+        :param onupdate: Optional string. If set, emit ON UPDATE <value> when
+            issuing DDL for this constraint. Typical values include CASCADE,
+            DELETE and RESTRICT.
+
+        :param ondelete: Optional string. If set, emit ON DELETE <value> when
+            issuing DDL for this constraint. Typical values include CASCADE,
+            DELETE and RESTRICT.
+
+        :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT
+            DEFERRABLE when issuing DDL for this constraint.
+
+        :param initially: Optional string. If set, emit INITIALLY <value> when
+            issuing DDL for this constraint.
+
+        :param link_to_name: if True, the string name given in ``column`` is
+            the rendered name of the referenced column, not its locally
+            assigned ``key``.
+
+        :param use_alter: passed to the underlying
+            :class:`.ForeignKeyConstraint` to indicate the constraint should
+            be generated/dropped externally from the CREATE TABLE/ DROP TABLE
+            statement.  See :paramref:`.ForeignKeyConstraint.use_alter`
+            for further description.
+
+            .. seealso::
+
+                :paramref:`.ForeignKeyConstraint.use_alter`
+
+                :ref:`use_alter`
+
+        :param match: Optional string. If set, emit MATCH <value> when issuing
+            DDL for this constraint. Typical values include SIMPLE, PARTIAL
+            and FULL.
+
+        :param info: Optional data dictionary which will be populated into the
+            :attr:`.SchemaItem.info` attribute of this object.
+
+            .. versionadded:: 1.0.0
+
+        :param \**dialect_kw:  Additional keyword arguments are dialect
+            specific, and passed in the form ``<dialectname>_<argname>``.  The
+            arguments are ultimately handled by a corresponding
+            :class:`.ForeignKeyConstraint`.  See the documentation regarding
+            an individual dialect at :ref:`dialect_toplevel` for detail on
+            documented arguments.
+
+            .. versionadded:: 0.9.2
+
+        """
+
+        self._colspec = column
+        if isinstance(self._colspec, util.string_types):
+            self._table_column = None
+        else:
+            if hasattr(self._colspec, '__clause_element__'):
+                self._table_column = self._colspec.__clause_element__()
+            else:
+                self._table_column = self._colspec
+
+            if not isinstance(self._table_column, ColumnClause):
+                raise exc.ArgumentError(
+                    "String, Column, or Column-bound argument "
+                    "expected, got %r" % self._table_column)
+            elif not isinstance(
+                    self._table_column.table, (util.NoneType, TableClause)):
+                raise exc.ArgumentError(
+                    "ForeignKey received Column not bound "
+                    "to a Table, got: %r" % self._table_column.table
+                )
+
+        # the linked ForeignKeyConstraint.
+        # ForeignKey will create this when parent Column
+        # is attached to a Table, *or* ForeignKeyConstraint
+        # object passes itself in when creating ForeignKey
+        # markers.
+        self.constraint = _constraint
+        self.parent = None
+        self.use_alter = use_alter
+        self.name = name
+        self.onupdate = onupdate
+        self.ondelete = ondelete
+        self.deferrable = deferrable
+        self.initially = initially
+        self.link_to_name = link_to_name
+        self.match = match
+        if info:
+            self.info = info
+        self._unvalidated_dialect_kw = dialect_kw
+
+    def __repr__(self):
+        return "ForeignKey(%r)" % self._get_colspec()
+
+    def copy(self, schema=None):
+        """Produce a copy of this :class:`.ForeignKey` object.
+
+        The new :class:`.ForeignKey` will not be bound
+        to any :class:`.Column`.
+
+        This method is usually used by the internal
+        copy procedures of :class:`.Column`, :class:`.Table`,
+        and :class:`.MetaData`.
+
+        :param schema: The returned :class:`.ForeignKey` will
+            reference the original table and column name, qualified
+            by the given string schema name.
+
+        """
+
+        fk = ForeignKey(
+            self._get_colspec(schema=schema),
+            use_alter=self.use_alter,
+            name=self.name,
+            onupdate=self.onupdate,
+            ondelete=self.ondelete,
+            deferrable=self.deferrable,
+            initially=self.initially,
+            link_to_name=self.link_to_name,
+            match=self.match,
+            **self._unvalidated_dialect_kw
+        )
+        return self._schema_item_copy(fk)
+
+    def _get_colspec(self, schema=None, table_name=None):
+        """Return a string based 'column specification' for this
+        :class:`.ForeignKey`.
+
+        This is usually the equivalent of the string-based "tablename.colname"
+        argument first passed to the object's constructor.
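+
+        A minimal sketch of the expected return values (the names here are
+        illustrative only)::
+
+            fk = ForeignKey("user.id")
+            fk._get_colspec()               # "user.id"
+            fk._get_colspec(schema="alt")   # "alt.user.id"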
+ + """ + if schema: + _schema, tname, colname = self._column_tokens + if table_name is not None: + tname = table_name + return "%s.%s.%s" % (schema, tname, colname) + elif table_name: + schema, tname, colname = self._column_tokens + if schema: + return "%s.%s.%s" % (schema, table_name, colname) + else: + return "%s.%s" % (table_name, colname) + elif self._table_column is not None: + return "%s.%s" % ( + self._table_column.table.fullname, self._table_column.key) + else: + return self._colspec + + @property + def _referred_schema(self): + return self._column_tokens[0] + + def _table_key(self): + if self._table_column is not None: + if self._table_column.table is None: + return None + else: + return self._table_column.table.key + else: + schema, tname, colname = self._column_tokens + return _get_table_key(tname, schema) + + target_fullname = property(_get_colspec) + + def references(self, table): + """Return True if the given :class:`.Table` is referenced by this + :class:`.ForeignKey`.""" + + return table.corresponding_column(self.column) is not None + + def get_referent(self, table): + """Return the :class:`.Column` in the given :class:`.Table` + referenced by this :class:`.ForeignKey`. + + Returns None if this :class:`.ForeignKey` does not reference the given + :class:`.Table`. + + """ + + return table.corresponding_column(self.column) + + @util.memoized_property + def _column_tokens(self): + """parse a string-based _colspec into its component parts.""" + + m = self._get_colspec().split('.') + if m is None: + raise exc.ArgumentError( + "Invalid foreign key column specification: %s" % + self._colspec) + if (len(m) == 1): + tname = m.pop() + colname = None + else: + colname = m.pop() + tname = m.pop() + + # A FK between column 'bar' and table 'foo' can be + # specified as 'foo', 'foo.bar', 'dbo.foo.bar', + # 'otherdb.dbo.foo.bar'. Once we have the column name and + # the table name, treat everything else as the schema + # name. Some databases (e.g. Sybase) support + # inter-database foreign keys. See tickets#1341 and -- + # indirectly related -- Ticket #594. This assumes that '.' + # will never appear *within* any component of the FK. + + if (len(m) > 0): + schema = '.'.join(m) + else: + schema = None + return schema, tname, colname + + def _resolve_col_tokens(self): + if self.parent is None: + raise exc.InvalidRequestError( + "this ForeignKey object does not yet have a " + "parent Column associated with it.") + + elif self.parent.table is None: + raise exc.InvalidRequestError( + "this ForeignKey's parent column is not yet associated " + "with a Table.") + + parenttable = self.parent.table + + # assertion, can be commented out. + # basically Column._make_proxy() sends the actual + # target Column to the ForeignKey object, so the + # string resolution here is never called. 
+ for c in self.parent.base_columns: + if isinstance(c, Column): + assert c.table is parenttable + break + else: + assert False + ###################### + + schema, tname, colname = self._column_tokens + + if schema is None and parenttable.metadata.schema is not None: + schema = parenttable.metadata.schema + + tablekey = _get_table_key(tname, schema) + return parenttable, tablekey, colname + + def _link_to_col_by_colstring(self, parenttable, table, colname): + if not hasattr(self.constraint, '_referred_table'): + self.constraint._referred_table = table + else: + assert self.constraint._referred_table is table + + _column = None + if colname is None: + # colname is None in the case that ForeignKey argument + # was specified as table name only, in which case we + # match the column name to the same column on the + # parent. + key = self.parent + _column = table.c.get(self.parent.key, None) + elif self.link_to_name: + key = colname + for c in table.c: + if c.name == colname: + _column = c + else: + key = colname + _column = table.c.get(colname, None) + + if _column is None: + raise exc.NoReferencedColumnError( + "Could not initialize target column " + "for ForeignKey '%s' on table '%s': " + "table '%s' has no column named '%s'" % + (self._colspec, parenttable.name, table.name, key), + table.name, key) + + self._set_target_column(_column) + + def _set_target_column(self, column): + # propagate TypeEngine to parent if it didn't have one + if self.parent.type._isnull: + self.parent.type = column.type + + # super-edgy case, if other FKs point to our column, + # they'd get the type propagated out also. + if isinstance(self.parent.table, Table): + + def set_type(fk): + if fk.parent.type._isnull: + fk.parent.type = column.type + self.parent._setup_on_memoized_fks(set_type) + + self.column = column + + @util.memoized_property + def column(self): + """Return the target :class:`.Column` referenced by this + :class:`.ForeignKey`. + + If no target column has been established, an exception + is raised. + + .. versionchanged:: 0.9.0 + Foreign key target column resolution now occurs as soon as both + the ForeignKey object and the remote Column to which it refers + are both associated with the same MetaData object. 
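+
+        For example (an illustrative sketch; the table and column names
+        here are hypothetical)::
+
+            m = MetaData()
+            user = Table('user', m, Column('id', Integer, primary_key=True))
+            address = Table('address', m,
+                            Column('user_id', Integer, ForeignKey('user.id')))
+
+            fk = list(address.c.user_id.foreign_keys)[0]
+            fk.column is user.c.id  # True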
+ + """ + + if isinstance(self._colspec, util.string_types): + + parenttable, tablekey, colname = self._resolve_col_tokens() + + if tablekey not in parenttable.metadata: + raise exc.NoReferencedTableError( + "Foreign key associated with column '%s' could not find " + "table '%s' with which to generate a " + "foreign key to target column '%s'" % + (self.parent, tablekey, colname), + tablekey) + elif parenttable.key not in parenttable.metadata: + raise exc.InvalidRequestError( + "Table %s is no longer associated with its " + "parent MetaData" % parenttable) + else: + raise exc.NoReferencedColumnError( + "Could not initialize target column for " + "ForeignKey '%s' on table '%s': " + "table '%s' has no column named '%s'" % ( + self._colspec, parenttable.name, tablekey, colname), + tablekey, colname) + elif hasattr(self._colspec, '__clause_element__'): + _column = self._colspec.__clause_element__() + return _column + else: + _column = self._colspec + return _column + + def _set_parent(self, column): + if self.parent is not None and self.parent is not column: + raise exc.InvalidRequestError( + "This ForeignKey already has a parent !") + self.parent = column + self.parent.foreign_keys.add(self) + self.parent._on_table_attach(self._set_table) + + def _set_remote_table(self, table): + parenttable, tablekey, colname = self._resolve_col_tokens() + self._link_to_col_by_colstring(parenttable, table, colname) + self.constraint._validate_dest_table(table) + + def _remove_from_metadata(self, metadata): + parenttable, table_key, colname = self._resolve_col_tokens() + fk_key = (table_key, colname) + + if self in metadata._fk_memos[fk_key]: + # TODO: no test coverage for self not in memos + metadata._fk_memos[fk_key].remove(self) + + def _set_table(self, column, table): + # standalone ForeignKey - create ForeignKeyConstraint + # on the hosting Table when attached to the Table. + if self.constraint is None and isinstance(table, Table): + self.constraint = ForeignKeyConstraint( + [], [], use_alter=self.use_alter, name=self.name, + onupdate=self.onupdate, ondelete=self.ondelete, + deferrable=self.deferrable, initially=self.initially, + match=self.match, + **self._unvalidated_dialect_kw + ) + self.constraint._append_element(column, self) + self.constraint._set_parent_with_dispatch(table) + table.foreign_keys.add(self) + + # set up remote ".column" attribute, or a note to pick it + # up when the other Table/Column shows up + if isinstance(self._colspec, util.string_types): + parenttable, table_key, colname = self._resolve_col_tokens() + fk_key = (table_key, colname) + if table_key in parenttable.metadata.tables: + table = parenttable.metadata.tables[table_key] + try: + self._link_to_col_by_colstring( + parenttable, table, colname) + except exc.NoReferencedColumnError: + # this is OK, we'll try later + pass + parenttable.metadata._fk_memos[fk_key].append(self) + elif hasattr(self._colspec, '__clause_element__'): + _column = self._colspec.__clause_element__() + self._set_target_column(_column) + else: + _column = self._colspec + self._set_target_column(_column) + + +class _NotAColumnExpr(object): + def _not_a_column_expr(self): + raise exc.InvalidRequestError( + "This %s cannot be used directly " + "as a column expression." 
% self.__class__.__name__)
+
+    __clause_element__ = self_group = lambda self: self._not_a_column_expr()
+    _from_objects = property(lambda self: self._not_a_column_expr())
+
+
+class DefaultGenerator(_NotAColumnExpr, SchemaItem):
+    """Base class for column *default* values."""
+
+    __visit_name__ = 'default_generator'
+
+    is_sequence = False
+    is_server_default = False
+    column = None
+
+    def __init__(self, for_update=False):
+        self.for_update = for_update
+
+    def _set_parent(self, column):
+        self.column = column
+        if self.for_update:
+            self.column.onupdate = self
+        else:
+            self.column.default = self
+
+    def execute(self, bind=None, **kwargs):
+        if bind is None:
+            bind = _bind_or_error(self)
+        return bind._execute_default(self, **kwargs)
+
+    @property
+    def bind(self):
+        """Return the connectable associated with this default."""
+        if getattr(self, 'column', None) is not None:
+            return self.column.table.bind
+        else:
+            return None
+
+
+class ColumnDefault(DefaultGenerator):
+    """A plain default value on a column.
+
+    This could correspond to a constant, a callable function,
+    or a SQL clause.
+
+    :class:`.ColumnDefault` is generated automatically
+    whenever the ``default``, ``onupdate`` arguments of
+    :class:`.Column` are used.  A :class:`.ColumnDefault`
+    can be passed positionally as well.
+
+    For example, the following::
+
+        Column('foo', Integer, default=50)
+
+    Is equivalent to::
+
+        Column('foo', Integer, ColumnDefault(50))
+
+
+    """
+
+    def __init__(self, arg, **kwargs):
+        """Construct a new :class:`.ColumnDefault`.
+
+
+        :param arg: argument representing the default value.
+            May be one of the following:
+
+            * a plain non-callable Python value, such as a
+              string, integer, boolean, or other simple type.
+              The default value will be used as is each time.
+            * a SQL expression, that is one which derives from
+              :class:`.ColumnElement`.  The SQL expression will
+              be rendered into the INSERT or UPDATE statement,
+              or in the case of a primary key column when
+              RETURNING is not used may be
+              pre-executed before an INSERT within a SELECT.
+            * A Python callable.  The function will be invoked for each
+              new row subject to an INSERT or UPDATE.
+              The callable must accept exactly
+              zero or one positional arguments.  The one-argument form
+              will receive an instance of the :class:`.ExecutionContext`,
+              which provides contextual information as to the current
+              :class:`.Connection` in use as well as the current
+              statement and parameters.
+
+        """
+        super(ColumnDefault, self).__init__(**kwargs)
+        if isinstance(arg, FetchedValue):
+            raise exc.ArgumentError(
+                "ColumnDefault may not be a server-side default type.")
+        if util.callable(arg):
+            arg = self._maybe_wrap_callable(arg)
+        self.arg = arg
+
+    @util.memoized_property
+    def is_callable(self):
+        return util.callable(self.arg)
+
+    @util.memoized_property
+    def is_clause_element(self):
+        return isinstance(self.arg, ClauseElement)
+
+    @util.memoized_property
+    def is_scalar(self):
+        return not self.is_callable and \
+            not self.is_clause_element and \
+            not self.is_sequence
+
+    def _maybe_wrap_callable(self, fn):
+        """Wrap callables that don't accept a context.
+
+        This is to allow easy compatibility with default callables
+        that aren't specific to accepting of a context.
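+
+        For example, both of the following forms are accepted for
+        :paramref:`.Column.default` (an illustrative sketch; the ``datetime``
+        usage is assumed, not part of this module)::
+
+            # zero-argument callable - wrapped as ``lambda ctx: fn()``
+            Column('created', DateTime, default=datetime.datetime.utcnow)
+
+            # one-argument callable - receives the ExecutionContext directly
+            Column('created', DateTime,
+                   default=lambda ctx: datetime.datetime.utcnow())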
+ + """ + try: + argspec = util.get_callable_argspec(fn, no_self=True) + except TypeError: + return lambda ctx: fn() + + defaulted = argspec[3] is not None and len(argspec[3]) or 0 + positionals = len(argspec[0]) - defaulted + + if positionals == 0: + return lambda ctx: fn() + elif positionals == 1: + return fn + else: + raise exc.ArgumentError( + "ColumnDefault Python function takes zero or one " + "positional arguments") + + def _visit_name(self): + if self.for_update: + return "column_onupdate" + else: + return "column_default" + __visit_name__ = property(_visit_name) + + def __repr__(self): + return "ColumnDefault(%r)" % self.arg + + +class Sequence(DefaultGenerator): + """Represents a named database sequence. + + The :class:`.Sequence` object represents the name and configurational + parameters of a database sequence. It also represents + a construct that can be "executed" by a SQLAlchemy :class:`.Engine` + or :class:`.Connection`, rendering the appropriate "next value" function + for the target database and returning a result. + + The :class:`.Sequence` is typically associated with a primary key column:: + + some_table = Table( + 'some_table', metadata, + Column('id', Integer, Sequence('some_table_seq'), + primary_key=True) + ) + + When CREATE TABLE is emitted for the above :class:`.Table`, if the + target platform supports sequences, a CREATE SEQUENCE statement will + be emitted as well. For platforms that don't support sequences, + the :class:`.Sequence` construct is ignored. + + .. seealso:: + + :class:`.CreateSequence` + + :class:`.DropSequence` + + """ + + __visit_name__ = 'sequence' + + is_sequence = True + + def __init__(self, name, start=None, increment=None, minvalue=None, + maxvalue=None, nominvalue=None, nomaxvalue=None, cycle=None, + schema=None, optional=False, quote=None, metadata=None, + quote_schema=None, + for_update=False): + """Construct a :class:`.Sequence` object. + + :param name: The name of the sequence. + :param start: the starting index of the sequence. This value is + used when the CREATE SEQUENCE command is emitted to the database + as the value of the "START WITH" clause. If ``None``, the + clause is omitted, which on most platforms indicates a starting + value of 1. + :param increment: the increment value of the sequence. This + value is used when the CREATE SEQUENCE command is emitted to + the database as the value of the "INCREMENT BY" clause. If ``None``, + the clause is omitted, which on most platforms indicates an + increment of 1. + :param minvalue: the minimum value of the sequence. This + value is used when the CREATE SEQUENCE command is emitted to + the database as the value of the "MINVALUE" clause. If ``None``, + the clause is omitted, which on most platforms indicates a + minvalue of 1 and -2^63-1 for ascending and descending sequences, + respectively. + + .. versionadded:: 1.0.7 + + :param maxvalue: the maximum value of the sequence. This + value is used when the CREATE SEQUENCE command is emitted to + the database as the value of the "MAXVALUE" clause. If ``None``, + the clause is omitted, which on most platforms indicates a + maxvalue of 2^63-1 and -1 for ascending and descending sequences, + respectively. + + .. versionadded:: 1.0.7 + + :param nominvalue: no minimum value of the sequence. This + value is used when the CREATE SEQUENCE command is emitted to + the database as the value of the "NO MINVALUE" clause. 
If ``None``, + the clause is omitted, which on most platforms indicates a + minvalue of 1 and -2^63-1 for ascending and descending sequences, + respectively. + + .. versionadded:: 1.0.7 + + :param nomaxvalue: no maximum value of the sequence. This + value is used when the CREATE SEQUENCE command is emitted to + the database as the value of the "NO MAXVALUE" clause. If ``None``, + the clause is omitted, which on most platforms indicates a + maxvalue of 2^63-1 and -1 for ascending and descending sequences, + respectively. + + .. versionadded:: 1.0.7 + + :param cycle: allows the sequence to wrap around when the maxvalue + or minvalue has been reached by an ascending or descending sequence + respectively. This value is used when the CREATE SEQUENCE command + is emitted to the database as the "CYCLE" clause. If the limit is + reached, the next number generated will be the minvalue or maxvalue, + respectively. If cycle=False (the default) any calls to nextval + after the sequence has reached its maximum value will return an + error. + + .. versionadded:: 1.0.7 + + :param schema: Optional schema name for the sequence, if located + in a schema other than the default. + :param optional: boolean value, when ``True``, indicates that this + :class:`.Sequence` object only needs to be explicitly generated + on backends that don't provide another way to generate primary + key identifiers. Currently, it essentially means, "don't create + this sequence on the Postgresql backend, where the SERIAL keyword + creates a sequence for us automatically". + :param quote: boolean value, when ``True`` or ``False``, explicitly + forces quoting of the schema name on or off. When left at its + default of ``None``, normal quoting rules based on casing and + reserved words take place. + :param quote_schema: set the quoting preferences for the ``schema`` + name. + :param metadata: optional :class:`.MetaData` object which will be + associated with this :class:`.Sequence`. A :class:`.Sequence` + that is associated with a :class:`.MetaData` gains access to the + ``bind`` of that :class:`.MetaData`, meaning the + :meth:`.Sequence.create` and :meth:`.Sequence.drop` methods will + make usage of that engine automatically. + + .. versionchanged:: 0.7 + Additionally, the appropriate CREATE SEQUENCE/ + DROP SEQUENCE DDL commands will be emitted corresponding to this + :class:`.Sequence` when :meth:`.MetaData.create_all` and + :meth:`.MetaData.drop_all` are invoked. + + Note that when a :class:`.Sequence` is applied to a :class:`.Column`, + the :class:`.Sequence` is automatically associated with the + :class:`.MetaData` object of that column's parent :class:`.Table`, + when that association is made. The :class:`.Sequence` will then + be subject to automatic CREATE SEQUENCE/DROP SEQUENCE corresponding + to when the :class:`.Table` object itself is created or dropped, + rather than that of the :class:`.MetaData` object overall. + :param for_update: Indicates this :class:`.Sequence`, when associated + with a :class:`.Column`, should be invoked for UPDATE statements + on that column's table, rather than for INSERT statements, when + no value is otherwise present for that column in the statement. 
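+
+        A minimal usage sketch (an editorial illustration, not part of the
+        upstream docstring; the engine URL and all names are hypothetical)::
+
+            from sqlalchemy import (create_engine, MetaData, Table, Column,
+                                    Integer, Sequence, select)
+
+            metadata = MetaData()
+            cart_id_seq = Sequence('cart_id_seq', start=1, increment=1)
+            cartitems = Table('cartitems', metadata,
+                              Column('cart_id', Integer, cart_id_seq,
+                                     primary_key=True))
+
+            engine = create_engine('postgresql://scott:tiger@localhost/test')
+            metadata.create_all(engine)  # emits CREATE SEQUENCE, CREATE TABLE
+
+            # executed standalone, a Sequence returns its next value
+            next_id = engine.execute(cart_id_seq)
+
+            # or render it within a SQL expression via next_value()
+            stmt = select([cart_id_seq.next_value()])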
+ + """ + super(Sequence, self).__init__(for_update=for_update) + self.name = quoted_name(name, quote) + self.start = start + self.increment = increment + self.minvalue = minvalue + self.maxvalue = maxvalue + self.nominvalue = nominvalue + self.nomaxvalue = nomaxvalue + self.cycle = cycle + self.optional = optional + if metadata is not None and schema is None and metadata.schema: + self.schema = schema = metadata.schema + else: + self.schema = quoted_name(schema, quote_schema) + self.metadata = metadata + self._key = _get_table_key(name, schema) + if metadata: + self._set_metadata(metadata) + + @util.memoized_property + def is_callable(self): + return False + + @util.memoized_property + def is_clause_element(self): + return False + + @util.dependencies("sqlalchemy.sql.functions.func") + def next_value(self, func): + """Return a :class:`.next_value` function element + which will render the appropriate increment function + for this :class:`.Sequence` within any SQL expression. + + """ + return func.next_value(self, bind=self.bind) + + def _set_parent(self, column): + super(Sequence, self)._set_parent(column) + column._on_table_attach(self._set_table) + + def _set_table(self, column, table): + self._set_metadata(table.metadata) + + def _set_metadata(self, metadata): + self.metadata = metadata + self.metadata._sequences[self._key] = self + + @property + def bind(self): + if self.metadata: + return self.metadata.bind + else: + return None + + def create(self, bind=None, checkfirst=True): + """Creates this sequence in the database.""" + + if bind is None: + bind = _bind_or_error(self) + bind._run_visitor(ddl.SchemaGenerator, + self, + checkfirst=checkfirst) + + def drop(self, bind=None, checkfirst=True): + """Drops this sequence from the database.""" + + if bind is None: + bind = _bind_or_error(self) + bind._run_visitor(ddl.SchemaDropper, + self, + checkfirst=checkfirst) + + def _not_a_column_expr(self): + raise exc.InvalidRequestError( + "This %s cannot be used directly " + "as a column expression. Use func.next_value(sequence) " + "to produce a 'next value' function that's usable " + "as a column element." + % self.__class__.__name__) + + +@inspection._self_inspects +class FetchedValue(_NotAColumnExpr, SchemaEventTarget): + """A marker for a transparent database-side default. + + Use :class:`.FetchedValue` when the database is configured + to provide some automatic default for a column. + + E.g.:: + + Column('foo', Integer, FetchedValue()) + + Would indicate that some trigger or default generator + will create a new value for the ``foo`` column during an + INSERT. + + .. seealso:: + + :ref:`triggered_columns` + + """ + is_server_default = True + reflected = False + has_argument = False + + def __init__(self, for_update=False): + self.for_update = for_update + + def _as_for_update(self, for_update): + if for_update == self.for_update: + return self + else: + return self._clone(for_update) + + def _clone(self, for_update): + n = self.__class__.__new__(self.__class__) + n.__dict__.update(self.__dict__) + n.__dict__.pop('column', None) + n.for_update = for_update + return n + + def _set_parent(self, column): + self.column = column + if self.for_update: + self.column.server_onupdate = self + else: + self.column.server_default = self + + def __repr__(self): + return util.generic_repr(self) + + +class DefaultClause(FetchedValue): + """A DDL-specified DEFAULT column value. + + :class:`.DefaultClause` is a :class:`.FetchedValue` + that also generates a "DEFAULT" clause when + "CREATE TABLE" is emitted. 
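+
+    As a rough sketch of the server-side default family just described
+    (an editorial illustration, not upstream documentation; table and
+    column names are hypothetical), a DDL-level default uses
+    ``server_default`` while a trigger-maintained column uses
+    :class:`.FetchedValue`::
+
+        from sqlalchemy import Table, Column, Integer, DateTime, MetaData
+        from sqlalchemy.schema import FetchedValue
+
+        metadata = MetaData()
+        t = Table('audited', metadata,
+                  Column('id', Integer, primary_key=True),
+                  # rendered into CREATE TABLE as DEFAULT '0'
+                  Column('status', Integer, server_default="0"),
+                  # no DDL rendered; a trigger is assumed to supply the
+                  # value on UPDATE, so it is fetched after the statement
+                  Column('updated_at', DateTime,
+                         server_onupdate=FetchedValue()))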
+ + :class:`.DefaultClause` is generated automatically + whenever the ``server_default``, ``server_onupdate`` arguments of + :class:`.Column` are used. A :class:`.DefaultClause` + can be passed positionally as well. + + For example, the following:: + + Column('foo', Integer, server_default="50") + + Is equivalent to:: + + Column('foo', Integer, DefaultClause("50")) + + """ + + has_argument = True + + def __init__(self, arg, for_update=False, _reflected=False): + util.assert_arg_type(arg, (util.string_types[0], + ClauseElement, + TextClause), 'arg') + super(DefaultClause, self).__init__(for_update) + self.arg = arg + self.reflected = _reflected + + def __repr__(self): + return "DefaultClause(%r, for_update=%r)" % \ + (self.arg, self.for_update) + + +class PassiveDefault(DefaultClause): + """A DDL-specified DEFAULT column value. + + .. deprecated:: 0.6 + :class:`.PassiveDefault` is deprecated. + Use :class:`.DefaultClause`. + """ + @util.deprecated("0.6", + ":class:`.PassiveDefault` is deprecated. " + "Use :class:`.DefaultClause`.", + False) + def __init__(self, *arg, **kw): + DefaultClause.__init__(self, *arg, **kw) + + +class Constraint(DialectKWArgs, SchemaItem): + """A table-level SQL constraint.""" + + __visit_name__ = 'constraint' + + def __init__(self, name=None, deferrable=None, initially=None, + _create_rule=None, info=None, _type_bound=False, + **dialect_kw): + """Create a SQL constraint. + + :param name: + Optional, the in-database name of this ``Constraint``. + + :param deferrable: + Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when + issuing DDL for this constraint. + + :param initially: + Optional string. If set, emit INITIALLY when issuing DDL + for this constraint. + + :param info: Optional data dictionary which will be populated into the + :attr:`.SchemaItem.info` attribute of this object. + + .. versionadded:: 1.0.0 + + :param _create_rule: + a callable which is passed the DDLCompiler object during + compilation. Returns True or False to signal inline generation of + this Constraint. + + The AddConstraint and DropConstraint DDL constructs provide + DDLElement's more comprehensive "conditional DDL" approach that is + passed a database connection when DDL is being issued. _create_rule + is instead called during any CREATE TABLE compilation, where there + may not be any transaction/connection in progress. However, it + allows conditional compilation of the constraint even for backends + which do not support addition of constraints through ALTER TABLE, + which currently includes SQLite. + + _create_rule is used by some types to create constraints. + Currently, its call signature is subject to change at any time. + + :param \**dialect_kw: Additional keyword arguments are dialect + specific, and passed in the form ``_``. See + the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. + + """ + + self.name = name + self.deferrable = deferrable + self.initially = initially + if info: + self.info = info + self._create_rule = _create_rule + self._type_bound = _type_bound + util.set_creation_order(self) + self._validate_dialect_kwargs(dialect_kw) + + @property + def table(self): + try: + if isinstance(self.parent, Table): + return self.parent + except AttributeError: + pass + raise exc.InvalidRequestError( + "This constraint is not bound to a table. 
Did you " + "mean to call table.append_constraint(constraint) ?") + + def _set_parent(self, parent): + self.parent = parent + parent.constraints.add(self) + + def copy(self, **kw): + raise NotImplementedError() + + +def _to_schema_column(element): + if hasattr(element, '__clause_element__'): + element = element.__clause_element__() + if not isinstance(element, Column): + raise exc.ArgumentError("schema.Column object expected") + return element + + +def _to_schema_column_or_string(element): + if hasattr(element, '__clause_element__'): + element = element.__clause_element__() + if not isinstance(element, util.string_types + (ColumnElement, )): + msg = "Element %r is not a string name or column element" + raise exc.ArgumentError(msg % element) + return element + + +class ColumnCollectionMixin(object): + + columns = None + """A :class:`.ColumnCollection` of :class:`.Column` objects. + + This collection represents the columns which are referred to by + this object. + + """ + + _allow_multiple_tables = False + + def __init__(self, *columns, **kw): + _autoattach = kw.pop('_autoattach', True) + self.columns = ColumnCollection() + self._pending_colargs = [_to_schema_column_or_string(c) + for c in columns] + if _autoattach and self._pending_colargs: + self._check_attach() + + @classmethod + def _extract_col_expression_collection(cls, expressions): + for expr in expressions: + strname = None + column = None + if not isinstance(expr, ClauseElement): + # this assumes a string + strname = expr + else: + cols = [] + visitors.traverse(expr, {}, {'column': cols.append}) + if cols: + column = cols[0] + add_element = column if column is not None else strname + yield expr, column, strname, add_element + + def _check_attach(self, evt=False): + col_objs = [ + c for c in self._pending_colargs + if isinstance(c, Column) + ] + + cols_w_table = [ + c for c in col_objs if isinstance(c.table, Table) + ] + + cols_wo_table = set(col_objs).difference(cols_w_table) + + if cols_wo_table: + # feature #3341 - place event listeners for Column objects + # such that when all those cols are attached, we autoattach. + assert not evt, "Should not reach here on event call" + + # issue #3411 - don't do the per-column auto-attach if some of the + # columns are specified as strings. + has_string_cols = set(self._pending_colargs).difference(col_objs) + if not has_string_cols: + def _col_attached(column, table): + cols_wo_table.discard(column) + if not cols_wo_table: + self._check_attach(evt=True) + self._cols_wo_table = cols_wo_table + for col in cols_wo_table: + col._on_table_attach(_col_attached) + return + + columns = cols_w_table + + tables = set([c.table for c in columns]) + if len(tables) == 1: + self._set_parent_with_dispatch(tables.pop()) + elif len(tables) > 1 and not self._allow_multiple_tables: + table = columns[0].table + others = [c for c in columns[1:] if c.table is not table] + if others: + raise exc.ArgumentError( + "Column(s) %s are not part of table '%s'." % + (", ".join("'%s'" % c for c in others), + table.description) + ) + + def _set_parent(self, table): + for col in self._pending_colargs: + if isinstance(col, util.string_types): + col = table.c[col] + self.columns.add(col) + + +class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint): + """A constraint that proxies a ColumnCollection.""" + + def __init__(self, *columns, **kw): + """ + :param \*columns: + A sequence of column names or Column objects. + + :param name: + Optional, the in-database name of this constraint. 
+ + :param deferrable: + Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when + issuing DDL for this constraint. + + :param initially: + Optional string. If set, emit INITIALLY when issuing DDL + for this constraint. + + :param \**kw: other keyword arguments including dialect-specific + arguments are propagated to the :class:`.Constraint` superclass. + + """ + _autoattach = kw.pop('_autoattach', True) + Constraint.__init__(self, **kw) + ColumnCollectionMixin.__init__(self, *columns, _autoattach=_autoattach) + + def _set_parent(self, table): + Constraint._set_parent(self, table) + ColumnCollectionMixin._set_parent(self, table) + + def __contains__(self, x): + return x in self.columns + + def copy(self, **kw): + c = self.__class__(name=self.name, deferrable=self.deferrable, + initially=self.initially, *self.columns.keys()) + return self._schema_item_copy(c) + + def contains_column(self, col): + """Return True if this constraint contains the given column. + + Note that this object also contains an attribute ``.columns`` + which is a :class:`.ColumnCollection` of :class:`.Column` objects. + + """ + + return self.columns.contains_column(col) + + def __iter__(self): + # inlining of + # return iter(self.columns) + # ColumnCollection->OrderedProperties->OrderedDict + ordered_dict = self.columns._data + return (ordered_dict[key] for key in ordered_dict._list) + + def __len__(self): + return len(self.columns._data) + + +class CheckConstraint(ColumnCollectionConstraint): + """A table- or column-level CHECK constraint. + + Can be included in the definition of a Table or Column. + """ + + _allow_multiple_tables = True + + def __init__(self, sqltext, name=None, deferrable=None, + initially=None, table=None, info=None, _create_rule=None, + _autoattach=True, _type_bound=False): + """Construct a CHECK constraint. + + :param sqltext: + A string containing the constraint definition, which will be used + verbatim, or a SQL expression construct. If given as a string, + the object is converted to a :class:`.Text` object. If the textual + string includes a colon character, escape this using a backslash:: + + CheckConstraint(r"foo ~ E'a(?\:b|c)d") + + :param name: + Optional, the in-database name of the constraint. + + :param deferrable: + Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when + issuing DDL for this constraint. + + :param initially: + Optional string. If set, emit INITIALLY when issuing DDL + for this constraint. + + :param info: Optional data dictionary which will be populated into the + :attr:`.SchemaItem.info` attribute of this object. + + .. 
versionadded:: 1.0.0 + + """ + + self.sqltext = _literal_as_text(sqltext, warn=False) + + columns = [] + visitors.traverse(self.sqltext, {}, {'column': columns.append}) + + super(CheckConstraint, self).\ + __init__( + name=name, deferrable=deferrable, + initially=initially, _create_rule=_create_rule, info=info, + _type_bound=_type_bound, _autoattach=_autoattach, + *columns) + if table is not None: + self._set_parent_with_dispatch(table) + + def __visit_name__(self): + if isinstance(self.parent, Table): + return "check_constraint" + else: + return "column_check_constraint" + __visit_name__ = property(__visit_name__) + + def copy(self, target_table=None, **kw): + if target_table is not None: + def replace(col): + if self.table.c.contains_column(col): + return target_table.c[col.key] + else: + return None + sqltext = visitors.replacement_traverse(self.sqltext, {}, replace) + else: + sqltext = self.sqltext + c = CheckConstraint(sqltext, + name=self.name, + initially=self.initially, + deferrable=self.deferrable, + _create_rule=self._create_rule, + table=target_table, + _autoattach=False, + _type_bound=self._type_bound) + return self._schema_item_copy(c) + + +class ForeignKeyConstraint(ColumnCollectionConstraint): + """A table-level FOREIGN KEY constraint. + + Defines a single column or composite FOREIGN KEY ... REFERENCES + constraint. For a no-frills, single column foreign key, adding a + :class:`.ForeignKey` to the definition of a :class:`.Column` is a + shorthand equivalent for an unnamed, single column + :class:`.ForeignKeyConstraint`. + + Examples of foreign key configuration are in :ref:`metadata_foreignkeys`. + + """ + __visit_name__ = 'foreign_key_constraint' + + def __init__(self, columns, refcolumns, name=None, onupdate=None, + ondelete=None, deferrable=None, initially=None, + use_alter=False, link_to_name=False, match=None, + table=None, info=None, **dialect_kw): + """Construct a composite-capable FOREIGN KEY. + + :param columns: A sequence of local column names. The named columns + must be defined and present in the parent Table. The names should + match the ``key`` given to each column (defaults to the name) unless + ``link_to_name`` is True. + + :param refcolumns: A sequence of foreign column names or Column + objects. The columns must all be located within the same Table. + + :param name: Optional, the in-database name of the key. + + :param onupdate: Optional string. If set, emit ON UPDATE when + issuing DDL for this constraint. Typical values include CASCADE, + DELETE and RESTRICT. + + :param ondelete: Optional string. If set, emit ON DELETE when + issuing DDL for this constraint. Typical values include CASCADE, + DELETE and RESTRICT. + + :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT + DEFERRABLE when issuing DDL for this constraint. + + :param initially: Optional string. If set, emit INITIALLY when + issuing DDL for this constraint. + + :param link_to_name: if True, the string name given in ``column`` is + the rendered name of the referenced column, not its locally assigned + ``key``. + + :param use_alter: If True, do not emit the DDL for this constraint as + part of the CREATE TABLE definition. Instead, generate it via an + ALTER TABLE statement issued after the full collection of tables + have been created, and drop it via an ALTER TABLE statement before + the full collection of tables are dropped. 
+ + The use of :paramref:`.ForeignKeyConstraint.use_alter` is + particularly geared towards the case where two or more tables + are established within a mutually-dependent foreign key constraint + relationship; however, the :meth:`.MetaData.create_all` and + :meth:`.MetaData.drop_all` methods will perform this resolution + automatically, so the flag is normally not needed. + + .. versionchanged:: 1.0.0 Automatic resolution of foreign key + cycles has been added, removing the need to use the + :paramref:`.ForeignKeyConstraint.use_alter` in typical use + cases. + + .. seealso:: + + :ref:`use_alter` + + :param match: Optional string. If set, emit MATCH when issuing + DDL for this constraint. Typical values include SIMPLE, PARTIAL + and FULL. + + :param info: Optional data dictionary which will be populated into the + :attr:`.SchemaItem.info` attribute of this object. + + .. versionadded:: 1.0.0 + + :param \**dialect_kw: Additional keyword arguments are dialect + specific, and passed in the form ``_``. See + the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. + + .. versionadded:: 0.9.2 + + """ + + Constraint.__init__( + self, name=name, deferrable=deferrable, initially=initially, + info=info, **dialect_kw) + self.onupdate = onupdate + self.ondelete = ondelete + self.link_to_name = link_to_name + self.use_alter = use_alter + self.match = match + + # standalone ForeignKeyConstraint - create + # associated ForeignKey objects which will be applied to hosted + # Column objects (in col.foreign_keys), either now or when attached + # to the Table for string-specified names + self.elements = [ + ForeignKey( + refcol, + _constraint=self, + name=self.name, + onupdate=self.onupdate, + ondelete=self.ondelete, + use_alter=self.use_alter, + link_to_name=self.link_to_name, + match=self.match, + deferrable=self.deferrable, + initially=self.initially, + **self.dialect_kwargs + ) for refcol in refcolumns + ] + + ColumnCollectionMixin.__init__(self, *columns) + if table is not None: + if hasattr(self, "parent"): + assert table is self.parent + self._set_parent_with_dispatch(table) + + def _append_element(self, column, fk): + self.columns.add(column) + self.elements.append(fk) + + @property + def _elements(self): + # legacy - provide a dictionary view of (column_key, fk) + return util.OrderedDict( + zip(self.column_keys, self.elements) + ) + + @property + def _referred_schema(self): + for elem in self.elements: + return elem._referred_schema + else: + return None + + @property + def referred_table(self): + """The :class:`.Table` object to which this + :class:`.ForeignKeyConstraint` references. + + This is a dynamically calculated attribute which may not be available + if the constraint and/or parent table is not yet associated with + a metadata collection that contains the referred table. + + .. versionadded:: 1.0.0 + + """ + return self.elements[0].column.table + + def _validate_dest_table(self, table): + table_keys = set([elem._table_key() + for elem in self.elements]) + if None not in table_keys and len(table_keys) > 1: + elem0, elem1 = sorted(table_keys)[0:2] + raise exc.ArgumentError( + 'ForeignKeyConstraint on %s(%s) refers to ' + 'multiple remote tables: %s and %s' % ( + table.fullname, + self._col_description, + elem0, + elem1 + )) + + @property + def column_keys(self): + """Return a list of string keys representing the local + columns in this :class:`.ForeignKeyConstraint`. 
+ + This list is either the original string arguments sent + to the constructor of the :class:`.ForeignKeyConstraint`, + or if the constraint has been initialized with :class:`.Column` + objects, is the string .key of each element. + + .. versionadded:: 1.0.0 + + """ + if hasattr(self, "parent"): + return self.columns.keys() + else: + return [ + col.key if isinstance(col, ColumnElement) + else str(col) for col in self._pending_colargs + ] + + @property + def _col_description(self): + return ", ".join(self.column_keys) + + def _set_parent(self, table): + Constraint._set_parent(self, table) + + try: + ColumnCollectionConstraint._set_parent(self, table) + except KeyError as ke: + raise exc.ArgumentError( + "Can't create ForeignKeyConstraint " + "on table '%s': no column " + "named '%s' is present." % (table.description, ke.args[0])) + + for col, fk in zip(self.columns, self.elements): + if not hasattr(fk, 'parent') or \ + fk.parent is not col: + fk._set_parent_with_dispatch(col) + + self._validate_dest_table(table) + + def copy(self, schema=None, target_table=None, **kw): + fkc = ForeignKeyConstraint( + [x.parent.key for x in self.elements], + [x._get_colspec( + schema=schema, + table_name=target_table.name + if target_table is not None + and x._table_key() == x.parent.table.key + else None) + for x in self.elements], + name=self.name, + onupdate=self.onupdate, + ondelete=self.ondelete, + use_alter=self.use_alter, + deferrable=self.deferrable, + initially=self.initially, + link_to_name=self.link_to_name, + match=self.match + ) + for self_fk, other_fk in zip( + self.elements, + fkc.elements): + self_fk._schema_item_copy(other_fk) + return self._schema_item_copy(fkc) + + +class PrimaryKeyConstraint(ColumnCollectionConstraint): + """A table-level PRIMARY KEY constraint. + + The :class:`.PrimaryKeyConstraint` object is present automatically + on any :class:`.Table` object; it is assigned a set of + :class:`.Column` objects corresponding to those marked with + the :paramref:`.Column.primary_key` flag:: + + >>> my_table = Table('mytable', metadata, + ... Column('id', Integer, primary_key=True), + ... Column('version_id', Integer, primary_key=True), + ... Column('data', String(50)) + ... ) + >>> my_table.primary_key + PrimaryKeyConstraint( + Column('id', Integer(), table=, + primary_key=True, nullable=False), + Column('version_id', Integer(), table=, + primary_key=True, nullable=False) + ) + + The primary key of a :class:`.Table` can also be specified by using + a :class:`.PrimaryKeyConstraint` object explicitly; in this mode of usage, + the "name" of the constraint can also be specified, as well as other + options which may be recognized by dialects:: + + my_table = Table('mytable', metadata, + Column('id', Integer), + Column('version_id', Integer), + Column('data', String(50)), + PrimaryKeyConstraint('id', 'version_id', + name='mytable_pk') + ) + + The two styles of column-specification should generally not be mixed. + An warning is emitted if the columns present in the + :class:`.PrimaryKeyConstraint` + don't match the columns that were marked as ``primary_key=True``, if both + are present; in this case, the columns are taken strictly from the + :class:`.PrimaryKeyConstraint` declaration, and those columns otherwise + marked as ``primary_key=True`` are ignored. This behavior is intended to + be backwards compatible with previous behavior. + + .. 
versionchanged:: 0.9.2 Using a mixture of columns within a + :class:`.PrimaryKeyConstraint` in addition to columns marked as + ``primary_key=True`` now emits a warning if the lists don't match. + The ultimate behavior of ignoring those columns marked with the flag + only is currently maintained for backwards compatibility; this warning + may raise an exception in a future release. + + For the use case where specific options are to be specified on the + :class:`.PrimaryKeyConstraint`, but the usual style of using + ``primary_key=True`` flags is still desirable, an empty + :class:`.PrimaryKeyConstraint` may be specified, which will take on the + primary key column collection from the :class:`.Table` based on the + flags:: + + my_table = Table('mytable', metadata, + Column('id', Integer, primary_key=True), + Column('version_id', Integer, primary_key=True), + Column('data', String(50)), + PrimaryKeyConstraint(name='mytable_pk', + mssql_clustered=True) + ) + + .. versionadded:: 0.9.2 an empty :class:`.PrimaryKeyConstraint` may now + be specified for the purposes of establishing keyword arguments with + the constraint, independently of the specification of "primary key" + columns within the :class:`.Table` itself; columns marked as + ``primary_key=True`` will be gathered into the empty constraint's + column collection. + + """ + + __visit_name__ = 'primary_key_constraint' + + def _set_parent(self, table): + super(PrimaryKeyConstraint, self)._set_parent(table) + + if table.primary_key is not self: + table.constraints.discard(table.primary_key) + table.primary_key = self + table.constraints.add(self) + + table_pks = [c for c in table.c if c.primary_key] + if self.columns and table_pks and \ + set(table_pks) != set(self.columns.values()): + util.warn( + "Table '%s' specifies columns %s as primary_key=True, " + "not matching locally specified columns %s; setting the " + "current primary key columns to %s. This warning " + "may become an exception in a future release" % + ( + table.name, + ", ".join("'%s'" % c.name for c in table_pks), + ", ".join("'%s'" % c.name for c in self.columns), + ", ".join("'%s'" % c.name for c in self.columns) + ) + ) + table_pks[:] = [] + + for c in self.columns: + c.primary_key = True + c.nullable = False + self.columns.extend(table_pks) + + def _reload(self, columns): + """repopulate this :class:`.PrimaryKeyConstraint` given + a set of columns. + + Existing columns in the table that are marked as primary_key=True + are maintained. + + Also fires a new event. + + This is basically like putting a whole new + :class:`.PrimaryKeyConstraint` object on the parent + :class:`.Table` object without actually replacing the object. + + The ordering of the given list of columns is also maintained; these + columns will be appended to the list of columns after any which + are already present. + + """ + + # set the primary key flag on new columns. + # note any existing PK cols on the table also have their + # flag still set. + for col in columns: + col.primary_key = True + + self.columns.extend(columns) + + self._set_parent_with_dispatch(self.table) + + def _replace(self, col): + self.columns.replace(col) + + +class UniqueConstraint(ColumnCollectionConstraint): + """A table-level UNIQUE constraint. + + Defines a single column or composite UNIQUE constraint. For a no-frills, + single column constraint, adding ``unique=True`` to the ``Column`` + definition is a shorthand equivalent for an unnamed, single column + UniqueConstraint. 
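+
+    For a combined sketch of the table-level constraint forms described
+    above (an editorial illustration, not upstream documentation; table
+    and column names are hypothetical)::
+
+        from sqlalchemy import (MetaData, Table, Column, Integer, String,
+                                PrimaryKeyConstraint, ForeignKeyConstraint,
+                                UniqueConstraint)
+
+        metadata = MetaData()
+
+        parent = Table('parent', metadata,
+                       Column('pid', Integer),
+                       Column('region', String(10)),
+                       PrimaryKeyConstraint('pid', 'region',
+                                            name='pk_parent'))
+
+        child = Table('child', metadata,
+                      Column('id', Integer, primary_key=True),
+                      Column('pid', Integer),
+                      Column('region', String(10)),
+                      Column('slug', String(30)),
+                      # composite FOREIGN KEY referencing the composite PK
+                      ForeignKeyConstraint(['pid', 'region'],
+                                           ['parent.pid', 'parent.region'],
+                                           name='fk_child_parent'),
+                      UniqueConstraint('slug', name='uq_child_slug'))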
+ """ + + __visit_name__ = 'unique_constraint' + + +class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem): + """A table-level INDEX. + + Defines a composite (one or more column) INDEX. + + E.g.:: + + sometable = Table("sometable", metadata, + Column("name", String(50)), + Column("address", String(100)) + ) + + Index("some_index", sometable.c.name) + + For a no-frills, single column index, adding + :class:`.Column` also supports ``index=True``:: + + sometable = Table("sometable", metadata, + Column("name", String(50), index=True) + ) + + For a composite index, multiple columns can be specified:: + + Index("some_index", sometable.c.name, sometable.c.address) + + Functional indexes are supported as well, typically by using the + :data:`.func` construct in conjunction with table-bound + :class:`.Column` objects:: + + Index("some_index", func.lower(sometable.c.name)) + + .. versionadded:: 0.8 support for functional and expression-based indexes. + + An :class:`.Index` can also be manually associated with a :class:`.Table`, + either through inline declaration or using + :meth:`.Table.append_constraint`. When this approach is used, the names + of the indexed columns can be specified as strings:: + + Table("sometable", metadata, + Column("name", String(50)), + Column("address", String(100)), + Index("some_index", "name", "address") + ) + + To support functional or expression-based indexes in this form, the + :func:`.text` construct may be used:: + + from sqlalchemy import text + + Table("sometable", metadata, + Column("name", String(50)), + Column("address", String(100)), + Index("some_index", text("lower(name)")) + ) + + .. versionadded:: 0.9.5 the :func:`.text` construct may be used to + specify :class:`.Index` expressions, provided the :class:`.Index` + is explicitly associated with the :class:`.Table`. + + + .. seealso:: + + :ref:`schema_indexes` - General information on :class:`.Index`. + + :ref:`postgresql_indexes` - PostgreSQL-specific options available for + the :class:`.Index` construct. + + :ref:`mysql_indexes` - MySQL-specific options available for the + :class:`.Index` construct. + + :ref:`mssql_indexes` - MSSQL-specific options available for the + :class:`.Index` construct. + + """ + + __visit_name__ = 'index' + + def __init__(self, name, *expressions, **kw): + """Construct an index object. + + :param name: + The name of the index + + :param \*expressions: + Column expressions to include in the index. The expressions + are normally instances of :class:`.Column`, but may also + be arbitrary SQL expressions which ultimately refer to a + :class:`.Column`. + + :param unique=False: + Keyword only argument; if True, create a unique index. + + :param quote=None: + Keyword only argument; whether to apply quoting to the name of + the index. Works in the same manner as that of + :paramref:`.Column.quote`. + + :param info=None: Optional data dictionary which will be populated + into the :attr:`.SchemaItem.info` attribute of this object. + + .. versionadded:: 1.0.0 + + :param \**kw: Additional keyword arguments not mentioned above are + dialect specific, and passed in the form + ``_``. See the documentation regarding an + individual dialect at :ref:`dialect_toplevel` for detail on + documented arguments. 
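+
+        A brief sketch (an editorial illustration; ``postgresql_using`` is
+        the PostgreSQL dialect's documented keyword and only takes effect
+        when that backend renders the DDL)::
+
+            from sqlalchemy import (Table, Column, Integer, String,
+                                    MetaData, Index)
+
+            metadata = MetaData()
+            docs = Table('docs', metadata,
+                         Column('id', Integer, primary_key=True),
+                         Column('body', String(200)))
+
+            # a unique index, plus a dialect-specific keyword argument
+            Index('ix_docs_body', docs.c.body, unique=True,
+                  postgresql_using='btree')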
+ + """ + self.table = None + + columns = [] + for expr, column, strname, add_element in self.\ + _extract_col_expression_collection(expressions): + if add_element is not None: + columns.append(add_element) + + self.expressions = expressions + self.name = quoted_name(name, kw.pop("quote", None)) + self.unique = kw.pop('unique', False) + if 'info' in kw: + self.info = kw.pop('info') + self._validate_dialect_kwargs(kw) + + # will call _set_parent() if table-bound column + # objects are present + ColumnCollectionMixin.__init__(self, *columns) + + def _set_parent(self, table): + ColumnCollectionMixin._set_parent(self, table) + + if self.table is not None and table is not self.table: + raise exc.ArgumentError( + "Index '%s' is against table '%s', and " + "cannot be associated with table '%s'." % ( + self.name, + self.table.description, + table.description + ) + ) + self.table = table + table.indexes.add(self) + + self.expressions = [ + expr if isinstance(expr, ClauseElement) + else colexpr + for expr, colexpr in util.zip_longest(self.expressions, + self.columns) + ] + + @property + def bind(self): + """Return the connectable associated with this Index.""" + + return self.table.bind + + def create(self, bind=None): + """Issue a ``CREATE`` statement for this + :class:`.Index`, using the given :class:`.Connectable` + for connectivity. + + .. seealso:: + + :meth:`.MetaData.create_all`. + + """ + if bind is None: + bind = _bind_or_error(self) + bind._run_visitor(ddl.SchemaGenerator, self) + return self + + def drop(self, bind=None): + """Issue a ``DROP`` statement for this + :class:`.Index`, using the given :class:`.Connectable` + for connectivity. + + .. seealso:: + + :meth:`.MetaData.drop_all`. + + """ + if bind is None: + bind = _bind_or_error(self) + bind._run_visitor(ddl.SchemaDropper, self) + + def __repr__(self): + return 'Index(%s)' % ( + ", ".join( + [repr(self.name)] + + [repr(e) for e in self.expressions] + + (self.unique and ["unique=True"] or []) + )) + + +DEFAULT_NAMING_CONVENTION = util.immutabledict({ + "ix": 'ix_%(column_0_label)s' +}) + + +class MetaData(SchemaItem): + """A collection of :class:`.Table` objects and their associated schema + constructs. + + Holds a collection of :class:`.Table` objects as well as + an optional binding to an :class:`.Engine` or + :class:`.Connection`. If bound, the :class:`.Table` objects + in the collection and their columns may participate in implicit SQL + execution. + + The :class:`.Table` objects themselves are stored in the + :attr:`.MetaData.tables` dictionary. + + :class:`.MetaData` is a thread-safe object for read operations. + Construction of new tables within a single :class:`.MetaData` object, + either explicitly or via reflection, may not be completely thread-safe. + + .. seealso:: + + :ref:`metadata_describing` - Introduction to database metadata + + """ + + __visit_name__ = 'metadata' + + def __init__(self, bind=None, reflect=False, schema=None, + quote_schema=None, + naming_convention=DEFAULT_NAMING_CONVENTION, + info=None + ): + """Create a new MetaData object. + + :param bind: + An Engine or Connection to bind to. May also be a string or URL + instance, these are passed to create_engine() and this MetaData will + be bound to the resulting engine. + + :param reflect: + Optional, automatically load all tables from the bound database. + Defaults to False. ``bind`` is required when this option is set. + + .. deprecated:: 0.8 + Please use the :meth:`.MetaData.reflect` method. 
+ + :param schema: + The default schema to use for the :class:`.Table`, + :class:`.Sequence`, and other objects associated with this + :class:`.MetaData`. Defaults to ``None``. + + :param quote_schema: + Sets the ``quote_schema`` flag for those :class:`.Table`, + :class:`.Sequence`, and other objects which make usage of the + local ``schema`` name. + + :param info: Optional data dictionary which will be populated into the + :attr:`.SchemaItem.info` attribute of this object. + + .. versionadded:: 1.0.0 + + :param naming_convention: a dictionary referring to values which + will establish default naming conventions for :class:`.Constraint` + and :class:`.Index` objects, for those objects which are not given + a name explicitly. + + The keys of this dictionary may be: + + * a constraint or Index class, e.g. the :class:`.UniqueConstraint`, + :class:`.ForeignKeyConstraint` class, the :class:`.Index` class + + * a string mnemonic for one of the known constraint classes; + ``"fk"``, ``"pk"``, ``"ix"``, ``"ck"``, ``"uq"`` for foreign key, + primary key, index, check, and unique constraint, respectively. + + * the string name of a user-defined "token" that can be used + to define new naming tokens. + + The values associated with each "constraint class" or "constraint + mnemonic" key are string naming templates, such as + ``"uq_%(table_name)s_%(column_0_name)s"``, + which describe how the name should be composed. The values + associated with user-defined "token" keys should be callables of the + form ``fn(constraint, table)``, which accepts the constraint/index + object and :class:`.Table` as arguments, returning a string + result. + + The built-in names are as follows, some of which may only be + available for certain types of constraint: + + * ``%(table_name)s`` - the name of the :class:`.Table` object + associated with the constraint. + + * ``%(referred_table_name)s`` - the name of the :class:`.Table` + object associated with the referencing target of a + :class:`.ForeignKeyConstraint`. + + * ``%(column_0_name)s`` - the name of the :class:`.Column` at + index position "0" within the constraint. + + * ``%(column_0_label)s`` - the label of the :class:`.Column` at + index position "0", e.g. :attr:`.Column.label` + + * ``%(column_0_key)s`` - the key of the :class:`.Column` at + index position "0", e.g. :attr:`.Column.key` + + * ``%(referred_column_0_name)s`` - the name of a :class:`.Column` + at index position "0" referenced by a + :class:`.ForeignKeyConstraint`. + + * ``%(constraint_name)s`` - a special key that refers to the + existing name given to the constraint. When this key is + present, the :class:`.Constraint` object's existing name will be + replaced with one that is composed from template string that + uses this token. When this token is present, it is required that + the :class:`.Constraint` is given an expicit name ahead of time. + + * user-defined: any additional token may be implemented by passing + it along with a ``fn(constraint, table)`` callable to the + naming_convention dictionary. + + .. versionadded:: 0.9.2 + + .. seealso:: + + :ref:`constraint_naming_conventions` - for detailed usage + examples. 
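+
+        For example (an editorial sketch using the built-in mnemonics and
+        tokens described above)::
+
+            from sqlalchemy import MetaData
+
+            convention = {
+                "ix": "ix_%(column_0_label)s",
+                "uq": "uq_%(table_name)s_%(column_0_name)s",
+                # %(constraint_name)s requires the constraint to be
+                # explicitly named ahead of time
+                "ck": "ck_%(table_name)s_%(constraint_name)s",
+                "fk": "fk_%(table_name)s_%(column_0_name)s_"
+                      "%(referred_table_name)s",
+                "pk": "pk_%(table_name)s"
+            }
+
+            metadata = MetaData(naming_convention=convention)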
+ + """ + self.tables = util.immutabledict() + self.schema = quoted_name(schema, quote_schema) + self.naming_convention = naming_convention + if info: + self.info = info + self._schemas = set() + self._sequences = {} + self._fk_memos = collections.defaultdict(list) + + self.bind = bind + if reflect: + util.warn_deprecated("reflect=True is deprecate; please " + "use the reflect() method.") + if not bind: + raise exc.ArgumentError( + "A bind must be supplied in conjunction " + "with reflect=True") + self.reflect() + + tables = None + """A dictionary of :class:`.Table` objects keyed to their name or "table key". + + The exact key is that determined by the :attr:`.Table.key` attribute; + for a table with no :attr:`.Table.schema` attribute, this is the same + as :attr:`.Table.name`. For a table with a schema, it is typically of the + form ``schemaname.tablename``. + + .. seealso:: + + :attr:`.MetaData.sorted_tables` + + """ + + def __repr__(self): + return 'MetaData(bind=%r)' % self.bind + + def __contains__(self, table_or_key): + if not isinstance(table_or_key, util.string_types): + table_or_key = table_or_key.key + return table_or_key in self.tables + + def _add_table(self, name, schema, table): + key = _get_table_key(name, schema) + dict.__setitem__(self.tables, key, table) + if schema: + self._schemas.add(schema) + + def _remove_table(self, name, schema): + key = _get_table_key(name, schema) + removed = dict.pop(self.tables, key, None) + if removed is not None: + for fk in removed.foreign_keys: + fk._remove_from_metadata(self) + if self._schemas: + self._schemas = set([t.schema + for t in self.tables.values() + if t.schema is not None]) + + def __getstate__(self): + return {'tables': self.tables, + 'schema': self.schema, + 'schemas': self._schemas, + 'sequences': self._sequences, + 'fk_memos': self._fk_memos, + 'naming_convention': self.naming_convention + } + + def __setstate__(self, state): + self.tables = state['tables'] + self.schema = state['schema'] + self.naming_convention = state['naming_convention'] + self._bind = None + self._sequences = state['sequences'] + self._schemas = state['schemas'] + self._fk_memos = state['fk_memos'] + + def is_bound(self): + """True if this MetaData is bound to an Engine or Connection.""" + + return self._bind is not None + + def bind(self): + """An :class:`.Engine` or :class:`.Connection` to which this + :class:`.MetaData` is bound. + + Typically, a :class:`.Engine` is assigned to this attribute + so that "implicit execution" may be used, or alternatively + as a means of providing engine binding information to an + ORM :class:`.Session` object:: + + engine = create_engine("someurl://") + metadata.bind = engine + + .. seealso:: + + :ref:`dbengine_implicit` - background on "bound metadata" + + """ + return self._bind + + @util.dependencies("sqlalchemy.engine.url") + def _bind_to(self, url, bind): + """Bind this MetaData to an Engine, Connection, string or URL.""" + + if isinstance(bind, util.string_types + (url.URL, )): + self._bind = sqlalchemy.create_engine(bind) + else: + self._bind = bind + bind = property(bind, _bind_to) + + def clear(self): + """Clear all Table objects from this MetaData.""" + + dict.clear(self.tables) + self._schemas.clear() + self._fk_memos.clear() + + def remove(self, table): + """Remove the given Table object from this MetaData.""" + + self._remove_table(table.name, table.schema) + + @property + def sorted_tables(self): + """Returns a list of :class:`.Table` objects sorted in order of + foreign key dependency. 
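+
+        E.g. (an editorial sketch; assumes ``metadata`` and ``engine``
+        objects already exist)::
+
+            # referred-to tables come before the tables that reference them
+            for table in metadata.sorted_tables:
+                print(table.name)
+
+            # the reverse ordering is suitable for dropping
+            for table in reversed(metadata.sorted_tables):
+                table.drop(engine, checkfirst=True)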
+ + The sorting will place :class:`.Table` objects that have dependencies + first, before the dependencies themselves, representing the + order in which they can be created. To get the order in which + the tables would be dropped, use the ``reversed()`` Python built-in. + + .. warning:: + + The :attr:`.sorted_tables` accessor cannot by itself accommodate + automatic resolution of dependency cycles between tables, which + are usually caused by mutually dependent foreign key constraints. + To resolve these cycles, either the + :paramref:`.ForeignKeyConstraint.use_alter` parameter may be appled + to those constraints, or use the + :func:`.schema.sort_tables_and_constraints` function which will break + out foreign key constraints involved in cycles separately. + + .. seealso:: + + :func:`.schema.sort_tables` + + :func:`.schema.sort_tables_and_constraints` + + :attr:`.MetaData.tables` + + :meth:`.Inspector.get_table_names` + + :meth:`.Inspector.get_sorted_table_and_fkc_names` + + + """ + return ddl.sort_tables(sorted(self.tables.values(), key=lambda t: t.key)) + + def reflect(self, bind=None, schema=None, views=False, only=None, + extend_existing=False, + autoload_replace=True, + **dialect_kwargs): + """Load all available table definitions from the database. + + Automatically creates ``Table`` entries in this ``MetaData`` for any + table available in the database but not yet present in the + ``MetaData``. May be called multiple times to pick up tables recently + added to the database, however no special action is taken if a table + in this ``MetaData`` no longer exists in the database. + + :param bind: + A :class:`.Connectable` used to access the database; if None, uses + the existing bind on this ``MetaData``, if any. + + :param schema: + Optional, query and reflect tables from an alterate schema. + If None, the schema associated with this :class:`.MetaData` + is used, if any. + + :param views: + If True, also reflect views. + + :param only: + Optional. Load only a sub-set of available named tables. May be + specified as a sequence of names or a callable. + + If a sequence of names is provided, only those tables will be + reflected. An error is raised if a table is requested but not + available. Named tables already present in this ``MetaData`` are + ignored. + + If a callable is provided, it will be used as a boolean predicate to + filter the list of potential table names. The callable is called + with a table name and this ``MetaData`` instance as positional + arguments and should return a true value for any table to reflect. + + :param extend_existing: Passed along to each :class:`.Table` as + :paramref:`.Table.extend_existing`. + + .. versionadded:: 0.9.1 + + :param autoload_replace: Passed along to each :class:`.Table` as + :paramref:`.Table.autoload_replace`. + + .. versionadded:: 0.9.1 + + :param \**dialect_kwargs: Additional keyword arguments not mentioned + above are dialect specific, and passed in the form + ``_``. See the documentation regarding an + individual dialect at :ref:`dialect_toplevel` for detail on + documented arguments. + + .. versionadded:: 0.9.2 - Added + :paramref:`.MetaData.reflect.**dialect_kwargs` to support + dialect-level reflection options for all :class:`.Table` + objects reflected. 
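+
+        E.g. (an editorial sketch; the database URL and table names are
+        hypothetical)::
+
+            from sqlalchemy import create_engine, MetaData
+
+            engine = create_engine('sqlite:///existing.db')
+            metadata = MetaData()
+
+            # load every available table, including views
+            metadata.reflect(bind=engine, views=True)
+
+            # or load a filtered subset via a callable predicate
+            metadata.reflect(bind=engine,
+                             only=lambda name, meta: name.startswith('user_'))
+
+            user_accounts = metadata.tables['user_accounts']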
+ + """ + if bind is None: + bind = _bind_or_error(self) + + with bind.connect() as conn: + + reflect_opts = { + 'autoload': True, + 'autoload_with': conn, + 'extend_existing': extend_existing, + 'autoload_replace': autoload_replace + } + + reflect_opts.update(dialect_kwargs) + + if schema is None: + schema = self.schema + + if schema is not None: + reflect_opts['schema'] = schema + + available = util.OrderedSet( + bind.engine.table_names(schema, connection=conn)) + if views: + available.update( + bind.dialect.get_view_names(conn, schema) + ) + + if schema is not None: + available_w_schema = util.OrderedSet(["%s.%s" % (schema, name) + for name in available]) + else: + available_w_schema = available + + current = set(self.tables) + + if only is None: + load = [name for name, schname in + zip(available, available_w_schema) + if extend_existing or schname not in current] + elif util.callable(only): + load = [name for name, schname in + zip(available, available_w_schema) + if (extend_existing or schname not in current) + and only(name, self)] + else: + missing = [name for name in only if name not in available] + if missing: + s = schema and (" schema '%s'" % schema) or '' + raise exc.InvalidRequestError( + 'Could not reflect: requested table(s) not available ' + 'in %s%s: (%s)' % + (bind.engine.url, s, ', '.join(missing))) + load = [name for name in only if extend_existing or + name not in current] + + for name in load: + Table(name, self, **reflect_opts) + + def append_ddl_listener(self, event_name, listener): + """Append a DDL event listener to this ``MetaData``. + + .. deprecated:: 0.7 + See :class:`.DDLEvents`. + + """ + def adapt_listener(target, connection, **kw): + tables = kw['tables'] + listener(event, target, connection, tables=tables) + + event.listen(self, "" + event_name.replace('-', '_'), adapt_listener) + + def create_all(self, bind=None, tables=None, checkfirst=True): + """Create all tables stored in this metadata. + + Conditional by default, will not attempt to recreate tables already + present in the target database. + + :param bind: + A :class:`.Connectable` used to access the + database; if None, uses the existing bind on this ``MetaData``, if + any. + + :param tables: + Optional list of ``Table`` objects, which is a subset of the total + tables in the ``MetaData`` (others are ignored). + + :param checkfirst: + Defaults to True, don't issue CREATEs for tables already present + in the target database. + + """ + if bind is None: + bind = _bind_or_error(self) + bind._run_visitor(ddl.SchemaGenerator, + self, + checkfirst=checkfirst, + tables=tables) + + def drop_all(self, bind=None, tables=None, checkfirst=True): + """Drop all tables stored in this metadata. + + Conditional by default, will not attempt to drop tables not present in + the target database. + + :param bind: + A :class:`.Connectable` used to access the + database; if None, uses the existing bind on this ``MetaData``, if + any. + + :param tables: + Optional list of ``Table`` objects, which is a subset of the + total tables in the ``MetaData`` (others are ignored). + + :param checkfirst: + Defaults to True, only issue DROPs for tables confirmed to be + present in the target database. + + """ + if bind is None: + bind = _bind_or_error(self) + bind._run_visitor(ddl.SchemaDropper, + self, + checkfirst=checkfirst, + tables=tables) + + +class ThreadLocalMetaData(MetaData): + """A MetaData variant that presents a different ``bind`` in every thread. 
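+
+    A rough per-thread sketch (an editorial illustration, not upstream
+    documentation; the URL is hypothetical)::
+
+        import threading
+        from sqlalchemy import create_engine
+        from sqlalchemy.schema import ThreadLocalMetaData
+
+        meta = ThreadLocalMetaData()
+
+        def worker(url):
+            # each thread assigns its own bind; other threads are unaffected
+            meta.bind = create_engine(url)
+            assert meta.is_bound()
+
+        t = threading.Thread(target=worker, args=('sqlite://',))
+        t.start()
+        t.join()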
+ + Makes the ``bind`` property of the MetaData a thread-local value, allowing + this collection of tables to be bound to different ``Engine`` + implementations or connections in each thread. + + The ThreadLocalMetaData starts off bound to None in each thread. Binds + must be made explicitly by assigning to the ``bind`` property or using + ``connect()``. You can also re-bind dynamically multiple times per + thread, just like a regular ``MetaData``. + + """ + + __visit_name__ = 'metadata' + + def __init__(self): + """Construct a ThreadLocalMetaData.""" + + self.context = util.threading.local() + self.__engines = {} + super(ThreadLocalMetaData, self).__init__() + + def bind(self): + """The bound Engine or Connection for this thread. + + This property may be assigned an Engine or Connection, or assigned a + string or URL to automatically create a basic Engine for this bind + with ``create_engine()``.""" + + return getattr(self.context, '_engine', None) + + @util.dependencies("sqlalchemy.engine.url") + def _bind_to(self, url, bind): + """Bind to a Connectable in the caller's thread.""" + + if isinstance(bind, util.string_types + (url.URL, )): + try: + self.context._engine = self.__engines[bind] + except KeyError: + e = sqlalchemy.create_engine(bind) + self.__engines[bind] = e + self.context._engine = e + else: + # TODO: this is squirrely. we shouldn't have to hold onto engines + # in a case like this + if bind not in self.__engines: + self.__engines[bind] = bind + self.context._engine = bind + + bind = property(bind, _bind_to) + + def is_bound(self): + """True if there is a bind for this thread.""" + return (hasattr(self.context, '_engine') and + self.context._engine is not None) + + def dispose(self): + """Dispose all bound engines, in all thread contexts.""" + + for e in self.__engines.values(): + if hasattr(e, 'dispose'): + e.dispose() diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/selectable.py b/lib/python3.4/site-packages/sqlalchemy/sql/selectable.py new file mode 100644 index 0000000..b986fd5 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/selectable.py @@ -0,0 +1,3436 @@ +# sql/selectable.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""The :class:`.FromClause` class of SQL expression elements, representing +SQL tables and derived rowsets. + +""" + +from .elements import ClauseElement, TextClause, ClauseList, \ + and_, Grouping, UnaryExpression, literal_column, BindParameter +from .elements import _clone, \ + _literal_as_text, _interpret_as_column_or_from, _expand_cloned,\ + _select_iterables, _anonymous_label, _clause_element_as_expr,\ + _cloned_intersection, _cloned_difference, True_, \ + _literal_as_label_reference, _literal_and_labels_as_label_reference +from .base import Immutable, Executable, _generative, \ + ColumnCollection, ColumnSet, _from_objects, Generative +from . import type_api +from .. import inspection +from .. import util +from .. import exc +from operator import attrgetter +from . 
import operators +import operator +import collections +from .annotation import Annotated +import itertools +from sqlalchemy.sql.visitors import Visitable + + +def _interpret_as_from(element): + insp = inspection.inspect(element, raiseerr=False) + if insp is None: + if isinstance(element, util.string_types): + util.warn_limited( + "Textual SQL FROM expression %(expr)r should be " + "explicitly declared as text(%(expr)r), " + "or use table(%(expr)r) for more specificity", + {"expr": util.ellipses_string(element)}) + + return TextClause(util.text_type(element)) + try: + return insp.selectable + except AttributeError: + raise exc.ArgumentError("FROM expression expected") + + +def _interpret_as_select(element): + element = _interpret_as_from(element) + if isinstance(element, Alias): + element = element.original + if not isinstance(element, SelectBase): + element = element.select() + return element + + +class _OffsetLimitParam(BindParameter): + @property + def _limit_offset_value(self): + return self.effective_value + + +def _offset_or_limit_clause(element, name=None, type_=None): + """Convert the given value to an "offset or limit" clause. + + This handles incoming integers and converts to an expression; if + an expression is already given, it is passed through. + + """ + if element is None: + return None + elif hasattr(element, '__clause_element__'): + return element.__clause_element__() + elif isinstance(element, Visitable): + return element + else: + value = util.asint(element) + return _OffsetLimitParam(name, value, type_=type_, unique=True) + + +def _offset_or_limit_clause_asint(clause, attrname): + """Convert the "offset or limit" clause of a select construct to an + integer. + + This is only possible if the value is stored as a simple bound parameter. + Otherwise, a compilation error is raised. + + """ + if clause is None: + return None + try: + value = clause._limit_offset_value + except AttributeError: + raise exc.CompileError( + "This SELECT structure does not use a simple " + "integer value for %s" % attrname) + else: + return util.asint(value) + + +def subquery(alias, *args, **kwargs): + """Return an :class:`.Alias` object derived + from a :class:`.Select`. + + name + alias name + + \*args, \**kwargs + + all other arguments are delivered to the + :func:`select` function. + + """ + return Select(*args, **kwargs).alias(alias) + + +def alias(selectable, name=None, flat=False): + """Return an :class:`.Alias` object. + + An :class:`.Alias` represents any :class:`.FromClause` + with an alternate name assigned within SQL, typically using the ``AS`` + clause when generated, e.g. ``SELECT * FROM table AS aliasname``. + + Similar functionality is available via the + :meth:`~.FromClause.alias` method + available on all :class:`.FromClause` subclasses. + + When an :class:`.Alias` is created from a :class:`.Table` object, + this has the effect of the table being rendered + as ``tablename AS aliasname`` in a SELECT statement. + + For :func:`.select` objects, the effect is that of creating a named + subquery, i.e. ``(select ...) AS aliasname``. + + The ``name`` parameter is optional, and provides the name + to use in the rendered SQL. If blank, an "anonymous" name + will be deterministically generated at compile time. + Deterministic means the name is guaranteed to be unique against + other constructs used in the same statement, and will also be the + same name for each successive compilation of the same statement + object. 
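+
+    E.g. (an editorial sketch; the table is hypothetical)::
+
+        from sqlalchemy import (Table, Column, Integer, MetaData,
+                                alias, select)
+
+        metadata = MetaData()
+        user = Table('user', metadata,
+                     Column('id', Integer, primary_key=True))
+
+        # renders as '"user" AS u1' in the FROM clause
+        u1 = alias(user, name='u1')
+        stmt = select([user.c.id, u1.c.id]).where(user.c.id != u1.c.id)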
+ + :param selectable: any :class:`.FromClause` subclass, + such as a table, select statement, etc. + + :param name: string name to be assigned as the alias. + If ``None``, a name will be deterministically generated + at compile time. + + :param flat: Will be passed through to if the given selectable + is an instance of :class:`.Join` - see :meth:`.Join.alias` + for details. + + .. versionadded:: 0.9.0 + + """ + return selectable.alias(name=name, flat=flat) + + +class Selectable(ClauseElement): + """mark a class as being selectable""" + __visit_name__ = 'selectable' + + is_selectable = True + + @property + def selectable(self): + return self + + +class HasPrefixes(object): + _prefixes = () + + @_generative + def prefix_with(self, *expr, **kw): + """Add one or more expressions following the statement keyword, i.e. + SELECT, INSERT, UPDATE, or DELETE. Generative. + + This is used to support backend-specific prefix keywords such as those + provided by MySQL. + + E.g.:: + + stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql") + + Multiple prefixes can be specified by multiple calls + to :meth:`.prefix_with`. + + :param \*expr: textual or :class:`.ClauseElement` construct which + will be rendered following the INSERT, UPDATE, or DELETE + keyword. + :param \**kw: A single keyword 'dialect' is accepted. This is an + optional string dialect name which will + limit rendering of this prefix to only that dialect. + + """ + dialect = kw.pop('dialect', None) + if kw: + raise exc.ArgumentError("Unsupported argument(s): %s" % + ",".join(kw)) + self._setup_prefixes(expr, dialect) + + def _setup_prefixes(self, prefixes, dialect=None): + self._prefixes = self._prefixes + tuple( + [(_literal_as_text(p, warn=False), dialect) for p in prefixes]) + + +class HasSuffixes(object): + _suffixes = () + + @_generative + def suffix_with(self, *expr, **kw): + """Add one or more expressions following the statement as a whole. + + This is used to support backend-specific suffix keywords on + certain constructs. + + E.g.:: + + stmt = select([col1, col2]).cte().suffix_with( + "cycle empno set y_cycle to 1 default 0", dialect="oracle") + + Multiple suffixes can be specified by multiple calls + to :meth:`.suffix_with`. + + :param \*expr: textual or :class:`.ClauseElement` construct which + will be rendered following the target clause. + :param \**kw: A single keyword 'dialect' is accepted. This is an + optional string dialect name which will + limit rendering of this suffix to only that dialect. + + """ + dialect = kw.pop('dialect', None) + if kw: + raise exc.ArgumentError("Unsupported argument(s): %s" % + ",".join(kw)) + self._setup_suffixes(expr, dialect) + + def _setup_suffixes(self, suffixes, dialect=None): + self._suffixes = self._suffixes + tuple( + [(_literal_as_text(p, warn=False), dialect) for p in suffixes]) + + +class FromClause(Selectable): + """Represent an element that can be used within the ``FROM`` + clause of a ``SELECT`` statement. + + The most common forms of :class:`.FromClause` are the + :class:`.Table` and the :func:`.select` constructs. Key + features common to all :class:`.FromClause` objects include: + + * a :attr:`.c` collection, which provides per-name access to a collection + of :class:`.ColumnElement` objects. + * a :attr:`.primary_key` attribute, which is a collection of all those + :class:`.ColumnElement` objects that indicate the ``primary_key`` flag. 
+ * Methods to generate various derivations of a "from" clause, including + :meth:`.FromClause.alias`, :meth:`.FromClause.join`, + :meth:`.FromClause.select`. + + + """ + __visit_name__ = 'fromclause' + named_with_column = False + _hide_froms = [] + + _is_join = False + _is_select = False + _is_from_container = False + + _textual = False + """a marker that allows us to easily distinguish a :class:`.TextAsFrom` + or similar object from other kinds of :class:`.FromClause` objects.""" + + schema = None + """Define the 'schema' attribute for this :class:`.FromClause`. + + This is typically ``None`` for most objects except that of + :class:`.Table`, where it is taken as the value of the + :paramref:`.Table.schema` argument. + + """ + + _memoized_property = util.group_expirable_memoized_property(["_columns"]) + + @util.dependencies("sqlalchemy.sql.functions") + def count(self, functions, whereclause=None, **params): + """return a SELECT COUNT generated against this + :class:`.FromClause`.""" + + if self.primary_key: + col = list(self.primary_key)[0] + else: + col = list(self.columns)[0] + return Select( + [functions.func.count(col).label('tbl_row_count')], + whereclause, + from_obj=[self], + **params) + + def select(self, whereclause=None, **params): + """return a SELECT of this :class:`.FromClause`. + + .. seealso:: + + :func:`~.sql.expression.select` - general purpose + method which allows for arbitrary column lists. + + """ + + return Select([self], whereclause, **params) + + def join(self, right, onclause=None, isouter=False): + """Return a :class:`.Join` from this :class:`.FromClause` + to another :class:`FromClause`. + + E.g.:: + + from sqlalchemy import join + + j = user_table.join(address_table, + user_table.c.id == address_table.c.user_id) + stmt = select([user_table]).select_from(j) + + would emit SQL along the lines of:: + + SELECT user.id, user.name FROM user + JOIN address ON user.id = address.user_id + + :param right: the right side of the join; this is any + :class:`.FromClause` object such as a :class:`.Table` object, and + may also be a selectable-compatible object such as an ORM-mapped + class. + + :param onclause: a SQL expression representing the ON clause of the + join. If left at ``None``, :meth:`.FromClause.join` will attempt to + join the two tables based on a foreign key relationship. + + :param isouter: if True, render a LEFT OUTER JOIN, instead of JOIN. + + .. seealso:: + + :func:`.join` - standalone function + + :class:`.Join` - the type of object produced + + """ + + return Join(self, right, onclause, isouter) + + def outerjoin(self, right, onclause=None): + """Return a :class:`.Join` from this :class:`.FromClause` + to another :class:`FromClause`, with the "isouter" flag set to + True. + + E.g.:: + + from sqlalchemy import outerjoin + + j = user_table.outerjoin(address_table, + user_table.c.id == address_table.c.user_id) + + The above is equivalent to:: + + j = user_table.join( + address_table, + user_table.c.id == address_table.c.user_id, + isouter=True) + + :param right: the right side of the join; this is any + :class:`.FromClause` object such as a :class:`.Table` object, and + may also be a selectable-compatible object such as an ORM-mapped + class. + + :param onclause: a SQL expression representing the ON clause of the + join. If left at ``None``, :meth:`.FromClause.join` will attempt to + join the two tables based on a foreign key relationship. + + .. 
seealso::
+
+            :meth:`.FromClause.join`
+
+            :class:`.Join`
+
+        """
+
+        return Join(self, right, onclause, True)
+
+    def alias(self, name=None, flat=False):
+        """return an alias of this :class:`.FromClause`.
+
+        This is shorthand for calling::
+
+            from sqlalchemy import alias
+            a = alias(self, name=name)
+
+        See :func:`~.expression.alias` for details.
+
+        """
+
+        return Alias(self, name)
+
+    def is_derived_from(self, fromclause):
+        """Return True if this FromClause is 'derived' from the given
+        FromClause.
+
+        For example, an Alias of a Table is derived from that Table.
+
+        """
+        # this is essentially an "identity" check in the base class.
+        # Other constructs override this to traverse through
+        # contained elements.
+        return fromclause in self._cloned_set
+
+    def _is_lexical_equivalent(self, other):
+        """Return True if this FromClause and the other represent
+        the same lexical identity.
+
+        This tests if either one is a copy of the other, or
+        if they are the same via annotation identity.
+
+        """
+        return self._cloned_set.intersection(other._cloned_set)
+
+    @util.dependencies("sqlalchemy.sql.util")
+    def replace_selectable(self, sqlutil, old, alias):
+        """replace all occurrences of FromClause 'old' with the given Alias
+        object, returning a copy of this :class:`.FromClause`.
+
+        """
+
+        return sqlutil.ClauseAdapter(alias).traverse(self)
+
+    def correspond_on_equivalents(self, column, equivalents):
+        """Return corresponding_column for the given column, or if None
+        search for a match in the given dictionary.
+
+        """
+        col = self.corresponding_column(column, require_embedded=True)
+        # per the docstring, fall back to the *requested* column's
+        # equivalents when no direct correspondence is found
+        if col is None and column in equivalents:
+            for equiv in equivalents[column]:
+                nc = self.corresponding_column(equiv, require_embedded=True)
+                if nc:
+                    return nc
+        return col
+
+    def corresponding_column(self, column, require_embedded=False):
+        """Given a :class:`.ColumnElement`, return the exported
+        :class:`.ColumnElement` object from this :class:`.Selectable`
+        which corresponds to that original
+        :class:`~sqlalchemy.schema.Column` via a common ancestor
+        column.
+
+        :param column: the target :class:`.ColumnElement` to be matched
+
+        :param require_embedded: only return corresponding columns for
+         the given :class:`.ColumnElement`, if the given
+         :class:`.ColumnElement` is actually present within a sub-element
+         of this :class:`.FromClause`.  Normally the column will match if
+         it merely shares a common ancestor with one of the exported
+         columns of this :class:`.FromClause`.
+
+        """
+
+        def embedded(expanded_proxy_set, target_set):
+            for t in target_set.difference(expanded_proxy_set):
+                if not set(_expand_cloned([t])
+                           ).intersection(expanded_proxy_set):
+                    return False
+            return True
+
+        # don't dig around if the column is locally present
+        if self.c.contains_column(column):
+            return column
+        col, intersect = None, None
+        target_set = column.proxy_set
+        cols = self.c._all_columns
+        for c in cols:
+            expanded_proxy_set = set(_expand_cloned(c.proxy_set))
+            i = target_set.intersection(expanded_proxy_set)
+            if i and (not require_embedded
+                      or embedded(expanded_proxy_set, target_set)):
+                if col is None:
+
+                    # no corresponding column yet, pick this one.
+
+                    col, intersect = c, i
+                elif len(i) > len(intersect):
+
+                    # 'c' has a larger field of correspondence than
+                    # 'col'. i.e. selectable.c.a1_x->a1.c.x->table.c.x
+                    # matches a1.c.x->table.c.x better than
+                    # selectable.c.x->table.c.x does.
+
+                    col, intersect = c, i
+                elif i == intersect:
+
+                    # they have the same field of correspondence.  see
+                    # which proxy_set has fewer columns in it, which
+                    # indicates a closer relationship with the root
+                    # column. Also take into account the "weight"
+                    # attribute which CompoundSelect() uses to give
+                    # higher precedence to columns based on vertical
+                    # position in the compound statement, and discard
+                    # columns that have no reference to the target
+                    # column (also occurs with CompoundSelect)
+
+                    col_distance = util.reduce(
+                        operator.add,
+                        [sc._annotations.get('weight', 1) for sc in
+                         col.proxy_set if sc.shares_lineage(column)])
+                    c_distance = util.reduce(
+                        operator.add,
+                        [sc._annotations.get('weight', 1) for sc in
+                         c.proxy_set if sc.shares_lineage(column)])
+                    if c_distance < col_distance:
+                        col, intersect = c, i
+        return col
+
+    @property
+    def description(self):
+        """a brief description of this FromClause.
+
+        Used primarily for error message formatting.
+
+        """
+        return getattr(self, 'name', self.__class__.__name__ + " object")
+
+    def _reset_exported(self):
+        """delete memoized collections when a FromClause is cloned."""
+
+        self._memoized_property.expire_instance(self)
+
+    @_memoized_property
+    def columns(self):
+        """A name-based collection of :class:`.ColumnElement` objects
+        maintained by this :class:`.FromClause`.
+
+        The :attr:`.columns`, or :attr:`.c` collection, is the gateway
+        to the construction of SQL expressions using table-bound or
+        other selectable-bound columns::
+
+            select([mytable]).where(mytable.c.somecolumn == 5)
+
+        """
+
+        if '_columns' not in self.__dict__:
+            self._init_collections()
+            self._populate_column_collection()
+        return self._columns.as_immutable()
+
+    @_memoized_property
+    def primary_key(self):
+        """Return the collection of Column objects which comprise the
+        primary key of this FromClause."""
+
+        self._init_collections()
+        self._populate_column_collection()
+        return self.primary_key
+
+    @_memoized_property
+    def foreign_keys(self):
+        """Return the collection of ForeignKey objects which this
+        FromClause references."""
+
+        self._init_collections()
+        self._populate_column_collection()
+        return self.foreign_keys
+
+    c = property(attrgetter('columns'),
+                 doc="An alias for the :attr:`.columns` attribute.")
+    _select_iterable = property(attrgetter('columns'))
+
+    def _init_collections(self):
+        assert '_columns' not in self.__dict__
+        assert 'primary_key' not in self.__dict__
+        assert 'foreign_keys' not in self.__dict__
+
+        self._columns = ColumnCollection()
+        self.primary_key = ColumnSet()
+        self.foreign_keys = set()
+
+    @property
+    def _cols_populated(self):
+        return '_columns' in self.__dict__
+
+    def _populate_column_collection(self):
+        """Called on subclasses to establish the .c collection.
+
+        Each implementation has a different way of establishing
+        this collection.
+
+        """
+
+    def _refresh_for_new_column(self, column):
+        """Given a column added to the .c collection of an underlying
+        selectable, produce the local version of that column, assuming this
+        selectable ultimately should proxy this column.
+
+        This is used to "ping" a derived selectable to add a new column
+        to its .c. collection when a Column has been added to one of the
+        Table objects it ultimately derives from.
+
+        If the given selectable hasn't populated its .c. collection yet,
+        it should at least pass on the message to the contained selectables,
+        but it will return None.
+
+        This method is currently used by Declarative to allow Table
+        columns to be added to a partially constructed inheritance
+        mapping that may have already produced joins.
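# A short, hedged illustration of corresponding_column() from above,
# using a throwaway table: the alias exports a column that corresponds
# to the original via a common ancestor (shared proxy lineage).
from sqlalchemy import MetaData, Table, Column, Integer

metadata = MetaData()
t = Table('t', metadata, Column('x', Integer))
a = t.alias('t1')

assert a.corresponding_column(t.c.x) is a.c.x
assert t.c.x in a.c.x.proxy_set   # the common-ancestor link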
The method + isn't public right now, as the full span of implications + and/or caveats aren't yet clear. + + It's also possible that this functionality could be invoked by + default via an event, which would require that + selectables maintain a weak referencing collection of all + derivations. + + """ + if not self._cols_populated: + return None + elif (column.key in self.columns and + self.columns[column.key] is column): + return column + else: + return None + + +class Join(FromClause): + """represent a ``JOIN`` construct between two :class:`.FromClause` + elements. + + The public constructor function for :class:`.Join` is the module-level + :func:`.join()` function, as well as the :meth:`.FromClause.join` method + of any :class:`.FromClause` (e.g. such as :class:`.Table`). + + .. seealso:: + + :func:`.join` + + :meth:`.FromClause.join` + + """ + __visit_name__ = 'join' + + _is_join = True + + def __init__(self, left, right, onclause=None, isouter=False): + """Construct a new :class:`.Join`. + + The usual entrypoint here is the :func:`~.expression.join` + function or the :meth:`.FromClause.join` method of any + :class:`.FromClause` object. + + """ + self.left = _interpret_as_from(left) + self.right = _interpret_as_from(right).self_group() + + if onclause is None: + self.onclause = self._match_primaries(self.left, self.right) + else: + self.onclause = onclause + + self.isouter = isouter + + @classmethod + def _create_outerjoin(cls, left, right, onclause=None): + """Return an ``OUTER JOIN`` clause element. + + The returned object is an instance of :class:`.Join`. + + Similar functionality is also available via the + :meth:`~.FromClause.outerjoin()` method on any + :class:`.FromClause`. + + :param left: The left side of the join. + + :param right: The right side of the join. + + :param onclause: Optional criterion for the ``ON`` clause, is + derived from foreign key relationships established between + left and right otherwise. + + To chain joins together, use the :meth:`.FromClause.join` or + :meth:`.FromClause.outerjoin` methods on the resulting + :class:`.Join` object. + + """ + return cls(left, right, onclause, isouter=True) + + @classmethod + def _create_join(cls, left, right, onclause=None, isouter=False): + """Produce a :class:`.Join` object, given two :class:`.FromClause` + expressions. + + E.g.:: + + j = join(user_table, address_table, + user_table.c.id == address_table.c.user_id) + stmt = select([user_table]).select_from(j) + + would emit SQL along the lines of:: + + SELECT user.id, user.name FROM user + JOIN address ON user.id = address.user_id + + Similar functionality is available given any + :class:`.FromClause` object (e.g. such as a :class:`.Table`) using + the :meth:`.FromClause.join` method. + + :param left: The left side of the join. + + :param right: the right side of the join; this is any + :class:`.FromClause` object such as a :class:`.Table` object, and + may also be a selectable-compatible object such as an ORM-mapped + class. + + :param onclause: a SQL expression representing the ON clause of the + join. If left at ``None``, :meth:`.FromClause.join` will attempt to + join the two tables based on a foreign key relationship. + + :param isouter: if True, render a LEFT OUTER JOIN, instead of JOIN. + + .. 
seealso:: + + :meth:`.FromClause.join` - method form, based on a given left side + + :class:`.Join` - the type of object produced + + """ + + return cls(left, right, onclause, isouter) + + @property + def description(self): + return "Join object on %s(%d) and %s(%d)" % ( + self.left.description, + id(self.left), + self.right.description, + id(self.right)) + + def is_derived_from(self, fromclause): + return fromclause is self or \ + self.left.is_derived_from(fromclause) or \ + self.right.is_derived_from(fromclause) + + def self_group(self, against=None): + return FromGrouping(self) + + @util.dependencies("sqlalchemy.sql.util") + def _populate_column_collection(self, sqlutil): + columns = [c for c in self.left.columns] + \ + [c for c in self.right.columns] + + self.primary_key.extend(sqlutil.reduce_columns( + (c for c in columns if c.primary_key), self.onclause)) + self._columns.update((col._label, col) for col in columns) + self.foreign_keys.update(itertools.chain( + *[col.foreign_keys for col in columns])) + + def _refresh_for_new_column(self, column): + col = self.left._refresh_for_new_column(column) + if col is None: + col = self.right._refresh_for_new_column(column) + if col is not None: + if self._cols_populated: + self._columns[col._label] = col + self.foreign_keys.add(col) + if col.primary_key: + self.primary_key.add(col) + return col + return None + + def _copy_internals(self, clone=_clone, **kw): + self._reset_exported() + self.left = clone(self.left, **kw) + self.right = clone(self.right, **kw) + self.onclause = clone(self.onclause, **kw) + + def get_children(self, **kwargs): + return self.left, self.right, self.onclause + + def _match_primaries(self, left, right): + if isinstance(left, Join): + left_right = left.right + else: + left_right = None + return self._join_condition(left, right, a_subset=left_right) + + @classmethod + def _join_condition(cls, a, b, ignore_nonexistent_tables=False, + a_subset=None, + consider_as_foreign_keys=None): + """create a join condition between two tables or selectables. + + e.g.:: + + join_condition(tablea, tableb) + + would produce an expression along the lines of:: + + tablea.c.id==tableb.c.tablea_id + + The join is determined based on the foreign key relationships + between the two selectables. If there are multiple ways + to join, or no way to join, an error is raised. + + :param ignore_nonexistent_tables: Deprecated - this + flag is no longer used. Only resolution errors regarding + the two given tables are propagated. + + :param a_subset: An optional expression that is a sub-component + of ``a``. An attempt will be made to join to just this sub-component + first before looking at the full ``a`` construct, and if found + will be successful even if there are other ways to join to ``a``. + This allows the "right side" of a join to be passed thereby + providing a "natural join". + + """ + constraints = cls._joincond_scan_left_right( + a, a_subset, b, consider_as_foreign_keys) + + if len(constraints) > 1: + cls._joincond_trim_constraints( + a, b, constraints, consider_as_foreign_keys) + + if len(constraints) == 0: + if isinstance(b, FromGrouping): + hint = " Perhaps you meant to convert the right side to a "\ + "subquery using alias()?" 
+ else: + hint = "" + raise exc.NoForeignKeysError( + "Can't find any foreign key relationships " + "between '%s' and '%s'.%s" % + (a.description, b.description, hint)) + + crit = [(x == y) for x, y in list(constraints.values())[0]] + if len(crit) == 1: + return (crit[0]) + else: + return and_(*crit) + + @classmethod + def _joincond_scan_left_right( + cls, a, a_subset, b, consider_as_foreign_keys): + constraints = collections.defaultdict(list) + + for left in (a_subset, a): + if left is None: + continue + for fk in sorted( + b.foreign_keys, + key=lambda fk: fk.parent._creation_order): + if consider_as_foreign_keys is not None and \ + fk.parent not in consider_as_foreign_keys: + continue + try: + col = fk.get_referent(left) + except exc.NoReferenceError as nrte: + if nrte.table_name == left.name: + raise + else: + continue + + if col is not None: + constraints[fk.constraint].append((col, fk.parent)) + if left is not b: + for fk in sorted( + left.foreign_keys, + key=lambda fk: fk.parent._creation_order): + if consider_as_foreign_keys is not None and \ + fk.parent not in consider_as_foreign_keys: + continue + try: + col = fk.get_referent(b) + except exc.NoReferenceError as nrte: + if nrte.table_name == b.name: + raise + else: + continue + + if col is not None: + constraints[fk.constraint].append((col, fk.parent)) + if constraints: + break + return constraints + + @classmethod + def _joincond_trim_constraints( + cls, a, b, constraints, consider_as_foreign_keys): + # more than one constraint matched. narrow down the list + # to include just those FKCs that match exactly to + # "consider_as_foreign_keys". + if consider_as_foreign_keys: + for const in list(constraints): + if set(f.parent for f in const.elements) != set( + consider_as_foreign_keys): + del constraints[const] + + # if still multiple constraints, but + # they all refer to the exact same end result, use it. + if len(constraints) > 1: + dedupe = set(tuple(crit) for crit in constraints.values()) + if len(dedupe) == 1: + key = list(constraints)[0] + constraints = {key: constraints[key]} + + if len(constraints) != 1: + raise exc.AmbiguousForeignKeysError( + "Can't determine join between '%s' and '%s'; " + "tables have more than one foreign key " + "constraint relationship between them. " + "Please specify the 'onclause' of this " + "join explicitly." % (a.description, b.description)) + + def select(self, whereclause=None, **kwargs): + """Create a :class:`.Select` from this :class:`.Join`. + + The equivalent long-hand form, given a :class:`.Join` object + ``j``, is:: + + from sqlalchemy import select + j = select([j.left, j.right], **kw).\\ + where(whereclause).\\ + select_from(j) + + :param whereclause: the WHERE criterion that will be sent to + the :func:`select()` function + + :param \**kwargs: all other kwargs are sent to the + underlying :func:`select()` function. + + """ + collist = [self.left, self.right] + + return Select(collist, whereclause, from_obj=[self], **kwargs) + + @property + def bind(self): + return self.left.bind or self.right.bind + + @util.dependencies("sqlalchemy.sql.util") + def alias(self, sqlutil, name=None, flat=False): + """return an alias of this :class:`.Join`. + + The default behavior here is to first produce a SELECT + construct from this :class:`.Join`, then to produce an + :class:`.Alias` from that. 
So given a join of the form:: + + j = table_a.join(table_b, table_a.c.id == table_b.c.a_id) + + The JOIN by itself would look like:: + + table_a JOIN table_b ON table_a.id = table_b.a_id + + Whereas the alias of the above, ``j.alias()``, would in a + SELECT context look like:: + + (SELECT table_a.id AS table_a_id, table_b.id AS table_b_id, + table_b.a_id AS table_b_a_id + FROM table_a + JOIN table_b ON table_a.id = table_b.a_id) AS anon_1 + + The equivalent long-hand form, given a :class:`.Join` object + ``j``, is:: + + from sqlalchemy import select, alias + j = alias( + select([j.left, j.right]).\\ + select_from(j).\\ + with_labels(True).\\ + correlate(False), + name=name + ) + + The selectable produced by :meth:`.Join.alias` features the same + columns as that of the two individual selectables presented under + a single name - the individual columns are "auto-labeled", meaning + the ``.c.`` collection of the resulting :class:`.Alias` represents + the names of the individual columns using a + ``_`` scheme:: + + j.c.table_a_id + j.c.table_b_a_id + + :meth:`.Join.alias` also features an alternate + option for aliasing joins which produces no enclosing SELECT and + does not normally apply labels to the column names. The + ``flat=True`` option will call :meth:`.FromClause.alias` + against the left and right sides individually. + Using this option, no new ``SELECT`` is produced; + we instead, from a construct as below:: + + j = table_a.join(table_b, table_a.c.id == table_b.c.a_id) + j = j.alias(flat=True) + + we get a result like this:: + + table_a AS table_a_1 JOIN table_b AS table_b_1 ON + table_a_1.id = table_b_1.a_id + + The ``flat=True`` argument is also propagated to the contained + selectables, so that a composite join such as:: + + j = table_a.join( + table_b.join(table_c, + table_b.c.id == table_c.c.b_id), + table_b.c.a_id == table_a.c.id + ).alias(flat=True) + + Will produce an expression like:: + + table_a AS table_a_1 JOIN ( + table_b AS table_b_1 JOIN table_c AS table_c_1 + ON table_b_1.id = table_c_1.b_id + ) ON table_a_1.id = table_b_1.a_id + + The standalone :func:`~.expression.alias` function as well as the + base :meth:`.FromClause.alias` method also support the ``flat=True`` + argument as a no-op, so that the argument can be passed to the + ``alias()`` method of any selectable. + + .. versionadded:: 0.9.0 Added the ``flat=True`` option to create + "aliases" of joins without enclosing inside of a SELECT + subquery. + + :param name: name given to the alias. + + :param flat: if True, produce an alias of the left and right + sides of this :class:`.Join` and return the join of those + two selectables. This produces join expression that does not + include an enclosing SELECT. + + .. versionadded:: 0.9.0 + + .. 
seealso::
+
+            :func:`~.expression.alias`
+
+        """
+        if flat:
+            assert name is None, "Can't send name argument with flat"
+            left_a, right_a = self.left.alias(flat=True), \
+                self.right.alias(flat=True)
+            adapter = sqlutil.ClauseAdapter(left_a).\
+                chain(sqlutil.ClauseAdapter(right_a))
+
+            return left_a.join(right_a, adapter.traverse(self.onclause),
+                               isouter=self.isouter)
+        else:
+            return self.select(use_labels=True, correlate=False).alias(name)
+
+    @property
+    def _hide_froms(self):
+        return itertools.chain(*[_from_objects(x.left, x.right)
+                                 for x in self._cloned_set])
+
+    @property
+    def _from_objects(self):
+        return [self] + \
+            self.onclause._from_objects + \
+            self.left._from_objects + \
+            self.right._from_objects
+
+
+class Alias(FromClause):
+    """Represents a table or selectable alias (AS).
+
+    Represents an alias, as typically applied to any table or
+    sub-select within a SQL statement using the ``AS`` keyword (or
+    without the keyword on certain databases such as Oracle).
+
+    This object is constructed from the :func:`~.expression.alias` module
+    level function as well as the :meth:`.FromClause.alias` method available
+    on all :class:`.FromClause` subclasses.
+
+    """
+
+    __visit_name__ = 'alias'
+    named_with_column = True
+
+    _is_from_container = True
+
+    def __init__(self, selectable, name=None):
+        baseselectable = selectable
+        while isinstance(baseselectable, Alias):
+            baseselectable = baseselectable.element
+        self.original = baseselectable
+        self.supports_execution = baseselectable.supports_execution
+        if self.supports_execution:
+            self._execution_options = baseselectable._execution_options
+        self.element = selectable
+        if name is None:
+            if self.original.named_with_column:
+                name = getattr(self.original, 'name', None)
+            name = _anonymous_label('%%(%d %s)s' % (id(self), name
+                                                    or 'anon'))
+        self.name = name
+
+    @property
+    def description(self):
+        if util.py3k:
+            return self.name
+        else:
+            return self.name.encode('ascii', 'backslashreplace')
+
+    def as_scalar(self):
+        try:
+            return self.element.as_scalar()
+        except AttributeError:
+            raise AttributeError("Element %s does not support "
+                                 "'as_scalar()'" % self.element)
+
+    def is_derived_from(self, fromclause):
+        if fromclause in self._cloned_set:
+            return True
+        return self.element.is_derived_from(fromclause)
+
+    def _populate_column_collection(self):
+        for col in self.element.columns._all_columns:
+            col._make_proxy(self)
+
+    def _refresh_for_new_column(self, column):
+        col = self.element._refresh_for_new_column(column)
+        if col is not None:
+            if not self._cols_populated:
+                return None
+            else:
+                return col._make_proxy(self)
+        else:
+            return None
+
+    def _copy_internals(self, clone=_clone, **kw):
+        # don't apply anything to an aliased Table
+        # for now. May want to drive this from
+        # the given **kw.
+        if isinstance(self.element, TableClause):
+            return
+        self._reset_exported()
+        self.element = clone(self.element, **kw)
+        baseselectable = self.element
+        while isinstance(baseselectable, Alias):
+            baseselectable = baseselectable.element
+        self.original = baseselectable
+
+    def get_children(self, column_collections=True, **kw):
+        if column_collections:
+            for c in self.c:
+                yield c
+        yield self.element
+
+    @property
+    def _from_objects(self):
+        return [self]
+
+    @property
+    def bind(self):
+        return self.element.bind
+
+
+class CTE(Generative, HasSuffixes, Alias):
+    """Represent a Common Table Expression.
+
+    The :class:`.CTE` object is obtained using the
+    :meth:`.SelectBase.cte` method from any selectable.
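# A quick sketch of obtaining a CTE via SelectBase.cte(), assuming a toy
# 'orders' table; the cte() docstring further below has complete examples.
from sqlalchemy import MetaData, Table, Column, Integer, String, select, func

metadata = MetaData()
orders = Table('orders', metadata,
               Column('region', String), Column('amount', Integer))

regional = select([orders.c.region,
                   func.sum(orders.c.amount).label('total')]).\
    group_by(orders.c.region).cte('regional')
stmt = select([regional.c.region]).where(regional.c.total > 100)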
+ See that method for complete examples. + + .. versionadded:: 0.7.6 + + """ + __visit_name__ = 'cte' + + def __init__(self, selectable, + name=None, + recursive=False, + _cte_alias=None, + _restates=frozenset(), + _suffixes=None): + self.recursive = recursive + self._cte_alias = _cte_alias + self._restates = _restates + if _suffixes: + self._suffixes = _suffixes + super(CTE, self).__init__(selectable, name=name) + + def alias(self, name=None, flat=False): + return CTE( + self.original, + name=name, + recursive=self.recursive, + _cte_alias=self, + _suffixes=self._suffixes + ) + + def union(self, other): + return CTE( + self.original.union(other), + name=self.name, + recursive=self.recursive, + _restates=self._restates.union([self]), + _suffixes=self._suffixes + ) + + def union_all(self, other): + return CTE( + self.original.union_all(other), + name=self.name, + recursive=self.recursive, + _restates=self._restates.union([self]), + _suffixes=self._suffixes + ) + + +class FromGrouping(FromClause): + """Represent a grouping of a FROM clause""" + __visit_name__ = 'grouping' + + def __init__(self, element): + self.element = element + + def _init_collections(self): + pass + + @property + def columns(self): + return self.element.columns + + @property + def primary_key(self): + return self.element.primary_key + + @property + def foreign_keys(self): + return self.element.foreign_keys + + def is_derived_from(self, element): + return self.element.is_derived_from(element) + + def alias(self, **kw): + return FromGrouping(self.element.alias(**kw)) + + @property + def _hide_froms(self): + return self.element._hide_froms + + def get_children(self, **kwargs): + return self.element, + + def _copy_internals(self, clone=_clone, **kw): + self.element = clone(self.element, **kw) + + @property + def _from_objects(self): + return self.element._from_objects + + def __getattr__(self, attr): + return getattr(self.element, attr) + + def __getstate__(self): + return {'element': self.element} + + def __setstate__(self, state): + self.element = state['element'] + + +class TableClause(Immutable, FromClause): + """Represents a minimal "table" construct. + + This is a lightweight table object that has only a name and a + collection of columns, which are typically produced + by the :func:`.expression.column` function:: + + from sqlalchemy import table, column + + user = table("user", + column("id"), + column("name"), + column("description"), + ) + + The :class:`.TableClause` construct serves as the base for + the more commonly used :class:`~.schema.Table` object, providing + the usual set of :class:`~.expression.FromClause` services including + the ``.c.`` collection and statement generation methods. + + It does **not** provide all the additional schema-level services + of :class:`~.schema.Table`, including constraints, references to other + tables, or support for :class:`.MetaData`-level services. It's useful + on its own as an ad-hoc construct used to generate quick SQL + statements when a more fully fledged :class:`~.schema.Table` + is not on hand. + + """ + + __visit_name__ = 'table' + + named_with_column = True + + implicit_returning = False + """:class:`.TableClause` doesn't support having a primary key or column + -level defaults, so implicit returning doesn't apply.""" + + _autoincrement_column = None + """No PK or default support so no autoincrement column.""" + + def __init__(self, name, *columns): + """Produce a new :class:`.TableClause`. 
+
+        The object returned is an instance of :class:`.TableClause`, which
+        represents the "syntactical" portion of the schema-level
+        :class:`~.schema.Table` object.
+        It may be used to construct lightweight table constructs.
+
+        .. versionchanged:: 1.0.0 :func:`.expression.table` can now
+           be imported from the plain ``sqlalchemy`` namespace like any
+           other SQL element.
+
+        :param name: Name of the table.
+
+        :param columns: A collection of :func:`.expression.column` constructs.
+
+        """
+
+        super(TableClause, self).__init__()
+        self.name = self.fullname = name
+        self._columns = ColumnCollection()
+        self.primary_key = ColumnSet()
+        self.foreign_keys = set()
+        for c in columns:
+            self.append_column(c)
+
+    def _init_collections(self):
+        pass
+
+    @util.memoized_property
+    def description(self):
+        if util.py3k:
+            return self.name
+        else:
+            return self.name.encode('ascii', 'backslashreplace')
+
+    def append_column(self, c):
+        self._columns[c.key] = c
+        c.table = self
+
+    def get_children(self, column_collections=True, **kwargs):
+        if column_collections:
+            return [c for c in self.c]
+        else:
+            return []
+
+    @util.dependencies("sqlalchemy.sql.functions")
+    def count(self, functions, whereclause=None, **params):
+        """return a SELECT COUNT generated against this
+        :class:`.TableClause`."""
+
+        if self.primary_key:
+            col = list(self.primary_key)[0]
+        else:
+            col = list(self.columns)[0]
+        return Select(
+            [functions.func.count(col).label('tbl_row_count')],
+            whereclause,
+            from_obj=[self],
+            **params)
+
+    @util.dependencies("sqlalchemy.sql.dml")
+    def insert(self, dml, values=None, inline=False, **kwargs):
+        """Generate an :func:`.insert` construct against this
+        :class:`.TableClause`.
+
+        E.g.::
+
+            table.insert().values(name='foo')
+
+        See :func:`.insert` for argument and usage information.
+
+        """
+
+        return dml.Insert(self, values=values, inline=inline, **kwargs)
+
+    @util.dependencies("sqlalchemy.sql.dml")
+    def update(
+            self, dml, whereclause=None, values=None, inline=False, **kwargs):
+        """Generate an :func:`.update` construct against this
+        :class:`.TableClause`.
+
+        E.g.::
+
+            table.update().where(table.c.id==7).values(name='foo')
+
+        See :func:`.update` for argument and usage information.
+
+        """
+
+        return dml.Update(self, whereclause=whereclause,
+                          values=values, inline=inline, **kwargs)
+
+    @util.dependencies("sqlalchemy.sql.dml")
+    def delete(self, dml, whereclause=None, **kwargs):
+        """Generate a :func:`.delete` construct against this
+        :class:`.TableClause`.
+
+        E.g.::
+
+            table.delete().where(table.c.id==7)
+
+        See :func:`.delete` for argument and usage information.
+
+        """
+
+        return dml.Delete(self, whereclause, **kwargs)
+
+    @property
+    def _from_objects(self):
+        return [self]
+
+
+class ForUpdateArg(ClauseElement):
+
+    @classmethod
+    def parse_legacy_select(cls, arg):
+        """Parse the for_update argument of :func:`.select`.
+
+        :param arg: Defines the lockmode to use.
+
+            ``None`` - translates to no lockmode
+
+            ``'update'`` - translates to ``FOR UPDATE``
+            (standard SQL, supported by most dialects)
+
+            ``'nowait'`` - translates to ``FOR UPDATE NOWAIT``
+            (supported by Oracle, PostgreSQL 8.1 upwards)
+
+            ``'read'`` - translates to ``LOCK IN SHARE MODE`` (for MySQL),
+            and ``FOR SHARE`` (for PostgreSQL)
+
+            ``'read_nowait'`` - translates to ``FOR SHARE NOWAIT``
+            (supported by PostgreSQL).
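# A hedged sketch of how the legacy strings above line up with the
# 0.9-era replacement API; the table 't' is assumed for illustration.
from sqlalchemy import MetaData, Table, Column, Integer, select

metadata = MetaData()
t = Table('t', metadata, Column('id', Integer, primary_key=True))

legacy = select([t], for_update='read_nowait')            # legacy spelling
modern = select([t]).with_for_update(read=True, nowait=True)
assert legacy.for_update == modern.for_update == 'read_nowait'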
+ + """ + if arg in (None, False): + return None + + nowait = read = False + if arg == 'nowait': + nowait = True + elif arg == 'read': + read = True + elif arg == 'read_nowait': + read = nowait = True + elif arg is not True: + raise exc.ArgumentError("Unknown for_update argument: %r" % arg) + + return ForUpdateArg(read=read, nowait=nowait) + + @property + def legacy_for_update_value(self): + if self.read and not self.nowait: + return "read" + elif self.read and self.nowait: + return "read_nowait" + elif self.nowait: + return "nowait" + else: + return True + + def _copy_internals(self, clone=_clone, **kw): + if self.of is not None: + self.of = [clone(col, **kw) for col in self.of] + + def __init__(self, nowait=False, read=False, of=None): + """Represents arguments specified to :meth:`.Select.for_update`. + + .. versionadded:: 0.9.0 + """ + + self.nowait = nowait + self.read = read + if of is not None: + self.of = [_interpret_as_column_or_from(elem) + for elem in util.to_list(of)] + else: + self.of = None + + +class SelectBase(Executable, FromClause): + """Base class for SELECT statements. + + + This includes :class:`.Select`, :class:`.CompoundSelect` and + :class:`.TextAsFrom`. + + + """ + + def as_scalar(self): + """return a 'scalar' representation of this selectable, which can be + used as a column expression. + + Typically, a select statement which has only one column in its columns + clause is eligible to be used as a scalar expression. + + The returned object is an instance of + :class:`ScalarSelect`. + + """ + return ScalarSelect(self) + + def label(self, name): + """return a 'scalar' representation of this selectable, embedded as a + subquery with a label. + + .. seealso:: + + :meth:`~.SelectBase.as_scalar`. + + """ + return self.as_scalar().label(name) + + def cte(self, name=None, recursive=False): + """Return a new :class:`.CTE`, or Common Table Expression instance. + + Common table expressions are a SQL standard whereby SELECT + statements can draw upon secondary statements specified along + with the primary statement, using a clause called "WITH". + Special semantics regarding UNION can also be employed to + allow "recursive" queries, where a SELECT statement can draw + upon the set of rows that have previously been selected. + + SQLAlchemy detects :class:`.CTE` objects, which are treated + similarly to :class:`.Alias` objects, as special elements + to be delivered to the FROM clause of the statement as well + as to a WITH clause at the top of the statement. + + .. versionadded:: 0.7.6 + + :param name: name given to the common table expression. Like + :meth:`._FromClause.alias`, the name can be left as ``None`` + in which case an anonymous symbol will be used at query + compile time. + :param recursive: if ``True``, will render ``WITH RECURSIVE``. + A recursive common table expression is intended to be used in + conjunction with UNION ALL in order to derive rows + from those already selected. + + The following examples illustrate two examples from + Postgresql's documentation at + http://www.postgresql.org/docs/8.4/static/queries-with.html. 
+ + Example 1, non recursive:: + + from sqlalchemy import (Table, Column, String, Integer, + MetaData, select, func) + + metadata = MetaData() + + orders = Table('orders', metadata, + Column('region', String), + Column('amount', Integer), + Column('product', String), + Column('quantity', Integer) + ) + + regional_sales = select([ + orders.c.region, + func.sum(orders.c.amount).label('total_sales') + ]).group_by(orders.c.region).cte("regional_sales") + + + top_regions = select([regional_sales.c.region]).\\ + where( + regional_sales.c.total_sales > + select([ + func.sum(regional_sales.c.total_sales)/10 + ]) + ).cte("top_regions") + + statement = select([ + orders.c.region, + orders.c.product, + func.sum(orders.c.quantity).label("product_units"), + func.sum(orders.c.amount).label("product_sales") + ]).where(orders.c.region.in_( + select([top_regions.c.region]) + )).group_by(orders.c.region, orders.c.product) + + result = conn.execute(statement).fetchall() + + Example 2, WITH RECURSIVE:: + + from sqlalchemy import (Table, Column, String, Integer, + MetaData, select, func) + + metadata = MetaData() + + parts = Table('parts', metadata, + Column('part', String), + Column('sub_part', String), + Column('quantity', Integer), + ) + + included_parts = select([ + parts.c.sub_part, + parts.c.part, + parts.c.quantity]).\\ + where(parts.c.part=='our part').\\ + cte(recursive=True) + + + incl_alias = included_parts.alias() + parts_alias = parts.alias() + included_parts = included_parts.union_all( + select([ + parts_alias.c.sub_part, + parts_alias.c.part, + parts_alias.c.quantity + ]). + where(parts_alias.c.part==incl_alias.c.sub_part) + ) + + statement = select([ + included_parts.c.sub_part, + func.sum(included_parts.c.quantity). + label('total_quantity') + ]).\\ + group_by(included_parts.c.sub_part) + + result = conn.execute(statement).fetchall() + + + .. seealso:: + + :meth:`.orm.query.Query.cte` - ORM version of + :meth:`.SelectBase.cte`. + + """ + return CTE(self, name=name, recursive=recursive) + + @_generative + @util.deprecated('0.6', + message="``autocommit()`` is deprecated. Use " + ":meth:`.Executable.execution_options` with the " + "'autocommit' flag.") + def autocommit(self): + """return a new selectable with the 'autocommit' flag set to + True. + """ + + self._execution_options = \ + self._execution_options.union({'autocommit': True}) + + def _generate(self): + """Override the default _generate() method to also clear out + exported collections.""" + + s = self.__class__.__new__(self.__class__) + s.__dict__ = self.__dict__.copy() + s._reset_exported() + return s + + @property + def _from_objects(self): + return [self] + + +class GenerativeSelect(SelectBase): + """Base class for SELECT statements where additional elements can be + added. + + This serves as the base for :class:`.Select` and :class:`.CompoundSelect` + where elements such as ORDER BY, GROUP BY can be added and column + rendering can be controlled. Compare to :class:`.TextAsFrom`, which, + while it subclasses :class:`.SelectBase` and is also a SELECT construct, + represents a fixed textual string which cannot be altered at this level, + only wrapped as a subquery. + + .. versionadded:: 0.9.0 :class:`.GenerativeSelect` was added to + provide functionality specific to :class:`.Select` and + :class:`.CompoundSelect` while allowing :class:`.SelectBase` to be + used for other SELECT-like objects, e.g. :class:`.TextAsFrom`. 
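# A minimal sketch of the generative pattern this class provides,
# assuming a toy table: each call returns a *new* statement, leaving
# the original untouched.
from sqlalchemy import MetaData, Table, Column, Integer, select

metadata = MetaData()
t = Table('t', metadata, Column('id', Integer, primary_key=True))

base = select([t])
page = base.order_by(t.c.id).limit(10).offset(20)
assert page is not base              # _generate() copied the statement
assert base._limit_clause is None    # original statement unchanged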
+ + """ + _order_by_clause = ClauseList() + _group_by_clause = ClauseList() + _limit_clause = None + _offset_clause = None + _for_update_arg = None + + def __init__(self, + use_labels=False, + for_update=False, + limit=None, + offset=None, + order_by=None, + group_by=None, + bind=None, + autocommit=None): + self.use_labels = use_labels + + if for_update is not False: + self._for_update_arg = (ForUpdateArg. + parse_legacy_select(for_update)) + + if autocommit is not None: + util.warn_deprecated('autocommit on select() is ' + 'deprecated. Use .execution_options(a' + 'utocommit=True)') + self._execution_options = \ + self._execution_options.union( + {'autocommit': autocommit}) + if limit is not None: + self._limit_clause = _offset_or_limit_clause(limit) + if offset is not None: + self._offset_clause = _offset_or_limit_clause(offset) + self._bind = bind + + if order_by is not None: + self._order_by_clause = ClauseList( + *util.to_list(order_by), + _literal_as_text=_literal_and_labels_as_label_reference) + if group_by is not None: + self._group_by_clause = ClauseList( + *util.to_list(group_by), + _literal_as_text=_literal_as_label_reference) + + @property + def for_update(self): + """Provide legacy dialect support for the ``for_update`` attribute. + """ + if self._for_update_arg is not None: + return self._for_update_arg.legacy_for_update_value + else: + return None + + @for_update.setter + def for_update(self, value): + self._for_update_arg = ForUpdateArg.parse_legacy_select(value) + + @_generative + def with_for_update(self, nowait=False, read=False, of=None): + """Specify a ``FOR UPDATE`` clause for this :class:`.GenerativeSelect`. + + E.g.:: + + stmt = select([table]).with_for_update(nowait=True) + + On a database like Postgresql or Oracle, the above would render a + statement like:: + + SELECT table.a, table.b FROM table FOR UPDATE NOWAIT + + on other backends, the ``nowait`` option is ignored and instead + would produce:: + + SELECT table.a, table.b FROM table FOR UPDATE + + When called with no arguments, the statement will render with + the suffix ``FOR UPDATE``. Additional arguments can then be + provided which allow for common database-specific + variants. + + :param nowait: boolean; will render ``FOR UPDATE NOWAIT`` on Oracle + and Postgresql dialects. + + :param read: boolean; will render ``LOCK IN SHARE MODE`` on MySQL, + ``FOR SHARE`` on Postgresql. On Postgresql, when combined with + ``nowait``, will render ``FOR SHARE NOWAIT``. + + :param of: SQL expression or list of SQL expression elements + (typically :class:`.Column` objects or a compatible expression) which + will render into a ``FOR UPDATE OF`` clause; supported by PostgreSQL + and Oracle. May render as a table or as a column depending on + backend. + + .. versionadded:: 0.9.0 + + """ + self._for_update_arg = ForUpdateArg(nowait=nowait, read=read, of=of) + + @_generative + def apply_labels(self): + """return a new selectable with the 'use_labels' flag set to True. + + This will result in column expressions being generated using labels + against their table name, such as "SELECT somecolumn AS + tablename_somecolumn". This allows selectables which contain multiple + FROM clauses to produce a unique set of column names regardless of + name conflicts among the individual FROM clauses. + + """ + self.use_labels = True + + @property + def _limit(self): + """Get an integer value for the limit. 
This should only be used + by code that cannot support a limit as a BindParameter or + other custom clause as it will throw an exception if the limit + isn't currently set to an integer. + + """ + return _offset_or_limit_clause_asint(self._limit_clause, "limit") + + @property + def _simple_int_limit(self): + """True if the LIMIT clause is a simple integer, False + if it is not present or is a SQL expression. + """ + return isinstance(self._limit_clause, _OffsetLimitParam) + + @property + def _simple_int_offset(self): + """True if the OFFSET clause is a simple integer, False + if it is not present or is a SQL expression. + """ + return isinstance(self._offset_clause, _OffsetLimitParam) + + @property + def _offset(self): + """Get an integer value for the offset. This should only be used + by code that cannot support an offset as a BindParameter or + other custom clause as it will throw an exception if the + offset isn't currently set to an integer. + + """ + return _offset_or_limit_clause_asint(self._offset_clause, "offset") + + @_generative + def limit(self, limit): + """return a new selectable with the given LIMIT criterion + applied. + + This is a numerical value which usually renders as a ``LIMIT`` + expression in the resulting select. Backends that don't + support ``LIMIT`` will attempt to provide similar + functionality. + + .. versionchanged:: 1.0.0 - :meth:`.Select.limit` can now + accept arbitrary SQL expressions as well as integer values. + + :param limit: an integer LIMIT parameter, or a SQL expression + that provides an integer result. + + """ + + self._limit_clause = _offset_or_limit_clause(limit) + + @_generative + def offset(self, offset): + """return a new selectable with the given OFFSET criterion + applied. + + + This is a numeric value which usually renders as an ``OFFSET`` + expression in the resulting select. Backends that don't + support ``OFFSET`` will attempt to provide similar + functionality. + + + .. versionchanged:: 1.0.0 - :meth:`.Select.offset` can now + accept arbitrary SQL expressions as well as integer values. + + :param offset: an integer OFFSET parameter, or a SQL expression + that provides an integer result. + + """ + + self._offset_clause = _offset_or_limit_clause(offset) + + @_generative + def order_by(self, *clauses): + """return a new selectable with the given list of ORDER BY + criterion applied. + + The criterion will be appended to any pre-existing ORDER BY + criterion. + + """ + + self.append_order_by(*clauses) + + @_generative + def group_by(self, *clauses): + """return a new selectable with the given list of GROUP BY + criterion applied. + + The criterion will be appended to any pre-existing GROUP BY + criterion. + + """ + + self.append_group_by(*clauses) + + def append_order_by(self, *clauses): + """Append the given ORDER BY criterion applied to this selectable. + + The criterion will be appended to any pre-existing ORDER BY criterion. + + This is an **in-place** mutation method; the + :meth:`~.GenerativeSelect.order_by` method is preferred, as it + provides standard :term:`method chaining`. + + """ + if len(clauses) == 1 and clauses[0] is None: + self._order_by_clause = ClauseList() + else: + if getattr(self, '_order_by_clause', None) is not None: + clauses = list(self._order_by_clause) + list(clauses) + self._order_by_clause = ClauseList( + *clauses, + _literal_as_text=_literal_and_labels_as_label_reference) + + def append_group_by(self, *clauses): + """Append the given GROUP BY criterion applied to this selectable. 
+ + The criterion will be appended to any pre-existing GROUP BY criterion. + + This is an **in-place** mutation method; the + :meth:`~.GenerativeSelect.group_by` method is preferred, as it + provides standard :term:`method chaining`. + + """ + if len(clauses) == 1 and clauses[0] is None: + self._group_by_clause = ClauseList() + else: + if getattr(self, '_group_by_clause', None) is not None: + clauses = list(self._group_by_clause) + list(clauses) + self._group_by_clause = ClauseList( + *clauses, _literal_as_text=_literal_as_label_reference) + + @property + def _label_resolve_dict(self): + raise NotImplementedError() + + def _copy_internals(self, clone=_clone, **kw): + if self._limit_clause is not None: + self._limit_clause = clone(self._limit_clause, **kw) + if self._offset_clause is not None: + self._offset_clause = clone(self._offset_clause, **kw) + + +class CompoundSelect(GenerativeSelect): + """Forms the basis of ``UNION``, ``UNION ALL``, and other + SELECT-based set operations. + + + .. seealso:: + + :func:`.union` + + :func:`.union_all` + + :func:`.intersect` + + :func:`.intersect_all` + + :func:`.except` + + :func:`.except_all` + + """ + + __visit_name__ = 'compound_select' + + UNION = util.symbol('UNION') + UNION_ALL = util.symbol('UNION ALL') + EXCEPT = util.symbol('EXCEPT') + EXCEPT_ALL = util.symbol('EXCEPT ALL') + INTERSECT = util.symbol('INTERSECT') + INTERSECT_ALL = util.symbol('INTERSECT ALL') + + _is_from_container = True + + def __init__(self, keyword, *selects, **kwargs): + self._auto_correlate = kwargs.pop('correlate', False) + self.keyword = keyword + self.selects = [] + + numcols = None + + # some DBs do not like ORDER BY in the inner queries of a UNION, etc. + for n, s in enumerate(selects): + s = _clause_element_as_expr(s) + + if not numcols: + numcols = len(s.c._all_columns) + elif len(s.c._all_columns) != numcols: + raise exc.ArgumentError( + 'All selectables passed to ' + 'CompoundSelect must have identical numbers of ' + 'columns; select #%d has %d columns, select ' + '#%d has %d' % + (1, len(self.selects[0].c._all_columns), + n + 1, len(s.c._all_columns)) + ) + + self.selects.append(s.self_group(self)) + + GenerativeSelect.__init__(self, **kwargs) + + @property + def _label_resolve_dict(self): + d = dict( + (c.key, c) for c in self.c + ) + return d, d + + @classmethod + def _create_union(cls, *selects, **kwargs): + """Return a ``UNION`` of multiple selectables. + + The returned object is an instance of + :class:`.CompoundSelect`. + + A similar :func:`union()` method is available on all + :class:`.FromClause` subclasses. + + \*selects + a list of :class:`.Select` instances. + + \**kwargs + available keyword arguments are the same as those of + :func:`select`. + + """ + return CompoundSelect(CompoundSelect.UNION, *selects, **kwargs) + + @classmethod + def _create_union_all(cls, *selects, **kwargs): + """Return a ``UNION ALL`` of multiple selectables. + + The returned object is an instance of + :class:`.CompoundSelect`. + + A similar :func:`union_all()` method is available on all + :class:`.FromClause` subclasses. + + \*selects + a list of :class:`.Select` instances. + + \**kwargs + available keyword arguments are the same as those of + :func:`select`. + + """ + return CompoundSelect(CompoundSelect.UNION_ALL, *selects, **kwargs) + + @classmethod + def _create_except(cls, *selects, **kwargs): + """Return an ``EXCEPT`` of multiple selectables. + + The returned object is an instance of + :class:`.CompoundSelect`. + + \*selects + a list of :class:`.Select` instances. 
+ + \**kwargs + available keyword arguments are the same as those of + :func:`select`. + + """ + return CompoundSelect(CompoundSelect.EXCEPT, *selects, **kwargs) + + @classmethod + def _create_except_all(cls, *selects, **kwargs): + """Return an ``EXCEPT ALL`` of multiple selectables. + + The returned object is an instance of + :class:`.CompoundSelect`. + + \*selects + a list of :class:`.Select` instances. + + \**kwargs + available keyword arguments are the same as those of + :func:`select`. + + """ + return CompoundSelect(CompoundSelect.EXCEPT_ALL, *selects, **kwargs) + + @classmethod + def _create_intersect(cls, *selects, **kwargs): + """Return an ``INTERSECT`` of multiple selectables. + + The returned object is an instance of + :class:`.CompoundSelect`. + + \*selects + a list of :class:`.Select` instances. + + \**kwargs + available keyword arguments are the same as those of + :func:`select`. + + """ + return CompoundSelect(CompoundSelect.INTERSECT, *selects, **kwargs) + + @classmethod + def _create_intersect_all(cls, *selects, **kwargs): + """Return an ``INTERSECT ALL`` of multiple selectables. + + The returned object is an instance of + :class:`.CompoundSelect`. + + \*selects + a list of :class:`.Select` instances. + + \**kwargs + available keyword arguments are the same as those of + :func:`select`. + + """ + return CompoundSelect( + CompoundSelect.INTERSECT_ALL, *selects, **kwargs) + + def _scalar_type(self): + return self.selects[0]._scalar_type() + + def self_group(self, against=None): + return FromGrouping(self) + + def is_derived_from(self, fromclause): + for s in self.selects: + if s.is_derived_from(fromclause): + return True + return False + + def _populate_column_collection(self): + for cols in zip(*[s.c._all_columns for s in self.selects]): + + # this is a slightly hacky thing - the union exports a + # column that resembles just that of the *first* selectable. + # to get at a "composite" column, particularly foreign keys, + # you have to dig through the proxies collection which we + # generate below. We may want to improve upon this, such as + # perhaps _make_proxy can accept a list of other columns + # that are "shared" - schema.column can then copy all the + # ForeignKeys in. this would allow the union() to have all + # those fks too. 
+ + proxy = cols[0]._make_proxy( + self, name=cols[0]._label if self.use_labels else None, + key=cols[0]._key_label if self.use_labels else None) + + # hand-construct the "_proxies" collection to include all + # derived columns place a 'weight' annotation corresponding + # to how low in the list of select()s the column occurs, so + # that the corresponding_column() operation can resolve + # conflicts + + proxy._proxies = [ + c._annotate({'weight': i + 1}) for (i, c) in enumerate(cols)] + + def _refresh_for_new_column(self, column): + for s in self.selects: + s._refresh_for_new_column(column) + + if not self._cols_populated: + return None + + raise NotImplementedError("CompoundSelect constructs don't support " + "addition of columns to underlying " + "selectables") + + def _copy_internals(self, clone=_clone, **kw): + super(CompoundSelect, self)._copy_internals(clone, **kw) + self._reset_exported() + self.selects = [clone(s, **kw) for s in self.selects] + if hasattr(self, '_col_map'): + del self._col_map + for attr in ( + '_order_by_clause', '_group_by_clause', '_for_update_arg'): + if getattr(self, attr) is not None: + setattr(self, attr, clone(getattr(self, attr), **kw)) + + def get_children(self, column_collections=True, **kwargs): + return (column_collections and list(self.c) or []) \ + + [self._order_by_clause, self._group_by_clause] \ + + list(self.selects) + + def bind(self): + if self._bind: + return self._bind + for s in self.selects: + e = s.bind + if e: + return e + else: + return None + + def _set_bind(self, bind): + self._bind = bind + bind = property(bind, _set_bind) + + +class Select(HasPrefixes, HasSuffixes, GenerativeSelect): + """Represents a ``SELECT`` statement. + + """ + + __visit_name__ = 'select' + + _prefixes = () + _suffixes = () + _hints = util.immutabledict() + _statement_hints = () + _distinct = False + _from_cloned = None + _correlate = () + _correlate_except = None + _memoized_property = SelectBase._memoized_property + _is_select = True + + def __init__(self, + columns=None, + whereclause=None, + from_obj=None, + distinct=False, + having=None, + correlate=True, + prefixes=None, + suffixes=None, + **kwargs): + """Construct a new :class:`.Select`. + + Similar functionality is also available via the + :meth:`.FromClause.select` method on any :class:`.FromClause`. + + All arguments which accept :class:`.ClauseElement` arguments also + accept string arguments, which will be converted as appropriate into + either :func:`text()` or :func:`literal_column()` constructs. + + .. seealso:: + + :ref:`coretutorial_selecting` - Core Tutorial description of + :func:`.select`. + + :param columns: + A list of :class:`.ColumnElement` or :class:`.FromClause` + objects which will form the columns clause of the resulting + statement. For those objects that are instances of + :class:`.FromClause` (typically :class:`.Table` or :class:`.Alias` + objects), the :attr:`.FromClause.c` collection is extracted + to form a collection of :class:`.ColumnElement` objects. + + This parameter will also accept :class:`.Text` constructs as + given, as well as ORM-mapped classes. + + .. note:: + + The :paramref:`.select.columns` parameter is not available + in the method form of :func:`.select`, e.g. + :meth:`.FromClause.select`. + + .. seealso:: + + :meth:`.Select.column` + + :meth:`.Select.with_only_columns` + + :param whereclause: + A :class:`.ClauseElement` expression which will be used to form the + ``WHERE`` clause. 
It is typically preferable to add WHERE + criterion to an existing :class:`.Select` using method chaining + with :meth:`.Select.where`. + + .. seealso:: + + :meth:`.Select.where` + + :param from_obj: + A list of :class:`.ClauseElement` objects which will be added to the + ``FROM`` clause of the resulting statement. This is equivalent + to calling :meth:`.Select.select_from` using method chaining on + an existing :class:`.Select` object. + + .. seealso:: + + :meth:`.Select.select_from` - full description of explicit + FROM clause specification. + + :param autocommit: + Deprecated. Use ``.execution_options(autocommit=)`` + to set the autocommit option. + + .. seealso:: + + :meth:`.Executable.execution_options` + + :param bind=None: + an :class:`~.Engine` or :class:`~.Connection` instance + to which the + resulting :class:`.Select` object will be bound. The + :class:`.Select` object will otherwise automatically bind to + whatever :class:`~.base.Connectable` instances can be located within + its contained :class:`.ClauseElement` members. + + :param correlate=True: + indicates that this :class:`.Select` object should have its + contained :class:`.FromClause` elements "correlated" to an enclosing + :class:`.Select` object. It is typically preferable to specify + correlations on an existing :class:`.Select` construct using + :meth:`.Select.correlate`. + + .. seealso:: + + :meth:`.Select.correlate` - full description of correlation. + + :param distinct=False: + when ``True``, applies a ``DISTINCT`` qualifier to the columns + clause of the resulting statement. + + The boolean argument may also be a column expression or list + of column expressions - this is a special calling form which + is understood by the Postgresql dialect to render the + ``DISTINCT ON ()`` syntax. + + ``distinct`` is also available on an existing :class:`.Select` + object via the :meth:`~.Select.distinct` method. + + .. seealso:: + + :meth:`.Select.distinct` + + :param for_update=False: + when ``True``, applies ``FOR UPDATE`` to the end of the + resulting statement. + + .. deprecated:: 0.9.0 - use + :meth:`.Select.with_for_update` to specify the + structure of the ``FOR UPDATE`` clause. + + ``for_update`` accepts various string values interpreted by + specific backends, including: + + * ``"read"`` - on MySQL, translates to ``LOCK IN SHARE MODE``; + on Postgresql, translates to ``FOR SHARE``. + * ``"nowait"`` - on Postgresql and Oracle, translates to + ``FOR UPDATE NOWAIT``. + * ``"read_nowait"`` - on Postgresql, translates to + ``FOR SHARE NOWAIT``. + + .. seealso:: + + :meth:`.Select.with_for_update` - improved API for + specifying the ``FOR UPDATE`` clause. + + :param group_by: + a list of :class:`.ClauseElement` objects which will comprise the + ``GROUP BY`` clause of the resulting select. This parameter + is typically specified more naturally using the + :meth:`.Select.group_by` method on an existing :class:`.Select`. + + .. seealso:: + + :meth:`.Select.group_by` + + :param having: + a :class:`.ClauseElement` that will comprise the ``HAVING`` clause + of the resulting select when ``GROUP BY`` is used. This parameter + is typically specified more naturally using the + :meth:`.Select.having` method on an existing :class:`.Select`. + + .. seealso:: + + :meth:`.Select.having` + + :param limit=None: + a numerical value which usually renders as a ``LIMIT`` + expression in the resulting select. Backends that don't + support ``LIMIT`` will attempt to provide similar + functionality. 
This parameter is typically specified more naturally
+          using the :meth:`.Select.limit` method on an existing
+          :class:`.Select`.
+
+          .. seealso::
+
+             :meth:`.Select.limit`
+
+        :param offset=None:
+          a numeric value which usually renders as an ``OFFSET``
+          expression in the resulting select.  Backends that don't
+          support ``OFFSET`` will attempt to provide similar
+          functionality.  This parameter is typically specified more
+          naturally using the :meth:`.Select.offset` method on an
+          existing :class:`.Select`.
+
+          .. seealso::
+
+             :meth:`.Select.offset`
+
+        :param order_by:
+          a scalar or list of :class:`.ClauseElement` objects which will
+          comprise the ``ORDER BY`` clause of the resulting select.
+          This parameter is typically specified more naturally using the
+          :meth:`.Select.order_by` method on an existing :class:`.Select`.
+
+          .. seealso::
+
+             :meth:`.Select.order_by`
+
+        :param use_labels=False:
+          when ``True``, the statement will be generated using labels
+          for each column in the columns clause, which qualify each
+          column with its parent table's (or alias's) name so that name
+          conflicts between columns in different tables don't occur.
+          The format of the label is ``<tablename>_<columnname>``.  The
+          "c" collection of the resulting :class:`.Select` object will
+          use these names as well for targeting column members.
+
+          This parameter can also be specified on an existing
+          :class:`.Select` object using the :meth:`.Select.apply_labels`
+          method.
+
+          .. seealso::
+
+             :meth:`.Select.apply_labels`
+
+        """
+        self._auto_correlate = correlate
+        if distinct is not False:
+            if distinct is True:
+                self._distinct = True
+            else:
+                self._distinct = [
+                    _literal_as_text(e)
+                    for e in util.to_list(distinct)
+                ]
+
+        if from_obj is not None:
+            self._from_obj = util.OrderedSet(
+                _interpret_as_from(f)
+                for f in util.to_list(from_obj))
+        else:
+            self._from_obj = util.OrderedSet()
+
+        try:
+            cols_present = bool(columns)
+        except TypeError:
+            raise exc.ArgumentError("columns argument to select() must "
+                                    "be a Python list or other iterable")
+
+        if cols_present:
+            self._raw_columns = []
+            for c in columns:
+                c = _interpret_as_column_or_from(c)
+                if isinstance(c, ScalarSelect):
+                    c = c.self_group(against=operators.comma_op)
+                self._raw_columns.append(c)
+        else:
+            self._raw_columns = []
+
+        if whereclause is not None:
+            self._whereclause = _literal_as_text(
+                whereclause).self_group(against=operators._asbool)
+        else:
+            self._whereclause = None
+
+        if having is not None:
+            self._having = _literal_as_text(
+                having).self_group(against=operators._asbool)
+        else:
+            self._having = None
+
+        if prefixes:
+            self._setup_prefixes(prefixes)
+
+        if suffixes:
+            self._setup_suffixes(suffixes)
+
+        GenerativeSelect.__init__(self, **kwargs)
+
+    @property
+    def _froms(self):
+        # would love to cache this,
+        # but there's just enough edge cases, particularly now that
+        # declarative encourages construction of SQL expressions
+        # without tables present, to just regen this each time.
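+        # (descriptive note: the chain below scans the raw columns,
+        # the WHERE clause and the explicit _from_obj set; cloned
+        # froms are translated via _from_cloned, and anything whose
+        # _cloned_set was already seen is skipped, so each FROM
+        # appears once.)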
+ froms = [] + seen = set() + translate = self._from_cloned + + for item in itertools.chain( + _from_objects(*self._raw_columns), + _from_objects(self._whereclause) + if self._whereclause is not None else (), + self._from_obj + ): + if item is self: + raise exc.InvalidRequestError( + "select() construct refers to itself as a FROM") + if translate and item in translate: + item = translate[item] + if not seen.intersection(item._cloned_set): + froms.append(item) + seen.update(item._cloned_set) + + return froms + + def _get_display_froms(self, explicit_correlate_froms=None, + implicit_correlate_froms=None): + """Return the full list of 'from' clauses to be displayed. + + Takes into account a set of existing froms which may be + rendered in the FROM clause of enclosing selects; this Select + may want to leave those absent if it is automatically + correlating. + + """ + froms = self._froms + + toremove = set(itertools.chain(*[ + _expand_cloned(f._hide_froms) + for f in froms])) + if toremove: + # if we're maintaining clones of froms, + # add the copies out to the toremove list. only include + # clones that are lexical equivalents. + if self._from_cloned: + toremove.update( + self._from_cloned[f] for f in + toremove.intersection(self._from_cloned) + if self._from_cloned[f]._is_lexical_equivalent(f) + ) + # filter out to FROM clauses not in the list, + # using a list to maintain ordering + froms = [f for f in froms if f not in toremove] + + if self._correlate: + to_correlate = self._correlate + if to_correlate: + froms = [ + f for f in froms if f not in + _cloned_intersection( + _cloned_intersection( + froms, explicit_correlate_froms or ()), + to_correlate + ) + ] + + if self._correlate_except is not None: + + froms = [ + f for f in froms if f not in + _cloned_difference( + _cloned_intersection( + froms, explicit_correlate_froms or ()), + self._correlate_except + ) + ] + + if self._auto_correlate and \ + implicit_correlate_froms and \ + len(froms) > 1: + + froms = [ + f for f in froms if f not in + _cloned_intersection(froms, implicit_correlate_froms) + ] + + if not len(froms): + raise exc.InvalidRequestError("Select statement '%s" + "' returned no FROM clauses " + "due to auto-correlation; " + "specify correlate() " + "to control correlation " + "manually." % self) + + return froms + + def _scalar_type(self): + elem = self._raw_columns[0] + cols = list(elem._select_iterable) + return cols[0].type + + @property + def froms(self): + """Return the displayed list of FromClause elements.""" + + return self._get_display_froms() + + def with_statement_hint(self, text, dialect_name='*'): + """add a statement hint to this :class:`.Select`. + + This method is similar to :meth:`.Select.with_hint` except that + it does not require an individual table, and instead applies to the + statement as a whole. + + Hints here are specific to the backend database and may include + directives such as isolation levels, file directives, fetch directives, + etc. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :meth:`.Select.with_hint` + + """ + return self.with_hint(None, text, dialect_name) + + @_generative + def with_hint(self, selectable, text, dialect_name='*'): + """Add an indexing or other executional context hint for the given + selectable to this :class:`.Select`. + + The text of the hint is rendered in the appropriate + location for the database backend in use, relative + to the given :class:`.Table` or :class:`.Alias` passed as the + ``selectable`` argument. 
The dialect implementation + typically uses Python string substitution syntax + with the token ``%(name)s`` to render the name of + the table or alias. E.g. when using Oracle, the + following:: + + select([mytable]).\\ + with_hint(mytable, "index(%(name)s ix_mytable)") + + Would render SQL as:: + + select /*+ index(mytable ix_mytable) */ ... from mytable + + The ``dialect_name`` option will limit the rendering of a particular + hint to a particular backend. Such as, to add hints for both Oracle + and Sybase simultaneously:: + + select([mytable]).\\ + with_hint(mytable, "index(%(name)s ix_mytable)", 'oracle').\\ + with_hint(mytable, "WITH INDEX ix_mytable", 'sybase') + + .. seealso:: + + :meth:`.Select.with_statement_hint` + + """ + if selectable is None: + self._statement_hints += ((dialect_name, text), ) + else: + self._hints = self._hints.union( + {(selectable, dialect_name): text}) + + @property + def type(self): + raise exc.InvalidRequestError("Select objects don't have a type. " + "Call as_scalar() on this Select " + "object to return a 'scalar' version " + "of this Select.") + + @_memoized_property.method + def locate_all_froms(self): + """return a Set of all FromClause elements referenced by this Select. + + This set is a superset of that returned by the ``froms`` property, + which is specifically for those FromClause elements that would + actually be rendered. + + """ + froms = self._froms + return froms + list(_from_objects(*froms)) + + @property + def inner_columns(self): + """an iterator of all ColumnElement expressions which would + be rendered into the columns clause of the resulting SELECT statement. + + """ + return _select_iterables(self._raw_columns) + + @_memoized_property + def _label_resolve_dict(self): + with_cols = dict( + (c._resolve_label or c._label or c.key, c) + for c in _select_iterables(self._raw_columns) + if c._allow_label_resolve) + only_froms = dict( + (c.key, c) for c in + _select_iterables(self.froms) if c._allow_label_resolve) + for key, value in only_froms.items(): + with_cols.setdefault(key, value) + + return with_cols, only_froms + + def is_derived_from(self, fromclause): + if self in fromclause._cloned_set: + return True + + for f in self.locate_all_froms(): + if f.is_derived_from(fromclause): + return True + return False + + def _copy_internals(self, clone=_clone, **kw): + super(Select, self)._copy_internals(clone, **kw) + + # Select() object has been cloned and probably adapted by the + # given clone function. Apply the cloning function to internal + # objects + + # 1. keep a dictionary of the froms we've cloned, and what + # they've become. This is consulted later when we derive + # additional froms from "whereclause" and the columns clause, + # which may still reference the uncloned parent table. + # as of 0.7.4 we also put the current version of _froms, which + # gets cleared on each generation. previously we were "baking" + # _froms into self._from_obj. + self._from_cloned = from_cloned = dict( + (f, clone(f, **kw)) for f in self._from_obj.union(self._froms)) + + # 3. update persistent _from_obj with the cloned versions. + self._from_obj = util.OrderedSet(from_cloned[f] for f in + self._from_obj) + + # the _correlate collection is done separately, what can happen + # here is the same item is _correlate as in _from_obj but the + # _correlate version has an annotation on it - (specifically + # RelationshipProperty.Comparator._criterion_exists() does + # this). 
Also keep _correlate liberally open with its previous
+        # contents, as this set is used for matching, not rendering.
+        self._correlate = set(clone(f) for f in
+                              self._correlate).union(self._correlate)
+
+        # 4. clone other things.  The difficulty here is that Column
+        # objects are not actually cloned, and refer to their original
+        # .table, resulting in the wrong "from" parent after a clone
+        # operation.  Hence _from_cloned and _from_obj supersede what is
+        # present here.
+        self._raw_columns = [clone(c, **kw) for c in self._raw_columns]
+        for attr in '_whereclause', '_having', '_order_by_clause', \
+                '_group_by_clause', '_for_update_arg':
+            if getattr(self, attr) is not None:
+                setattr(self, attr, clone(getattr(self, attr), **kw))
+
+        # erase exported column list, _froms collection,
+        # etc.
+        self._reset_exported()
+
+    def get_children(self, column_collections=True, **kwargs):
+        """return child elements as per the ClauseElement specification."""
+
+        return (column_collections and list(self.columns) or []) + \
+            self._raw_columns + list(self._froms) + \
+            [x for x in
+                (self._whereclause, self._having,
+                 self._order_by_clause, self._group_by_clause)
+             if x is not None]
+
+    @_generative
+    def column(self, column):
+        """return a new select() construct with the given column expression
+        added to its columns clause.
+
+        """
+        self.append_column(column)
+
+    @util.dependencies("sqlalchemy.sql.util")
+    def reduce_columns(self, sqlutil, only_synonyms=True):
+        """Return a new :func:`.select` construct with redundantly
+        named, equivalently-valued columns removed from the columns clause.
+
+        "Redundant" here means two columns where one refers to the
+        other either based on foreign key, or via a simple equality
+        comparison in the WHERE clause of the statement.  The primary
+        purpose of this method is to automatically construct a select
+        statement with all uniquely-named columns, without the need to
+        use table-qualified labels as :meth:`.apply_labels` does.
+
+        When columns are omitted based on foreign key, the referred-to
+        column is the one that's kept.  When columns are omitted based on
+        WHERE equivalence, the first column in the columns clause is the
+        one that's kept.
+
+        :param only_synonyms: when True, limit the removal of columns
+         to those which have the same name as the equivalent.  Otherwise,
+         all columns that are equivalent to another are removed.
+
+        .. versionadded:: 0.8
+
+        """
+        return self.with_only_columns(
+            sqlutil.reduce_columns(
+                self.inner_columns,
+                only_synonyms=only_synonyms,
+                *(self._whereclause, ) + tuple(self._from_obj)
+            )
+        )
+
+    @_generative
+    def with_only_columns(self, columns):
+        """Return a new :func:`.select` construct with its columns
+        clause replaced with the given columns.
+
+        .. versionchanged:: 0.7.3
+            Due to a bug fix, this method has a slight
+            behavioral change as of version 0.7.3.
+            Prior to version 0.7.3, the FROM clause of
+            a :func:`.select` was calculated upfront and as new columns
+            were added; in 0.7.3 and later it's calculated
+            at compile time, fixing an issue regarding late binding
+            of columns to parent tables.  This changes the behavior of
+            :meth:`.Select.with_only_columns` in that FROM clauses no
+            longer represented in the new list are dropped,
+            but this behavior is more consistent in
+            that the FROM clauses are consistently derived from the
+            current columns clause.
The original intent of this method + is to allow trimming of the existing columns list to be fewer + columns than originally present; the use case of replacing + the columns list with an entirely different one hadn't + been anticipated until 0.7.3 was released; the usage + guidelines below illustrate how this should be done. + + This method is exactly equivalent to as if the original + :func:`.select` had been called with the given columns + clause. I.e. a statement:: + + s = select([table1.c.a, table1.c.b]) + s = s.with_only_columns([table1.c.b]) + + should be exactly equivalent to:: + + s = select([table1.c.b]) + + This means that FROM clauses which are only derived + from the column list will be discarded if the new column + list no longer contains that FROM:: + + >>> table1 = table('t1', column('a'), column('b')) + >>> table2 = table('t2', column('a'), column('b')) + >>> s1 = select([table1.c.a, table2.c.b]) + >>> print s1 + SELECT t1.a, t2.b FROM t1, t2 + >>> s2 = s1.with_only_columns([table2.c.b]) + >>> print s2 + SELECT t2.b FROM t1 + + The preferred way to maintain a specific FROM clause + in the construct, assuming it won't be represented anywhere + else (i.e. not in the WHERE clause, etc.) is to set it using + :meth:`.Select.select_from`:: + + >>> s1 = select([table1.c.a, table2.c.b]).\\ + ... select_from(table1.join(table2, + ... table1.c.a==table2.c.a)) + >>> s2 = s1.with_only_columns([table2.c.b]) + >>> print s2 + SELECT t2.b FROM t1 JOIN t2 ON t1.a=t2.a + + Care should also be taken to use the correct + set of column objects passed to :meth:`.Select.with_only_columns`. + Since the method is essentially equivalent to calling the + :func:`.select` construct in the first place with the given + columns, the columns passed to :meth:`.Select.with_only_columns` + should usually be a subset of those which were passed + to the :func:`.select` construct, not those which are available + from the ``.c`` collection of that :func:`.select`. That + is:: + + s = select([table1.c.a, table1.c.b]).select_from(table1) + s = s.with_only_columns([table1.c.b]) + + and **not**:: + + # usually incorrect + s = s.with_only_columns([s.c.b]) + + The latter would produce the SQL:: + + SELECT b + FROM (SELECT t1.a AS a, t1.b AS b + FROM t1), t1 + + Since the :func:`.select` construct is essentially being + asked to select both from ``table1`` as well as itself. + + """ + self._reset_exported() + rc = [] + for c in columns: + c = _interpret_as_column_or_from(c) + if isinstance(c, ScalarSelect): + c = c.self_group(against=operators.comma_op) + rc.append(c) + self._raw_columns = rc + + @_generative + def where(self, whereclause): + """return a new select() construct with the given expression added to + its WHERE clause, joined to the existing clause via AND, if any. + + """ + + self.append_whereclause(whereclause) + + @_generative + def having(self, having): + """return a new select() construct with the given expression added to + its HAVING clause, joined to the existing clause via AND, if any. + + """ + self.append_having(having) + + @_generative + def distinct(self, *expr): + """Return a new select() construct which will apply DISTINCT to its + columns clause. + + :param \*expr: optional column expressions. When present, + the Postgresql dialect will render a ``DISTINCT ON (>)`` + construct. 
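+
+        E.g., a minimal sketch (``users`` is a hypothetical table)::
+
+            stmt = select([users.c.name, users.c.id]).\\
+                distinct(users.c.name)
+            # on Postgresql, renders DISTINCT ON (users.name)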
+ + """ + if expr: + expr = [_literal_as_label_reference(e) for e in expr] + if isinstance(self._distinct, list): + self._distinct = self._distinct + expr + else: + self._distinct = expr + else: + self._distinct = True + + @_generative + def select_from(self, fromclause): + """return a new :func:`.select` construct with the + given FROM expression + merged into its list of FROM objects. + + E.g.:: + + table1 = table('t1', column('a')) + table2 = table('t2', column('b')) + s = select([table1.c.a]).\\ + select_from( + table1.join(table2, table1.c.a==table2.c.b) + ) + + The "from" list is a unique set on the identity of each element, + so adding an already present :class:`.Table` or other selectable + will have no effect. Passing a :class:`.Join` that refers + to an already present :class:`.Table` or other selectable will have + the effect of concealing the presence of that selectable as + an individual element in the rendered FROM list, instead + rendering it into a JOIN clause. + + While the typical purpose of :meth:`.Select.select_from` is to + replace the default, derived FROM clause with a join, it can + also be called with individual table elements, multiple times + if desired, in the case that the FROM clause cannot be fully + derived from the columns clause:: + + select([func.count('*')]).select_from(table1) + + """ + self.append_from(fromclause) + + @_generative + def correlate(self, *fromclauses): + """return a new :class:`.Select` which will correlate the given FROM + clauses to that of an enclosing :class:`.Select`. + + Calling this method turns off the :class:`.Select` object's + default behavior of "auto-correlation". Normally, FROM elements + which appear in a :class:`.Select` that encloses this one via + its :term:`WHERE clause`, ORDER BY, HAVING or + :term:`columns clause` will be omitted from this :class:`.Select` + object's :term:`FROM clause`. + Setting an explicit correlation collection using the + :meth:`.Select.correlate` method provides a fixed list of FROM objects + that can potentially take place in this process. + + When :meth:`.Select.correlate` is used to apply specific FROM clauses + for correlation, the FROM elements become candidates for + correlation regardless of how deeply nested this :class:`.Select` + object is, relative to an enclosing :class:`.Select` which refers to + the same FROM object. This is in contrast to the behavior of + "auto-correlation" which only correlates to an immediate enclosing + :class:`.Select`. Multi-level correlation ensures that the link + between enclosed and enclosing :class:`.Select` is always via + at least one WHERE/ORDER BY/HAVING/columns clause in order for + correlation to take place. + + If ``None`` is passed, the :class:`.Select` object will correlate + none of its FROM entries, and all will render unconditionally + in the local FROM clause. + + :param \*fromclauses: a list of one or more :class:`.FromClause` + constructs, or other compatible constructs (i.e. ORM-mapped + classes) to become part of the correlate collection. + + .. versionchanged:: 0.8.0 ORM-mapped classes are accepted by + :meth:`.Select.correlate`. + + .. versionchanged:: 0.8.0 The :meth:`.Select.correlate` method no + longer unconditionally removes entries from the FROM clause; + instead, the candidate FROM entries must also be matched by a FROM + entry located in an enclosing :class:`.Select`, which ultimately + encloses this one as present in the WHERE clause, ORDER BY clause, + HAVING clause, or columns clause of an enclosing :meth:`.Select`. + + .. 
versionchanged:: 0.8.2 explicit correlation takes place + via any level of nesting of :class:`.Select` objects; in previous + 0.8 versions, correlation would only occur relative to the + immediate enclosing :class:`.Select` construct. + + .. seealso:: + + :meth:`.Select.correlate_except` + + :ref:`correlated_subqueries` + + """ + self._auto_correlate = False + if fromclauses and fromclauses[0] is None: + self._correlate = () + else: + self._correlate = set(self._correlate).union( + _interpret_as_from(f) for f in fromclauses) + + @_generative + def correlate_except(self, *fromclauses): + """return a new :class:`.Select` which will omit the given FROM + clauses from the auto-correlation process. + + Calling :meth:`.Select.correlate_except` turns off the + :class:`.Select` object's default behavior of + "auto-correlation" for the given FROM elements. An element + specified here will unconditionally appear in the FROM list, while + all other FROM elements remain subject to normal auto-correlation + behaviors. + + .. versionchanged:: 0.8.2 The :meth:`.Select.correlate_except` + method was improved to fully prevent FROM clauses specified here + from being omitted from the immediate FROM clause of this + :class:`.Select`. + + If ``None`` is passed, the :class:`.Select` object will correlate + all of its FROM entries. + + .. versionchanged:: 0.8.2 calling ``correlate_except(None)`` will + correctly auto-correlate all FROM clauses. + + :param \*fromclauses: a list of one or more :class:`.FromClause` + constructs, or other compatible constructs (i.e. ORM-mapped + classes) to become part of the correlate-exception collection. + + .. seealso:: + + :meth:`.Select.correlate` + + :ref:`correlated_subqueries` + + """ + + self._auto_correlate = False + if fromclauses and fromclauses[0] is None: + self._correlate_except = () + else: + self._correlate_except = set(self._correlate_except or ()).union( + _interpret_as_from(f) for f in fromclauses) + + def append_correlation(self, fromclause): + """append the given correlation expression to this select() + construct. + + This is an **in-place** mutation method; the + :meth:`~.Select.correlate` method is preferred, as it provides + standard :term:`method chaining`. + + """ + + self._auto_correlate = False + self._correlate = set(self._correlate).union( + _interpret_as_from(f) for f in fromclause) + + def append_column(self, column): + """append the given column expression to the columns clause of this + select() construct. + + This is an **in-place** mutation method; the + :meth:`~.Select.column` method is preferred, as it provides standard + :term:`method chaining`. + + """ + self._reset_exported() + column = _interpret_as_column_or_from(column) + + if isinstance(column, ScalarSelect): + column = column.self_group(against=operators.comma_op) + + self._raw_columns = self._raw_columns + [column] + + def append_prefix(self, clause): + """append the given columns clause prefix expression to this select() + construct. + + This is an **in-place** mutation method; the + :meth:`~.Select.prefix_with` method is preferred, as it provides + standard :term:`method chaining`. + + """ + clause = _literal_as_text(clause) + self._prefixes = self._prefixes + (clause,) + + def append_whereclause(self, whereclause): + """append the given expression to this select() construct's WHERE + criterion. + + The expression will be joined to existing WHERE criterion via AND. 
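+
+        E.g., a minimal sketch (``users`` is a hypothetical table)::
+
+            stmt = select([users.c.id])
+            stmt.append_whereclause(users.c.name == 'ed')
+            stmt.append_whereclause(users.c.id > 5)
+            # both criteria render, joined by AND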
+ + This is an **in-place** mutation method; the + :meth:`~.Select.where` method is preferred, as it provides standard + :term:`method chaining`. + + """ + + self._reset_exported() + self._whereclause = and_( + True_._ifnone(self._whereclause), whereclause) + + def append_having(self, having): + """append the given expression to this select() construct's HAVING + criterion. + + The expression will be joined to existing HAVING criterion via AND. + + This is an **in-place** mutation method; the + :meth:`~.Select.having` method is preferred, as it provides standard + :term:`method chaining`. + + """ + self._reset_exported() + self._having = and_(True_._ifnone(self._having), having) + + def append_from(self, fromclause): + """append the given FromClause expression to this select() construct's + FROM clause. + + This is an **in-place** mutation method; the + :meth:`~.Select.select_from` method is preferred, as it provides + standard :term:`method chaining`. + + """ + self._reset_exported() + fromclause = _interpret_as_from(fromclause) + self._from_obj = self._from_obj.union([fromclause]) + + @_memoized_property + def _columns_plus_names(self): + if self.use_labels: + names = set() + + def name_for_col(c): + if c._label is None or not c._render_label_in_columns_clause: + return (None, c) + + name = c._label + if name in names: + name = c.anon_label + else: + names.add(name) + return name, c + + return [ + name_for_col(c) + for c in util.unique_list( + _select_iterables(self._raw_columns)) + ] + else: + return [ + (None, c) + for c in util.unique_list( + _select_iterables(self._raw_columns)) + ] + + def _populate_column_collection(self): + for name, c in self._columns_plus_names: + if not hasattr(c, '_make_proxy'): + continue + if name is None: + key = None + elif self.use_labels: + key = c._key_label + if key is not None and key in self.c: + key = c.anon_label + else: + key = None + + c._make_proxy(self, key=key, + name=name, + name_is_truncatable=True) + + def _refresh_for_new_column(self, column): + for fromclause in self._froms: + col = fromclause._refresh_for_new_column(column) + if col is not None: + if col in self.inner_columns and self._cols_populated: + our_label = col._key_label if self.use_labels else col.key + if our_label not in self.c: + return col._make_proxy( + self, + name=col._label if self.use_labels else None, + key=col._key_label if self.use_labels else None, + name_is_truncatable=True) + return None + return None + + def self_group(self, against=None): + """return a 'grouping' construct as per the ClauseElement + specification. + + This produces an element that can be embedded in an expression. Note + that this method is called automatically as needed when constructing + expressions and should not require explicit use. + + """ + if isinstance(against, CompoundSelect): + return self + return FromGrouping(self) + + def union(self, other, **kwargs): + """return a SQL UNION of this select() construct against the given + selectable.""" + + return CompoundSelect._create_union(self, other, **kwargs) + + def union_all(self, other, **kwargs): + """return a SQL UNION ALL of this select() construct against the given + selectable. 
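+
+        E.g., a minimal sketch (``t1`` and ``t2`` are hypothetical
+        tables with compatible columns)::
+
+            stmt = select([t1.c.x]).union_all(select([t2.c.x]))
+            # duplicate rows are retained, unlike union()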
+ + """ + return CompoundSelect._create_union_all(self, other, **kwargs) + + def except_(self, other, **kwargs): + """return a SQL EXCEPT of this select() construct against the given + selectable.""" + + return CompoundSelect._create_except(self, other, **kwargs) + + def except_all(self, other, **kwargs): + """return a SQL EXCEPT ALL of this select() construct against the + given selectable. + + """ + return CompoundSelect._create_except_all(self, other, **kwargs) + + def intersect(self, other, **kwargs): + """return a SQL INTERSECT of this select() construct against the given + selectable. + + """ + return CompoundSelect._create_intersect(self, other, **kwargs) + + def intersect_all(self, other, **kwargs): + """return a SQL INTERSECT ALL of this select() construct against the + given selectable. + + """ + return CompoundSelect._create_intersect_all(self, other, **kwargs) + + def bind(self): + if self._bind: + return self._bind + froms = self._froms + if not froms: + for c in self._raw_columns: + e = c.bind + if e: + self._bind = e + return e + else: + e = list(froms)[0].bind + if e: + self._bind = e + return e + + return None + + def _set_bind(self, bind): + self._bind = bind + bind = property(bind, _set_bind) + + +class ScalarSelect(Generative, Grouping): + _from_objects = [] + _is_from_container = True + + def __init__(self, element): + self.element = element + self.type = element._scalar_type() + + @property + def columns(self): + raise exc.InvalidRequestError('Scalar Select expression has no ' + 'columns; use this object directly ' + 'within a column-level expression.') + c = columns + + @_generative + def where(self, crit): + """Apply a WHERE clause to the SELECT statement referred to + by this :class:`.ScalarSelect`. + + """ + self.element = self.element.where(crit) + + def self_group(self, **kwargs): + return self + + +class Exists(UnaryExpression): + """Represent an ``EXISTS`` clause. + + """ + __visit_name__ = UnaryExpression.__visit_name__ + _from_objects = [] + + def __init__(self, *args, **kwargs): + """Construct a new :class:`.Exists` against an existing + :class:`.Select` object. + + Calling styles are of the following forms:: + + # use on an existing select() + s = select([table.c.col1]).where(table.c.col2==5) + s = exists(s) + + # construct a select() at once + exists(['*'], **select_arguments).where(criterion) + + # columns argument is optional, generates "EXISTS (SELECT *)" + # by default. + exists().where(table.c.col2==5) + + """ + if args and isinstance(args[0], (SelectBase, ScalarSelect)): + s = args[0] + else: + if not args: + args = ([literal_column('*')],) + s = Select(*args, **kwargs).as_scalar().self_group() + + UnaryExpression.__init__(self, s, operator=operators.exists, + type_=type_api.BOOLEANTYPE, + wraps_column_expression=True) + + def select(self, whereclause=None, **params): + return Select([self], whereclause, **params) + + def correlate(self, *fromclause): + e = self._clone() + e.element = self.element.correlate(*fromclause).self_group() + return e + + def correlate_except(self, *fromclause): + e = self._clone() + e.element = self.element.correlate_except(*fromclause).self_group() + return e + + def select_from(self, clause): + """return a new :class:`.Exists` construct, applying the given + expression to the :meth:`.Select.select_from` method of the select + statement contained. 
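+
+        E.g., a minimal sketch (``addresses`` is a hypothetical table)::
+
+            stmt = exists().where(addresses.c.user_id == 5).\\
+                select_from(addresses)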
+ + """ + e = self._clone() + e.element = self.element.select_from(clause).self_group() + return e + + def where(self, clause): + """return a new exists() construct with the given expression added to + its WHERE clause, joined to the existing clause via AND, if any. + + """ + e = self._clone() + e.element = self.element.where(clause).self_group() + return e + + +class TextAsFrom(SelectBase): + """Wrap a :class:`.TextClause` construct within a :class:`.SelectBase` + interface. + + This allows the :class:`.TextClause` object to gain a ``.c`` collection + and other FROM-like capabilities such as :meth:`.FromClause.alias`, + :meth:`.SelectBase.cte`, etc. + + The :class:`.TextAsFrom` construct is produced via the + :meth:`.TextClause.columns` method - see that method for details. + + .. versionadded:: 0.9.0 + + .. seealso:: + + :func:`.text` + + :meth:`.TextClause.columns` + + """ + __visit_name__ = "text_as_from" + + _textual = True + + def __init__(self, text, columns): + self.element = text + self.column_args = columns + + @property + def _bind(self): + return self.element._bind + + @_generative + def bindparams(self, *binds, **bind_as_values): + self.element = self.element.bindparams(*binds, **bind_as_values) + + def _populate_column_collection(self): + for c in self.column_args: + c._make_proxy(self) + + def _copy_internals(self, clone=_clone, **kw): + self._reset_exported() + self.element = clone(self.element, **kw) + + def _scalar_type(self): + return self.column_args[0].type + + +class AnnotatedFromClause(Annotated): + def __init__(self, element, values): + # force FromClause to generate their internal + # collections into __dict__ + element.c + Annotated.__init__(self, element, values) diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/sqltypes.py b/lib/python3.4/site-packages/sqlalchemy/sql/sqltypes.py new file mode 100644 index 0000000..20a9b21 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/sqltypes.py @@ -0,0 +1,1709 @@ +# sql/sqltypes.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""SQL specific types. + +""" + +import datetime as dt +import codecs + +from .type_api import TypeEngine, TypeDecorator, to_instance +from .elements import quoted_name, type_coerce, _defer_name +from .. import exc, util, processors +from .base import _bind_or_error, SchemaEventTarget +from . import operators +from .. import event +from ..util import pickle +import decimal + +if util.jython: + import array + + +class _DateAffinity(object): + + """Mixin date/time specific expression adaptations. + + Rules are implemented within Date,Time,Interval,DateTime, Numeric, + Integer. Based on http://www.postgresql.org/docs/current/static + /functions-datetime.html. + + """ + + @property + def _expression_adaptations(self): + raise NotImplementedError() + + class Comparator(TypeEngine.Comparator): + _blank_dict = util.immutabledict() + + def _adapt_expression(self, op, other_comparator): + othertype = other_comparator.type._type_affinity + return ( + op, to_instance( + self.type._expression_adaptations. + get(op, self._blank_dict). 
+ get(othertype, NULLTYPE)) + ) + comparator_factory = Comparator + + +class Concatenable(object): + + """A mixin that marks a type as supporting 'concatenation', + typically strings.""" + + class Comparator(TypeEngine.Comparator): + + def _adapt_expression(self, op, other_comparator): + if (op is operators.add and + isinstance( + other_comparator, + (Concatenable.Comparator, NullType.Comparator) + )): + return operators.concat_op, self.expr.type + else: + return op, self.expr.type + + comparator_factory = Comparator + + +class String(Concatenable, TypeEngine): + + """The base for all string and character types. + + In SQL, corresponds to VARCHAR. Can also take Python unicode objects + and encode to the database's encoding in bind params (and the reverse for + result sets.) + + The `length` field is usually required when the `String` type is + used within a CREATE TABLE statement, as VARCHAR requires a length + on most databases. + + """ + + __visit_name__ = 'string' + + def __init__(self, length=None, collation=None, + convert_unicode=False, + unicode_error=None, + _warn_on_bytestring=False + ): + """ + Create a string-holding type. + + :param length: optional, a length for the column for use in + DDL and CAST expressions. May be safely omitted if no ``CREATE + TABLE`` will be issued. Certain databases may require a + ``length`` for use in DDL, and will raise an exception when + the ``CREATE TABLE`` DDL is issued if a ``VARCHAR`` + with no length is included. Whether the value is + interpreted as bytes or characters is database specific. + + :param collation: Optional, a column-level collation for + use in DDL and CAST expressions. Renders using the + COLLATE keyword supported by SQLite, MySQL, and Postgresql. + E.g.:: + + >>> from sqlalchemy import cast, select, String + >>> print select([cast('some string', String(collation='utf8'))]) + SELECT CAST(:param_1 AS VARCHAR COLLATE utf8) AS anon_1 + + .. versionadded:: 0.8 Added support for COLLATE to all + string types. + + :param convert_unicode: When set to ``True``, the + :class:`.String` type will assume that + input is to be passed as Python ``unicode`` objects, + and results returned as Python ``unicode`` objects. + If the DBAPI in use does not support Python unicode + (which is fewer and fewer these days), SQLAlchemy + will encode/decode the value, using the + value of the ``encoding`` parameter passed to + :func:`.create_engine` as the encoding. + + When using a DBAPI that natively supports Python + unicode objects, this flag generally does not + need to be set. For columns that are explicitly + intended to store non-ASCII data, the :class:`.Unicode` + or :class:`.UnicodeText` + types should be used regardless, which feature + the same behavior of ``convert_unicode`` but + also indicate an underlying column type that + directly supports unicode, such as ``NVARCHAR``. + + For the extremely rare case that Python ``unicode`` + is to be encoded/decoded by SQLAlchemy on a backend + that does natively support Python ``unicode``, + the value ``force`` can be passed here which will + cause SQLAlchemy's encode/decode services to be + used unconditionally. + + :param unicode_error: Optional, a method to use to handle Unicode + conversion errors. Behaves like the ``errors`` keyword argument to + the standard library's ``string.decode()`` functions. This flag + requires that ``convert_unicode`` is set to ``force`` - otherwise, + SQLAlchemy is not guaranteed to handle the task of unicode + conversion. 
Note that this flag adds significant performance + overhead to row-fetching operations for backends that already + return unicode objects natively (which most DBAPIs do). This + flag should only be used as a last resort for reading + strings from a column with varied or corrupted encodings. + + """ + if unicode_error is not None and convert_unicode != 'force': + raise exc.ArgumentError("convert_unicode must be 'force' " + "when unicode_error is set.") + + self.length = length + self.collation = collation + self.convert_unicode = convert_unicode + self.unicode_error = unicode_error + self._warn_on_bytestring = _warn_on_bytestring + + def literal_processor(self, dialect): + def process(value): + value = value.replace("'", "''") + return "'%s'" % value + return process + + def bind_processor(self, dialect): + if self.convert_unicode or dialect.convert_unicode: + if dialect.supports_unicode_binds and \ + self.convert_unicode != 'force': + if self._warn_on_bytestring: + def process(value): + if isinstance(value, util.binary_type): + util.warn_limited( + "Unicode type received non-unicode " + "bind param value %r.", + (util.ellipses_string(value),)) + return value + return process + else: + return None + else: + encoder = codecs.getencoder(dialect.encoding) + warn_on_bytestring = self._warn_on_bytestring + + def process(value): + if isinstance(value, util.text_type): + return encoder(value, self.unicode_error)[0] + elif warn_on_bytestring and value is not None: + util.warn_limited( + "Unicode type received non-unicode bind " + "param value %r.", + (util.ellipses_string(value),)) + return value + return process + else: + return None + + def result_processor(self, dialect, coltype): + wants_unicode = self.convert_unicode or dialect.convert_unicode + needs_convert = wants_unicode and \ + (dialect.returns_unicode_strings is not True or + self.convert_unicode in ('force', 'force_nocheck')) + needs_isinstance = ( + needs_convert and + dialect.returns_unicode_strings and + self.convert_unicode != 'force_nocheck' + ) + if needs_convert: + if needs_isinstance: + return processors.to_conditional_unicode_processor_factory( + dialect.encoding, self.unicode_error) + else: + return processors.to_unicode_processor_factory( + dialect.encoding, self.unicode_error) + else: + return None + + @property + def python_type(self): + if self.convert_unicode: + return util.text_type + else: + return str + + def get_dbapi_type(self, dbapi): + return dbapi.STRING + + +class Text(String): + + """A variably sized string type. + + In SQL, usually corresponds to CLOB or TEXT. Can also take Python + unicode objects and encode to the database's encoding in bind + params (and the reverse for result sets.) In general, TEXT objects + do not have a length; while some databases will accept a length + argument here, it will be rejected by others. + + """ + __visit_name__ = 'text' + + +class Unicode(String): + + """A variable length Unicode string type. + + The :class:`.Unicode` type is a :class:`.String` subclass + that assumes input and output as Python ``unicode`` data, + and in that regard is equivalent to the usage of the + ``convert_unicode`` flag with the :class:`.String` type. + However, unlike plain :class:`.String`, it also implies an + underlying column type that is explicitly supporting of non-ASCII + data, such as ``NVARCHAR`` on Oracle and SQL Server. 
+ This can impact the output of ``CREATE TABLE`` statements + and ``CAST`` functions at the dialect level, and can + also affect the handling of bound parameters in some + specific DBAPI scenarios. + + The encoding used by the :class:`.Unicode` type is usually + determined by the DBAPI itself; most modern DBAPIs + feature support for Python ``unicode`` objects as bound + values and result set values, and the encoding should + be configured as detailed in the notes for the target + DBAPI in the :ref:`dialect_toplevel` section. + + For those DBAPIs which do not support, or are not configured + to accommodate Python ``unicode`` objects + directly, SQLAlchemy does the encoding and decoding + outside of the DBAPI. The encoding in this scenario + is determined by the ``encoding`` flag passed to + :func:`.create_engine`. + + When using the :class:`.Unicode` type, it is only appropriate + to pass Python ``unicode`` objects, and not plain ``str``. + If a plain ``str`` is passed under Python 2, a warning + is emitted. If you notice your application emitting these warnings but + you're not sure of the source of them, the Python + ``warnings`` filter, documented at + http://docs.python.org/library/warnings.html, + can be used to turn these warnings into exceptions + which will illustrate a stack trace:: + + import warnings + warnings.simplefilter('error') + + For an application that wishes to pass plain bytestrings + and Python ``unicode`` objects to the ``Unicode`` type + equally, the bytestrings must first be decoded into + unicode. The recipe at :ref:`coerce_to_unicode` illustrates + how this is done. + + See also: + + :class:`.UnicodeText` - unlengthed textual counterpart + to :class:`.Unicode`. + + """ + + __visit_name__ = 'unicode' + + def __init__(self, length=None, **kwargs): + """ + Create a :class:`.Unicode` object. + + Parameters are the same as that of :class:`.String`, + with the exception that ``convert_unicode`` + defaults to ``True``. + + """ + kwargs.setdefault('convert_unicode', True) + kwargs.setdefault('_warn_on_bytestring', True) + super(Unicode, self).__init__(length=length, **kwargs) + + +class UnicodeText(Text): + + """An unbounded-length Unicode string type. + + See :class:`.Unicode` for details on the unicode + behavior of this object. + + Like :class:`.Unicode`, usage the :class:`.UnicodeText` type implies a + unicode-capable type being used on the backend, such as + ``NCLOB``, ``NTEXT``. + + """ + + __visit_name__ = 'unicode_text' + + def __init__(self, length=None, **kwargs): + """ + Create a Unicode-converting Text type. + + Parameters are the same as that of :class:`.Text`, + with the exception that ``convert_unicode`` + defaults to ``True``. 
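+
+        E.g., a minimal sketch::
+
+            from sqlalchemy import Column, UnicodeText
+
+            comments = Column('comments', UnicodeText())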
+ + """ + kwargs.setdefault('convert_unicode', True) + kwargs.setdefault('_warn_on_bytestring', True) + super(UnicodeText, self).__init__(length=length, **kwargs) + + +class Integer(_DateAffinity, TypeEngine): + + """A type for ``int`` integers.""" + + __visit_name__ = 'integer' + + def get_dbapi_type(self, dbapi): + return dbapi.NUMBER + + @property + def python_type(self): + return int + + def literal_processor(self, dialect): + def process(value): + return str(value) + return process + + @util.memoized_property + def _expression_adaptations(self): + # TODO: need a dictionary object that will + # handle operators generically here, this is incomplete + return { + operators.add: { + Date: Date, + Integer: self.__class__, + Numeric: Numeric, + }, + operators.mul: { + Interval: Interval, + Integer: self.__class__, + Numeric: Numeric, + }, + operators.div: { + Integer: self.__class__, + Numeric: Numeric, + }, + operators.truediv: { + Integer: self.__class__, + Numeric: Numeric, + }, + operators.sub: { + Integer: self.__class__, + Numeric: Numeric, + }, + } + + +class SmallInteger(Integer): + + """A type for smaller ``int`` integers. + + Typically generates a ``SMALLINT`` in DDL, and otherwise acts like + a normal :class:`.Integer` on the Python side. + + """ + + __visit_name__ = 'small_integer' + + +class BigInteger(Integer): + + """A type for bigger ``int`` integers. + + Typically generates a ``BIGINT`` in DDL, and otherwise acts like + a normal :class:`.Integer` on the Python side. + + """ + + __visit_name__ = 'big_integer' + + +class Numeric(_DateAffinity, TypeEngine): + + """A type for fixed precision numbers, such as ``NUMERIC`` or ``DECIMAL``. + + This type returns Python ``decimal.Decimal`` objects by default, unless + the :paramref:`.Numeric.asdecimal` flag is set to False, in which case + they are coerced to Python ``float`` objects. + + .. note:: + + The :class:`.Numeric` type is designed to receive data from a database + type that is explicitly known to be a decimal type + (e.g. ``DECIMAL``, ``NUMERIC``, others) and not a floating point + type (e.g. ``FLOAT``, ``REAL``, others). + If the database column on the server is in fact a floating-point type + type, such as ``FLOAT`` or ``REAL``, use the :class:`.Float` + type or a subclass, otherwise numeric coercion between + ``float``/``Decimal`` may or may not function as expected. + + .. note:: + + The Python ``decimal.Decimal`` class is generally slow + performing; cPython 3.3 has now switched to use the `cdecimal + `_ library natively. For + older Python versions, the ``cdecimal`` library can be patched + into any application where it will replace the ``decimal`` + library fully, however this needs to be applied globally and + before any other modules have been imported, as follows:: + + import sys + import cdecimal + sys.modules["decimal"] = cdecimal + + Note that the ``cdecimal`` and ``decimal`` libraries are **not + compatible with each other**, so patching ``cdecimal`` at the + global level is the only way it can be used effectively with + various DBAPIs that hardcode to import the ``decimal`` library. + + """ + + __visit_name__ = 'numeric' + + _default_decimal_return_scale = 10 + + def __init__(self, precision=None, scale=None, + decimal_return_scale=None, asdecimal=True): + """ + Construct a Numeric. + + :param precision: the numeric precision for use in DDL ``CREATE + TABLE``. + + :param scale: the numeric scale for use in DDL ``CREATE TABLE``. + + :param asdecimal: default True. 
Return whether or not
+          values should be sent as Python Decimal objects, or
+          as floats.  Different DBAPIs send one or the other based on
+          datatypes - the Numeric type will ensure that return values
+          are one or the other across DBAPIs consistently.
+
+        :param decimal_return_scale: Default scale to use when converting
+         from floats to Python decimals.  Floating point values will typically
+         be much longer due to decimal inaccuracy, and most floating point
+         database types don't have a notion of "scale", so by default the
+         float type looks for the first ten decimal places when converting.
+         Specifying this value will override that length.  Types which
+         do include an explicit ".scale" value, such as the base
+         :class:`.Numeric` as well as the MySQL float types, will use the
+         value of ".scale" as the default for decimal_return_scale, if not
+         otherwise specified.
+
+         .. versionadded:: 0.9.0
+
+        When using the ``Numeric`` type, care should be taken to ensure
+        that the asdecimal setting is appropriate for the DBAPI in use -
+        when Numeric applies a conversion from Decimal->float or float->
+        Decimal, this conversion incurs an additional performance overhead
+        for all result columns received.
+
+        DBAPIs that return Decimal natively (e.g. psycopg2) will have
+        better accuracy and higher performance with a setting of ``True``,
+        as the native translation to Decimal reduces the amount of floating-
+        point issues at play, and the Numeric type itself doesn't need
+        to apply any further conversions.  However, another DBAPI which
+        returns floats natively *will* incur an additional conversion
+        overhead, and is still subject to floating point data loss - in
+        which case ``asdecimal=False`` will at least remove the extra
+        conversion overhead.
+
+        """
+        self.precision = precision
+        self.scale = scale
+        self.decimal_return_scale = decimal_return_scale
+        self.asdecimal = asdecimal
+
+    @property
+    def _effective_decimal_return_scale(self):
+        if self.decimal_return_scale is not None:
+            return self.decimal_return_scale
+        elif getattr(self, "scale", None) is not None:
+            return self.scale
+        else:
+            return self._default_decimal_return_scale
+
+    def get_dbapi_type(self, dbapi):
+        return dbapi.NUMBER
+
+    def literal_processor(self, dialect):
+        def process(value):
+            return str(value)
+        return process
+
+    @property
+    def python_type(self):
+        if self.asdecimal:
+            return decimal.Decimal
+        else:
+            return float
+
+    def bind_processor(self, dialect):
+        if dialect.supports_native_decimal:
+            return None
+        else:
+            return processors.to_float
+
+    def result_processor(self, dialect, coltype):
+        if self.asdecimal:
+            if dialect.supports_native_decimal:
+                # we're a "numeric", DBAPI will give us Decimal directly
+                return None
+            else:
+                util.warn('Dialect %s+%s does *not* support Decimal '
+                          'objects natively, and SQLAlchemy must '
+                          'convert from floating point - rounding '
+                          'errors and other issues may occur. Please '
+                          'consider storing Decimal numbers as strings '
+                          'or integers on this platform for lossless '
+                          'storage.' % (dialect.name, dialect.driver))
+
+                # we're a "numeric", DBAPI returns floats, convert.
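+                # (descriptive note: the factory below coerces each
+                # float to Decimal, using self.scale digits when set,
+                # else the ten-digit _default_decimal_return_scale.)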
+ return processors.to_decimal_processor_factory( + decimal.Decimal, + self.scale if self.scale is not None + else self._default_decimal_return_scale) + else: + if dialect.supports_native_decimal: + return processors.to_float + else: + return None + + @util.memoized_property + def _expression_adaptations(self): + return { + operators.mul: { + Interval: Interval, + Numeric: self.__class__, + Integer: self.__class__, + }, + operators.div: { + Numeric: self.__class__, + Integer: self.__class__, + }, + operators.truediv: { + Numeric: self.__class__, + Integer: self.__class__, + }, + operators.add: { + Numeric: self.__class__, + Integer: self.__class__, + }, + operators.sub: { + Numeric: self.__class__, + Integer: self.__class__, + } + } + + +class Float(Numeric): + + """Type representing floating point types, such as ``FLOAT`` or ``REAL``. + + This type returns Python ``float`` objects by default, unless the + :paramref:`.Float.asdecimal` flag is set to True, in which case they + are coerced to ``decimal.Decimal`` objects. + + .. note:: + + The :class:`.Float` type is designed to receive data from a database + type that is explicitly known to be a floating point type + (e.g. ``FLOAT``, ``REAL``, others) + and not a decimal type (e.g. ``DECIMAL``, ``NUMERIC``, others). + If the database column on the server is in fact a Numeric + type, such as ``DECIMAL`` or ``NUMERIC``, use the :class:`.Numeric` + type or a subclass, otherwise numeric coercion between + ``float``/``Decimal`` may or may not function as expected. + + """ + + __visit_name__ = 'float' + + scale = None + + def __init__(self, precision=None, asdecimal=False, + decimal_return_scale=None, **kwargs): + """ + Construct a Float. + + :param precision: the numeric precision for use in DDL ``CREATE + TABLE``. + + :param asdecimal: the same flag as that of :class:`.Numeric`, but + defaults to ``False``. Note that setting this flag to ``True`` + results in floating point conversion. + + :param decimal_return_scale: Default scale to use when converting + from floats to Python decimals. Floating point values will typically + be much longer due to decimal inaccuracy, and most floating point + database types don't have a notion of "scale", so by default the + float type looks for the first ten decimal places when converting. + Specfiying this value will override that length. Note that the + MySQL float types, which do include "scale", will use "scale" + as the default for decimal_return_scale, if not otherwise specified. + + .. versionadded:: 0.9.0 + + :param \**kwargs: deprecated. Additional arguments here are ignored + by the default :class:`.Float` type. For database specific + floats that support additional arguments, see that dialect's + documentation for details, such as + :class:`sqlalchemy.dialects.mysql.FLOAT`. 
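+
+        E.g., a minimal sketch::
+
+            from sqlalchemy import Column, Float
+
+            price = Column('price', Float(precision=10))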
+ + """ + self.precision = precision + self.asdecimal = asdecimal + self.decimal_return_scale = decimal_return_scale + if kwargs: + util.warn_deprecated("Additional keyword arguments " + "passed to Float ignored.") + + def result_processor(self, dialect, coltype): + if self.asdecimal: + return processors.to_decimal_processor_factory( + decimal.Decimal, + self._effective_decimal_return_scale) + else: + return None + + @util.memoized_property + def _expression_adaptations(self): + return { + operators.mul: { + Interval: Interval, + Numeric: self.__class__, + }, + operators.div: { + Numeric: self.__class__, + }, + operators.truediv: { + Numeric: self.__class__, + }, + operators.add: { + Numeric: self.__class__, + }, + operators.sub: { + Numeric: self.__class__, + } + } + + +class DateTime(_DateAffinity, TypeEngine): + + """A type for ``datetime.datetime()`` objects. + + Date and time types return objects from the Python ``datetime`` + module. Most DBAPIs have built in support for the datetime + module, with the noted exception of SQLite. In the case of + SQLite, date and time types are stored as strings which are then + converted back to datetime objects when rows are returned. + + """ + + __visit_name__ = 'datetime' + + def __init__(self, timezone=False): + """Construct a new :class:`.DateTime`. + + :param timezone: boolean. If True, and supported by the + backend, will produce 'TIMESTAMP WITH TIMEZONE'. For backends + that don't support timezone aware timestamps, has no + effect. + + """ + self.timezone = timezone + + def get_dbapi_type(self, dbapi): + return dbapi.DATETIME + + @property + def python_type(self): + return dt.datetime + + @util.memoized_property + def _expression_adaptations(self): + return { + operators.add: { + Interval: self.__class__, + }, + operators.sub: { + Interval: self.__class__, + DateTime: Interval, + }, + } + + +class Date(_DateAffinity, TypeEngine): + + """A type for ``datetime.date()`` objects.""" + + __visit_name__ = 'date' + + def get_dbapi_type(self, dbapi): + return dbapi.DATETIME + + @property + def python_type(self): + return dt.date + + @util.memoized_property + def _expression_adaptations(self): + return { + operators.add: { + Integer: self.__class__, + Interval: DateTime, + Time: DateTime, + }, + operators.sub: { + # date - integer = date + Integer: self.__class__, + + # date - date = integer. 
+                Date: Integer,
+
+                Interval: DateTime,
+
+                # date - datetime = interval,
+                # this one is not in the PG docs
+                # but works
+                DateTime: Interval,
+            },
+        }
+
+
+class Time(_DateAffinity, TypeEngine):
+
+    """A type for ``datetime.time()`` objects."""
+
+    __visit_name__ = 'time'
+
+    def __init__(self, timezone=False):
+        self.timezone = timezone
+
+    def get_dbapi_type(self, dbapi):
+        return dbapi.DATETIME
+
+    @property
+    def python_type(self):
+        return dt.time
+
+    @util.memoized_property
+    def _expression_adaptations(self):
+        return {
+            operators.add: {
+                Date: DateTime,
+                Interval: self.__class__
+            },
+            operators.sub: {
+                Time: Interval,
+                Interval: self.__class__,
+            },
+        }
+
+
+class _Binary(TypeEngine):
+
+    """Define base behavior for binary types."""
+
+    def __init__(self, length=None):
+        self.length = length
+
+    def literal_processor(self, dialect):
+        def process(value):
+            value = value.decode(dialect.encoding).replace("'", "''")
+            return "'%s'" % value
+        return process
+
+    @property
+    def python_type(self):
+        return util.binary_type
+
+    # Python 3 - sqlite3 doesn't need the `Binary` conversion
+    # here, though pg8000 does to indicate "bytea"
+    def bind_processor(self, dialect):
+        if dialect.dbapi is None:
+            return None
+
+        DBAPIBinary = dialect.dbapi.Binary
+
+        def process(value):
+            if value is not None:
+                return DBAPIBinary(value)
+            else:
+                return None
+        return process
+
+    # Python 3 has native bytes() type
+    # both sqlite3 and pg8000 seem to return it,
+    # psycopg2 as of 2.5 returns 'memoryview'
+    if util.py2k:
+        def result_processor(self, dialect, coltype):
+            if util.jython:
+                def process(value):
+                    if value is not None:
+                        if isinstance(value, array.array):
+                            return value.tostring()
+                        return str(value)
+                    else:
+                        return None
+            else:
+                process = processors.to_str
+            return process
+    else:
+        def result_processor(self, dialect, coltype):
+            def process(value):
+                if value is not None:
+                    value = bytes(value)
+                return value
+            return process
+
+    def coerce_compared_value(self, op, value):
+        """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
+
+        if isinstance(value, util.string_types):
+            return self
+        else:
+            return super(_Binary, self).coerce_compared_value(op, value)
+
+    def get_dbapi_type(self, dbapi):
+        return dbapi.BINARY
+
+
+class LargeBinary(_Binary):
+
+    """A type for large binary byte data.
+
+    The :class:`.LargeBinary` type corresponds to a large and/or unlengthed
+    binary type for the target platform, such as BLOB on MySQL and BYTEA for
+    Postgresql.  It also handles the necessary conversions for the DBAPI.
+
+    """
+
+    __visit_name__ = 'large_binary'
+
+    def __init__(self, length=None):
+        """
+        Construct a LargeBinary type.
+
+        :param length: optional, a length for the column for use in
+          DDL statements, for those binary types that accept a length,
+          such as the MySQL BLOB type.
+
+        """
+        _Binary.__init__(self, length=length)
+
+
+class Binary(LargeBinary):
+
+    """Deprecated.  Renamed to LargeBinary."""
+
+    def __init__(self, *arg, **kw):
+        util.warn_deprecated('The Binary type has been renamed to '
+                             'LargeBinary.')
+        LargeBinary.__init__(self, *arg, **kw)
+
+
+class SchemaType(SchemaEventTarget):
+
+    """Mark a type as possibly requiring schema-level DDL for usage.
+
+    Supports types that must be explicitly created/dropped (i.e. PG ENUM type)
+    as well as types that are complemented by table or schema level
+    constraints, triggers, and other rules.
+ + :class:`.SchemaType` classes can also be targets for the + :meth:`.DDLEvents.before_parent_attach` and + :meth:`.DDLEvents.after_parent_attach` events, where the events fire off + surrounding the association of the type object with a parent + :class:`.Column`. + + .. seealso:: + + :class:`.Enum` + + :class:`.Boolean` + + + """ + + def __init__(self, name=None, schema=None, metadata=None, + inherit_schema=False, quote=None, _create_events=True): + if name is not None: + self.name = quoted_name(name, quote) + else: + self.name = None + self.schema = schema + self.metadata = metadata + self.inherit_schema = inherit_schema + self._create_events = _create_events + + if _create_events and self.metadata: + event.listen( + self.metadata, + "before_create", + util.portable_instancemethod(self._on_metadata_create) + ) + event.listen( + self.metadata, + "after_drop", + util.portable_instancemethod(self._on_metadata_drop) + ) + + def _set_parent(self, column): + column._on_table_attach(util.portable_instancemethod(self._set_table)) + + def _set_table(self, column, table): + if self.inherit_schema: + self.schema = table.schema + + if not self._create_events: + return + + event.listen( + table, + "before_create", + util.portable_instancemethod( + self._on_table_create) + ) + event.listen( + table, + "after_drop", + util.portable_instancemethod(self._on_table_drop) + ) + if self.metadata is None: + # TODO: what's the difference between self.metadata + # and table.metadata here ? + event.listen( + table.metadata, + "before_create", + util.portable_instancemethod(self._on_metadata_create) + ) + event.listen( + table.metadata, + "after_drop", + util.portable_instancemethod(self._on_metadata_drop) + ) + + def copy(self, **kw): + return self.adapt(self.__class__, _create_events=True) + + def adapt(self, impltype, **kw): + schema = kw.pop('schema', self.schema) + metadata = kw.pop('metadata', self.metadata) + _create_events = kw.pop('_create_events', False) + + return impltype(name=self.name, + schema=schema, + inherit_schema=self.inherit_schema, + metadata=metadata, + _create_events=_create_events, + **kw) + + @property + def bind(self): + return self.metadata and self.metadata.bind or None + + def create(self, bind=None, checkfirst=False): + """Issue CREATE ddl for this type, if applicable.""" + + if bind is None: + bind = _bind_or_error(self) + t = self.dialect_impl(bind.dialect) + if t.__class__ is not self.__class__ and isinstance(t, SchemaType): + t.create(bind=bind, checkfirst=checkfirst) + + def drop(self, bind=None, checkfirst=False): + """Issue DROP ddl for this type, if applicable.""" + + if bind is None: + bind = _bind_or_error(self) + t = self.dialect_impl(bind.dialect) + if t.__class__ is not self.__class__ and isinstance(t, SchemaType): + t.drop(bind=bind, checkfirst=checkfirst) + + def _on_table_create(self, target, bind, **kw): + t = self.dialect_impl(bind.dialect) + if t.__class__ is not self.__class__ and isinstance(t, SchemaType): + t._on_table_create(target, bind, **kw) + + def _on_table_drop(self, target, bind, **kw): + t = self.dialect_impl(bind.dialect) + if t.__class__ is not self.__class__ and isinstance(t, SchemaType): + t._on_table_drop(target, bind, **kw) + + def _on_metadata_create(self, target, bind, **kw): + t = self.dialect_impl(bind.dialect) + if t.__class__ is not self.__class__ and isinstance(t, SchemaType): + t._on_metadata_create(target, bind, **kw) + + def _on_metadata_drop(self, target, bind, **kw): + t = self.dialect_impl(bind.dialect) + if t.__class__ is not 
self.__class__ and isinstance(t, SchemaType): + t._on_metadata_drop(target, bind, **kw) + + +class Enum(String, SchemaType): + + """Generic Enum Type. + + The Enum type provides a set of possible string values which the + column is constrained towards. + + By default, uses the backend's native ENUM type if available, + else uses VARCHAR + a CHECK constraint. + + .. seealso:: + + :class:`~.postgresql.ENUM` - PostgreSQL-specific type, + which has additional functionality. + + """ + + __visit_name__ = 'enum' + + def __init__(self, *enums, **kw): + """Construct an enum. + + Keyword arguments which don't apply to a specific backend are ignored + by that backend. + + :param \*enums: string or unicode enumeration labels. If unicode + labels are present, the `convert_unicode` flag is auto-enabled. + + :param convert_unicode: Enable unicode-aware bind parameter and + result-set processing for this Enum's data. This is set + automatically based on the presence of unicode label strings. + + :param metadata: Associate this type directly with a ``MetaData`` + object. For types that exist on the target database as an + independent schema construct (Postgresql), this type will be + created and dropped within ``create_all()`` and ``drop_all()`` + operations. If the type is not associated with any ``MetaData`` + object, it will associate itself with each ``Table`` in which it is + used, and will be created when any of those individual tables are + created, after a check is performed for its existence. The type is + only dropped when ``drop_all()`` is called for that ``Table`` + object's metadata, however. + + :param name: The name of this type. This is required for Postgresql + and any future supported database which requires an explicitly + named type, or an explicitly named constraint in order to generate + the type and/or a table that uses it. + + :param native_enum: Use the database's native ENUM type when + available. Defaults to True. When False, uses VARCHAR + check + constraint for all backends. + + :param schema: Schema name of this type. For types that exist on the + target database as an independent schema construct (Postgresql), + this parameter specifies the named schema in which the type is + present. + + .. note:: + + The ``schema`` of the :class:`.Enum` type does not + by default make use of the ``schema`` established on the + owning :class:`.Table`. If this behavior is desired, + set the ``inherit_schema`` flag to ``True``. + + :param quote: Set explicit quoting preferences for the type's name. + + :param inherit_schema: When ``True``, the "schema" from the owning + :class:`.Table` will be copied to the "schema" attribute of this + :class:`.Enum`, replacing whatever value was passed for the + ``schema`` attribute. This also takes effect when using the + :meth:`.Table.tometadata` operation. + + .. 
versionadded:: 0.8
+
+        """
+        self.enums = enums
+        self.native_enum = kw.pop('native_enum', True)
+        convert_unicode = kw.pop('convert_unicode', None)
+        if convert_unicode is None:
+            for e in enums:
+                if isinstance(e, util.text_type):
+                    convert_unicode = True
+                    break
+            else:
+                convert_unicode = False
+
+        if self.enums:
+            length = max(len(x) for x in self.enums)
+        else:
+            length = 0
+        String.__init__(self,
+                        length=length,
+                        convert_unicode=convert_unicode,
+                        )
+        SchemaType.__init__(self, **kw)
+
+    def __repr__(self):
+        return util.generic_repr(self,
+                                 additional_kw=[('native_enum', True)],
+                                 to_inspect=[Enum, SchemaType],
+                                 )
+
+    def _should_create_constraint(self, compiler):
+        return not self.native_enum or \
+            not compiler.dialect.supports_native_enum
+
+    @util.dependencies("sqlalchemy.sql.schema")
+    def _set_table(self, schema, column, table):
+        if self.native_enum:
+            SchemaType._set_table(self, column, table)
+
+        e = schema.CheckConstraint(
+            type_coerce(column, self).in_(self.enums),
+            name=_defer_name(self.name),
+            _create_rule=util.portable_instancemethod(
+                self._should_create_constraint),
+            _type_bound=True
+        )
+        assert e.table is table
+
+    def adapt(self, impltype, **kw):
+        schema = kw.pop('schema', self.schema)
+        metadata = kw.pop('metadata', self.metadata)
+        _create_events = kw.pop('_create_events', False)
+        if issubclass(impltype, Enum):
+            return impltype(name=self.name,
+                            schema=schema,
+                            metadata=metadata,
+                            convert_unicode=self.convert_unicode,
+                            native_enum=self.native_enum,
+                            inherit_schema=self.inherit_schema,
+                            _create_events=_create_events,
+                            *self.enums,
+                            **kw)
+        else:
+            # TODO: why would we be here?
+            return super(Enum, self).adapt(impltype, **kw)
+
+
+class PickleType(TypeDecorator):
+
+    """Holds Python objects, which are serialized using pickle.
+
+    PickleType builds upon the Binary type to apply Python's
+    ``pickle.dumps()`` to incoming objects, and ``pickle.loads()`` on
+    the way out, allowing any pickleable Python object to be stored as
+    a serialized binary field.
+
+    To allow ORM change events to propagate for elements associated
+    with :class:`.PickleType`, see :ref:`mutable_toplevel`.
+
+    """
+
+    impl = LargeBinary
+
+    def __init__(self, protocol=pickle.HIGHEST_PROTOCOL,
+                 pickler=None, comparator=None):
+        """
+        Construct a PickleType.
+
+        :param protocol: defaults to ``pickle.HIGHEST_PROTOCOL``.
+
+        :param pickler: defaults to cPickle.pickle or pickle.pickle if
+          cPickle is not available.  May be any object with
+          pickle-compatible ``dumps`` and ``loads`` methods.
+
+        :param comparator: a 2-arg callable predicate used
+          to compare values of this type.  If left as ``None``,
+          the Python "equals" operator is used to compare values.
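+
+        A minimal usage sketch (the column name is hypothetical)::
+
+            from sqlalchemy import Column, PickleType
+
+            # any pickleable object is serialized on the way in
+            # and deserialized on the way out
+            data = Column('data', PickleType())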
+ + """ + self.protocol = protocol + self.pickler = pickler or pickle + self.comparator = comparator + super(PickleType, self).__init__() + + def __reduce__(self): + return PickleType, (self.protocol, + None, + self.comparator) + + def bind_processor(self, dialect): + impl_processor = self.impl.bind_processor(dialect) + dumps = self.pickler.dumps + protocol = self.protocol + if impl_processor: + def process(value): + if value is not None: + value = dumps(value, protocol) + return impl_processor(value) + else: + def process(value): + if value is not None: + value = dumps(value, protocol) + return value + return process + + def result_processor(self, dialect, coltype): + impl_processor = self.impl.result_processor(dialect, coltype) + loads = self.pickler.loads + if impl_processor: + def process(value): + value = impl_processor(value) + if value is None: + return None + return loads(value) + else: + def process(value): + if value is None: + return None + return loads(value) + return process + + def compare_values(self, x, y): + if self.comparator: + return self.comparator(x, y) + else: + return x == y + + +class Boolean(TypeEngine, SchemaType): + + """A bool datatype. + + Boolean typically uses BOOLEAN or SMALLINT on the DDL side, and on + the Python side deals in ``True`` or ``False``. + + """ + + __visit_name__ = 'boolean' + + def __init__( + self, create_constraint=True, name=None, _create_events=True): + """Construct a Boolean. + + :param create_constraint: defaults to True. If the boolean + is generated as an int/smallint, also create a CHECK constraint + on the table that ensures 1 or 0 as a value. + + :param name: if a CHECK constraint is generated, specify + the name of the constraint. + + """ + self.create_constraint = create_constraint + self.name = name + self._create_events = _create_events + + def _should_create_constraint(self, compiler): + return not compiler.dialect.supports_native_boolean + + @util.dependencies("sqlalchemy.sql.schema") + def _set_table(self, schema, column, table): + if not self.create_constraint: + return + + e = schema.CheckConstraint( + type_coerce(column, self).in_([0, 1]), + name=_defer_name(self.name), + _create_rule=util.portable_instancemethod( + self._should_create_constraint), + _type_bound=True + ) + assert e.table is table + + @property + def python_type(self): + return bool + + def literal_processor(self, dialect): + if dialect.supports_native_boolean: + def process(value): + return "true" if value else "false" + else: + def process(value): + return str(1 if value else 0) + return process + + def bind_processor(self, dialect): + if dialect.supports_native_boolean: + return None + else: + return processors.boolean_to_int + + def result_processor(self, dialect, coltype): + if dialect.supports_native_boolean: + return None + else: + return processors.int_to_boolean + + +class Interval(_DateAffinity, TypeDecorator): + + """A type for ``datetime.timedelta()`` objects. + + The Interval type deals with ``datetime.timedelta`` objects. In + PostgreSQL, the native ``INTERVAL`` type is used; for others, the + value is stored as a date which is relative to the "epoch" + (Jan. 1, 1970). + + Note that the ``Interval`` type does not currently provide date arithmetic + operations on platforms which do not support interval types natively. 
Such + operations usually require transformation of both sides of the expression + (such as, conversion of both sides into integer epoch values first) which + currently is a manual procedure (such as via + :attr:`~sqlalchemy.sql.expression.func`). + + """ + + impl = DateTime + epoch = dt.datetime.utcfromtimestamp(0) + + def __init__(self, native=True, + second_precision=None, + day_precision=None): + """Construct an Interval object. + + :param native: when True, use the actual + INTERVAL type provided by the database, if + supported (currently Postgresql, Oracle). + Otherwise, represent the interval data as + an epoch value regardless. + + :param second_precision: For native interval types + which support a "fractional seconds precision" parameter, + i.e. Oracle and Postgresql + + :param day_precision: for native interval types which + support a "day precision" parameter, i.e. Oracle. + + """ + super(Interval, self).__init__() + self.native = native + self.second_precision = second_precision + self.day_precision = day_precision + + def adapt(self, cls, **kw): + if self.native and hasattr(cls, '_adapt_from_generic_interval'): + return cls._adapt_from_generic_interval(self, **kw) + else: + return self.__class__( + native=self.native, + second_precision=self.second_precision, + day_precision=self.day_precision, + **kw) + + @property + def python_type(self): + return dt.timedelta + + def bind_processor(self, dialect): + impl_processor = self.impl.bind_processor(dialect) + epoch = self.epoch + if impl_processor: + def process(value): + if value is not None: + value = epoch + value + return impl_processor(value) + else: + def process(value): + if value is not None: + value = epoch + value + return value + return process + + def result_processor(self, dialect, coltype): + impl_processor = self.impl.result_processor(dialect, coltype) + epoch = self.epoch + if impl_processor: + def process(value): + value = impl_processor(value) + if value is None: + return None + return value - epoch + else: + def process(value): + if value is None: + return None + return value - epoch + return process + + @util.memoized_property + def _expression_adaptations(self): + return { + operators.add: { + Date: DateTime, + Interval: self.__class__, + DateTime: DateTime, + Time: Time, + }, + operators.sub: { + Interval: self.__class__ + }, + operators.mul: { + Numeric: self.__class__ + }, + operators.truediv: { + Numeric: self.__class__ + }, + operators.div: { + Numeric: self.__class__ + } + } + + @property + def _type_affinity(self): + return Interval + + def coerce_compared_value(self, op, value): + """See :meth:`.TypeEngine.coerce_compared_value` for a description.""" + + return self.impl.coerce_compared_value(op, value) + + +class REAL(Float): + + """The SQL REAL type.""" + + __visit_name__ = 'REAL' + + +class FLOAT(Float): + + """The SQL FLOAT type.""" + + __visit_name__ = 'FLOAT' + + +class NUMERIC(Numeric): + + """The SQL NUMERIC type.""" + + __visit_name__ = 'NUMERIC' + + +class DECIMAL(Numeric): + + """The SQL DECIMAL type.""" + + __visit_name__ = 'DECIMAL' + + +class INTEGER(Integer): + + """The SQL INT or INTEGER type.""" + + __visit_name__ = 'INTEGER' +INT = INTEGER + + +class SMALLINT(SmallInteger): + + """The SQL SMALLINT type.""" + + __visit_name__ = 'SMALLINT' + + +class BIGINT(BigInteger): + + """The SQL BIGINT type.""" + + __visit_name__ = 'BIGINT' + + +class TIMESTAMP(DateTime): + + """The SQL TIMESTAMP type.""" + + __visit_name__ = 'TIMESTAMP' + + def get_dbapi_type(self, dbapi): + return 
dbapi.TIMESTAMP + + +class DATETIME(DateTime): + + """The SQL DATETIME type.""" + + __visit_name__ = 'DATETIME' + + +class DATE(Date): + + """The SQL DATE type.""" + + __visit_name__ = 'DATE' + + +class TIME(Time): + + """The SQL TIME type.""" + + __visit_name__ = 'TIME' + + +class TEXT(Text): + + """The SQL TEXT type.""" + + __visit_name__ = 'TEXT' + + +class CLOB(Text): + + """The CLOB type. + + This type is found in Oracle and Informix. + """ + + __visit_name__ = 'CLOB' + + +class VARCHAR(String): + + """The SQL VARCHAR type.""" + + __visit_name__ = 'VARCHAR' + + +class NVARCHAR(Unicode): + + """The SQL NVARCHAR type.""" + + __visit_name__ = 'NVARCHAR' + + +class CHAR(String): + + """The SQL CHAR type.""" + + __visit_name__ = 'CHAR' + + +class NCHAR(Unicode): + + """The SQL NCHAR type.""" + + __visit_name__ = 'NCHAR' + + +class BLOB(LargeBinary): + + """The SQL BLOB type.""" + + __visit_name__ = 'BLOB' + + +class BINARY(_Binary): + + """The SQL BINARY type.""" + + __visit_name__ = 'BINARY' + + +class VARBINARY(_Binary): + + """The SQL VARBINARY type.""" + + __visit_name__ = 'VARBINARY' + + +class BOOLEAN(Boolean): + + """The SQL BOOLEAN type.""" + + __visit_name__ = 'BOOLEAN' + + +class NullType(TypeEngine): + + """An unknown type. + + :class:`.NullType` is used as a default type for those cases where + a type cannot be determined, including: + + * During table reflection, when the type of a column is not recognized + by the :class:`.Dialect` + * When constructing SQL expressions using plain Python objects of + unknown types (e.g. ``somecolumn == my_special_object``) + * When a new :class:`.Column` is created, and the given type is passed + as ``None`` or is not passed at all. + + The :class:`.NullType` can be used within SQL expression invocation + without issue, it just has no behavior either at the expression + construction level or at the bind-parameter/result processing level. + :class:`.NullType` will result in a :exc:`.CompileError` if the compiler + is asked to render the type itself, such as if it is used in a + :func:`.cast` operation or within a schema creation operation such as that + invoked by :meth:`.MetaData.create_all` or the :class:`.CreateTable` + construct. + + """ + __visit_name__ = 'null' + + _isnull = True + + def literal_processor(self, dialect): + def process(value): + return "NULL" + return process + + class Comparator(TypeEngine.Comparator): + + def _adapt_expression(self, op, other_comparator): + if isinstance(other_comparator, NullType.Comparator) or \ + not operators.is_commutative(op): + return op, self.expr.type + else: + return other_comparator._adapt_expression(op, self) + comparator_factory = Comparator + + +class MatchType(Boolean): + """Refers to the return type of the MATCH operator. + + As the :meth:`.ColumnOperators.match` is probably the most open-ended + operator in generic SQLAlchemy Core, we can't assume the return type + at SQL evaluation time, as MySQL returns a floating point, not a boolean, + and other backends might do something different. So this type + acts as a placeholder, currently subclassing :class:`.Boolean`. + The type allows dialects to inject result-processing functionality + if needed, and on MySQL will return floating-point values. + + .. 
versionadded:: 1.0.0
+
+    """
+
+NULLTYPE = NullType()
+BOOLEANTYPE = Boolean()
+STRINGTYPE = String()
+INTEGERTYPE = Integer()
+MATCHTYPE = MatchType()
+
+_type_map = {
+    int: Integer(),
+    float: Numeric(),
+    bool: BOOLEANTYPE,
+    decimal.Decimal: Numeric(),
+    dt.date: Date(),
+    dt.datetime: DateTime(),
+    dt.time: Time(),
+    dt.timedelta: Interval(),
+    util.NoneType: NULLTYPE
+}
+
+if util.py3k:
+    _type_map[bytes] = LargeBinary()
+    _type_map[str] = Unicode()
+else:
+    _type_map[unicode] = Unicode()
+    _type_map[str] = String()
+
+
+# back-assign to type_api
+from . import type_api
+type_api.BOOLEANTYPE = BOOLEANTYPE
+type_api.STRINGTYPE = STRINGTYPE
+type_api.INTEGERTYPE = INTEGERTYPE
+type_api.NULLTYPE = NULLTYPE
+type_api.MATCHTYPE = MATCHTYPE
+type_api._type_map = _type_map
+
+TypeEngine.Comparator.BOOLEANTYPE = BOOLEANTYPE
diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/type_api.py b/lib/python3.4/site-packages/sqlalchemy/sql/type_api.py
new file mode 100644
index 0000000..2997a8d
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/sql/type_api.py
@@ -0,0 +1,1186 @@
+# sql/type_api.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Base types API.
+
+"""
+
+
+from .. import exc, util
+from . import operators
+from .visitors import Visitable, VisitableType
+
+# these are back-assigned by sqltypes.
+BOOLEANTYPE = None
+INTEGERTYPE = None
+NULLTYPE = None
+STRINGTYPE = None
+MATCHTYPE = None
+
+
+class TypeEngine(Visitable):
+    """The ultimate base class for all SQL datatypes.
+
+    Common subclasses of :class:`.TypeEngine` include
+    :class:`.String`, :class:`.Integer`, and :class:`.Boolean`.
+
+    For an overview of the SQLAlchemy typing system, see
+    :ref:`types_toplevel`.
+
+    .. seealso::
+
+        :ref:`types_toplevel`
+
+    """
+
+    _sqla_type = True
+    _isnull = False
+
+    class Comparator(operators.ColumnOperators):
+        """Base class for custom comparison operations defined at the
+        type level.  See :attr:`.TypeEngine.comparator_factory`.
+
+
+        """
+        __slots__ = 'expr', 'type'
+
+        default_comparator = None
+
+        def __init__(self, expr):
+            self.expr = expr
+            self.type = expr.type
+
+        @util.dependencies('sqlalchemy.sql.default_comparator')
+        def operate(self, default_comparator, op, *other, **kwargs):
+            o = default_comparator.operator_lookup[op.__name__]
+            return o[0](self.expr, op, *(other + o[1:]), **kwargs)
+
+        @util.dependencies('sqlalchemy.sql.default_comparator')
+        def reverse_operate(self, default_comparator, op, other, **kwargs):
+            o = default_comparator.operator_lookup[op.__name__]
+            return o[0](self.expr, op, other,
+                        reverse=True, *o[1:], **kwargs)
+
+        def _adapt_expression(self, op, other_comparator):
+            """evaluate the return type of <self> <op> <othertype>,
+            and apply any adaptations to the given operator.
+
+            This method determines the type of a resulting binary expression
+            given two source types and an operator.  For example, two
+            :class:`.Column` objects, both of the type :class:`.Integer`, will
+            produce a :class:`.BinaryExpression` that also has the type
+            :class:`.Integer` when compared via the addition (``+``) operator.
+            However, using the addition operator with an :class:`.Integer`
+            and a :class:`.Date` object will produce a :class:`.Date`, assuming
+            "days delta" behavior by the database (in reality, most databases
+            other than Postgresql don't accept this particular operation).
+
+            The method returns a tuple of the form <operator>, <type>.
+            The resulting operator and type will be those applied to the
+            resulting :class:`.BinaryExpression` as the final operator and the
+            right-hand side of the expression.
+
+            Note that only a subset of operators make usage of
+            :meth:`._adapt_expression`,
+            including math operators and user-defined operators, but not
+            boolean comparison or special SQL keywords like MATCH or BETWEEN.
+
+            """
+            return op, other_comparator.type
+
+        def __reduce__(self):
+            return _reconstitute_comparator, (self.expr, )
+
+    hashable = True
+    """Flag, if False, means values from this type aren't hashable.
+
+    Used by the ORM when uniquing result lists.
+
+    """
+
+    comparator_factory = Comparator
+    """A :class:`.TypeEngine.Comparator` class which will apply
+    to operations performed by owning :class:`.ColumnElement` objects.
+
+    The :attr:`.comparator_factory` attribute is a hook consulted by
+    the core expression system when column and SQL expression operations
+    are performed.  When a :class:`.TypeEngine.Comparator` class is
+    associated with this attribute, it allows custom re-definition of
+    all existing operators, as well as definition of new operators.
+    Existing operators include those provided by Python operator overloading
+    such as :meth:`.operators.ColumnOperators.__add__` and
+    :meth:`.operators.ColumnOperators.__eq__`,
+    those provided as standard
+    attributes of :class:`.operators.ColumnOperators` such as
+    :meth:`.operators.ColumnOperators.like`
+    and :meth:`.operators.ColumnOperators.in_`.
+
+    Rudimentary usage of this hook is allowed through simple subclassing
+    of existing types, or alternatively by using :class:`.TypeDecorator`.
+    See the documentation section :ref:`types_operators` for examples.
+
+    .. versionadded:: 0.8  The expression system was enhanced to support
+      customization of operators on a per-type level.
+
+    """
+
+    def compare_against_backend(self, dialect, conn_type):
+        """Compare this type against the given backend type.
+
+        This function is currently not implemented for SQLAlchemy
+        types, and for all built in types will return ``None``.  However,
+        it can be implemented by a user-defined type
+        where it can be consumed by schema comparison tools such as
+        Alembic autogenerate.
+
+        A future release of SQLAlchemy will potentially implement this method
+        for builtin types as well.
+
+        The function should return True if this type is equivalent to the
+        given type; the type is typically reflected from the database
+        so should be database specific.  The dialect in use is also
+        passed.  It can also return False to assert that the type is
+        not equivalent.
+
+        :param dialect: a :class:`.Dialect` that is involved in the comparison.
+
+        :param conn_type: the type object reflected from the backend.
+
+        .. versionadded:: 1.0.3
+
+        """
+        return None
+
+    def copy_value(self, value):
+        return value
+
+    def literal_processor(self, dialect):
+        """Return a conversion function for processing literal values that are
+        to be rendered directly without using binds.
+
+        This function is used when the compiler makes use of the
+        "literal_binds" flag, typically used in DDL generation as well
+        as in certain scenarios where backends don't accept bound parameters.
+
+        .. versionadded:: 0.9.0
+
+        """
+        return None
+
+    def bind_processor(self, dialect):
+        """Return a conversion function for processing bind values.
+
+        Returns a callable which will receive a bind parameter value
+        as the sole positional argument and will return a value to
+        send to the DB-API.
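+
+        For example, a hypothetical type that must hand strings to the
+        DB-API might return a callable along these lines (a sketch, not
+        part of the base class)::
+
+            def bind_processor(self, dialect):
+                def process(value):
+                    return str(value) if value is not None else None
+                return process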
+
+        If processing is not necessary, the method should return ``None``.
+
+        :param dialect: Dialect instance in use.
+
+        """
+        return None
+
+    def result_processor(self, dialect, coltype):
+        """Return a conversion function for processing result row values.
+
+        Returns a callable which will receive a result row column
+        value as the sole positional argument and will return a value
+        to return to the user.
+
+        If processing is not necessary, the method should return ``None``.
+
+        :param dialect: Dialect instance in use.
+
+        :param coltype: DBAPI coltype argument received in cursor.description.
+
+        """
+        return None
+
+    def column_expression(self, colexpr):
+        """Given a SELECT column expression, return a wrapping SQL expression.
+
+        This is typically a SQL function that wraps a column expression
+        as rendered in the columns clause of a SELECT statement.
+        It is used for special data types that require
+        columns to be wrapped in some special database function in order
+        to coerce the value before being sent back to the application.
+        It is the SQL analogue of the :meth:`.TypeEngine.result_processor`
+        method.
+
+        The method is evaluated at statement compile time, as opposed
+        to statement construction time.
+
+        See also:
+
+        :ref:`types_sql_value_processing`
+
+        """
+
+        return None
+
+    @util.memoized_property
+    def _has_column_expression(self):
+        """memoized boolean, check if column_expression is implemented.
+
+        Allows the method to be skipped for the vast majority of expression
+        types that don't use this feature.
+
+        """
+
+        return self.__class__.column_expression.__code__ \
+            is not TypeEngine.column_expression.__code__
+
+    def bind_expression(self, bindvalue):
+        """Given a bind value (i.e. a :class:`.BindParameter` instance),
+        return a SQL expression in its place.
+
+        This is typically a SQL function that wraps the existing bound
+        parameter within the statement.  It is used for special data types
+        that require literals being wrapped in some special database function
+        in order to coerce an application-level value into a database-specific
+        format.  It is the SQL analogue of the
+        :meth:`.TypeEngine.bind_processor` method.
+
+        The method is evaluated at statement compile time, as opposed
+        to statement construction time.
+
+        Note that this method, when implemented, should always return
+        the exact same structure, without any conditional logic, as it
+        may be used in an executemany() call against an arbitrary number
+        of bound parameter sets.
+
+        See also:
+
+        :ref:`types_sql_value_processing`
+
+        """
+        return None
+
+    @util.memoized_property
+    def _has_bind_expression(self):
+        """memoized boolean, check if bind_expression is implemented.
+
+        Allows the method to be skipped for the vast majority of expression
+        types that don't use this feature.
+
+        """
+
+        return self.__class__.bind_expression.__code__ \
+            is not TypeEngine.bind_expression.__code__
+
+    def compare_values(self, x, y):
+        """Compare two values for equality."""
+
+        return x == y
+
+    def get_dbapi_type(self, dbapi):
+        """Return the corresponding type object from the underlying DB-API, if
+        any.
+
+        This can be useful for calling ``setinputsizes()``, for example.
+
+        """
+        return None
+
+    @property
+    def python_type(self):
+        """Return the Python type object expected to be returned
+        by instances of this type, if known.
+
+        Basically, for those types which enforce a return type,
+        or are known across the board to do such for all common
+        DBAPIs (like ``int`` for example), will return that type.
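+
+        e.g., for two of the generic types defined in ``sqltypes``::
+
+            Integer().python_type   # int
+            DateTime().python_type  # datetime.datetime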
+ + If a return type is not defined, raises + ``NotImplementedError``. + + Note that any type also accommodates NULL in SQL which + means you can also get back ``None`` from any type + in practice. + + """ + raise NotImplementedError() + + def with_variant(self, type_, dialect_name): + """Produce a new type object that will utilize the given + type when applied to the dialect of the given name. + + e.g.:: + + from sqlalchemy.types import String + from sqlalchemy.dialects import mysql + + s = String() + + s = s.with_variant(mysql.VARCHAR(collation='foo'), 'mysql') + + The construction of :meth:`.TypeEngine.with_variant` is always + from the "fallback" type to that which is dialect specific. + The returned type is an instance of :class:`.Variant`, which + itself provides a :meth:`.Variant.with_variant` + that can be called repeatedly. + + :param type_: a :class:`.TypeEngine` that will be selected + as a variant from the originating type, when a dialect + of the given name is in use. + :param dialect_name: base name of the dialect which uses + this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.) + + .. versionadded:: 0.7.2 + + """ + return Variant(self, {dialect_name: to_instance(type_)}) + + @util.memoized_property + def _type_affinity(self): + """Return a rudimental 'affinity' value expressing the general class + of type.""" + + typ = None + for t in self.__class__.__mro__: + if t in (TypeEngine, UserDefinedType): + return typ + elif issubclass(t, (TypeEngine, UserDefinedType)): + typ = t + else: + return self.__class__ + + def dialect_impl(self, dialect): + """Return a dialect-specific implementation for this + :class:`.TypeEngine`. + + """ + try: + return dialect._type_memos[self]['impl'] + except KeyError: + return self._dialect_info(dialect)['impl'] + + def _cached_literal_processor(self, dialect): + """Return a dialect-specific literal processor for this type.""" + try: + return dialect._type_memos[self]['literal'] + except KeyError: + d = self._dialect_info(dialect) + d['literal'] = lp = d['impl'].literal_processor(dialect) + return lp + + def _cached_bind_processor(self, dialect): + """Return a dialect-specific bind processor for this type.""" + + try: + return dialect._type_memos[self]['bind'] + except KeyError: + d = self._dialect_info(dialect) + d['bind'] = bp = d['impl'].bind_processor(dialect) + return bp + + def _cached_result_processor(self, dialect, coltype): + """Return a dialect-specific result processor for this type.""" + + try: + return dialect._type_memos[self][coltype] + except KeyError: + d = self._dialect_info(dialect) + # key assumption: DBAPI type codes are + # constants. Else this dictionary would + # grow unbounded. + d[coltype] = rp = d['impl'].result_processor(dialect, coltype) + return rp + + def _dialect_info(self, dialect): + """Return a dialect-specific registry which + caches a dialect-specific implementation, bind processing + function, and one or more result processing functions.""" + + if self in dialect._type_memos: + return dialect._type_memos[self] + else: + impl = self._gen_dialect_impl(dialect) + if impl is self: + impl = self.adapt(type(self)) + # this can't be self, else we create a cycle + assert impl is not self + dialect._type_memos[self] = d = {'impl': impl} + return d + + def _gen_dialect_impl(self, dialect): + return dialect.type_descriptor(self) + + def adapt(self, cls, **kw): + """Produce an "adapted" form of this type, given an "impl" class + to work with. 
+
+        This method is used internally to associate generic
+        types with "implementation" types that are specific to a particular
+        dialect.
+        """
+        return util.constructor_copy(self, cls, **kw)
+
+    def coerce_compared_value(self, op, value):
+        """Suggest a type for a 'coerced' Python value in an expression.
+
+        Given an operator and value, gives the type a chance
+        to return a type which the value should be coerced into.
+
+        The default behavior here is conservative; if the right-hand
+        side is already coerced into a SQL type based on its
+        Python type, it is usually left alone.
+
+        End-user functionality extension here should generally be via
+        :class:`.TypeDecorator`, which provides more liberal behavior in that
+        it defaults to coercing the other side of the expression into this
+        type, thus applying special Python conversions above and beyond those
+        needed by the DBAPI to both sides.  It also provides the public method
+        :meth:`.TypeDecorator.coerce_compared_value` which is intended for
+        end-user customization of this behavior.
+
+        """
+        _coerced_type = _type_map.get(type(value), NULLTYPE)
+        if _coerced_type is NULLTYPE or _coerced_type._type_affinity \
+                is self._type_affinity:
+            return self
+        else:
+            return _coerced_type
+
+    def _compare_type_affinity(self, other):
+        return self._type_affinity is other._type_affinity
+
+    def compile(self, dialect=None):
+        """Produce a string-compiled form of this :class:`.TypeEngine`.
+
+        When called with no arguments, uses a "default" dialect
+        to produce a string result.
+
+        :param dialect: a :class:`.Dialect` instance.
+
+        """
+        # arg, return value is inconsistent with
+        # ClauseElement.compile()....this is a mistake.
+
+        if not dialect:
+            dialect = self._default_dialect()
+
+        return dialect.type_compiler.process(self)
+
+    @util.dependencies("sqlalchemy.engine.default")
+    def _default_dialect(self, default):
+        if self.__class__.__module__.startswith("sqlalchemy.dialects"):
+            tokens = self.__class__.__module__.split(".")[0:3]
+            mod = ".".join(tokens)
+            return getattr(__import__(mod).dialects, tokens[-1]).dialect()
+        else:
+            return default.DefaultDialect()
+
+    def __str__(self):
+        if util.py2k:
+            return unicode(self.compile()).\
+                encode('ascii', 'backslashreplace')
+        else:
+            return str(self.compile())
+
+    def __repr__(self):
+        return util.generic_repr(self)
+
+
+class VisitableCheckKWArg(util.EnsureKWArgType, VisitableType):
+    pass
+
+
+class UserDefinedType(util.with_metaclass(VisitableCheckKWArg, TypeEngine)):
+    """Base for user defined types.
+
+    This should be the base of new types.  Note that
+    for most cases, :class:`.TypeDecorator` is probably
+    more appropriate::
+
+      import sqlalchemy.types as types
+
+      class MyType(types.UserDefinedType):
+          def __init__(self, precision = 8):
+              self.precision = precision
+
+          def get_col_spec(self, **kw):
+              return "MYTYPE(%s)" % self.precision
+
+          def bind_processor(self, dialect):
+              def process(value):
+                  return value
+              return process
+
+          def result_processor(self, dialect, coltype):
+              def process(value):
+                  return value
+              return process
+
+    Once the type is made, it's immediately usable::
+
+      table = Table('foo', meta,
+          Column('id', Integer, primary_key=True),
+          Column('data', MyType(16))
+          )
+
+    The ``get_col_spec()`` method will in most cases receive a keyword
+    argument ``type_expression`` which refers to the owning expression
+    of the type as being compiled, such as a :class:`.Column` or
+    :func:`.cast` construct.  This keyword is only sent if the method
+    accepts keyword arguments (e.g.
``**kw``) in its argument signature; + introspection is used to check for this in order to support legacy + forms of this function. + + .. versionadded:: 1.0.0 the owning expression is passed to + the ``get_col_spec()`` method via the keyword argument + ``type_expression``, if it receives ``**kw`` in its signature. + + """ + __visit_name__ = "user_defined" + + ensure_kwarg = 'get_col_spec' + + class Comparator(TypeEngine.Comparator): + __slots__ = () + + def _adapt_expression(self, op, other_comparator): + if hasattr(self.type, 'adapt_operator'): + util.warn_deprecated( + "UserDefinedType.adapt_operator is deprecated. Create " + "a UserDefinedType.Comparator subclass instead which " + "generates the desired expression constructs, given a " + "particular operator." + ) + return self.type.adapt_operator(op), self.type + else: + return op, self.type + + comparator_factory = Comparator + + def coerce_compared_value(self, op, value): + """Suggest a type for a 'coerced' Python value in an expression. + + Default behavior for :class:`.UserDefinedType` is the + same as that of :class:`.TypeDecorator`; by default it returns + ``self``, assuming the compared value should be coerced into + the same type as this one. See + :meth:`.TypeDecorator.coerce_compared_value` for more detail. + + .. versionchanged:: 0.8 :meth:`.UserDefinedType.coerce_compared_value` + now returns ``self`` by default, rather than falling onto the + more fundamental behavior of + :meth:`.TypeEngine.coerce_compared_value`. + + """ + + return self + + +class TypeDecorator(TypeEngine): + """Allows the creation of types which add additional functionality + to an existing type. + + This method is preferred to direct subclassing of SQLAlchemy's + built-in types as it ensures that all required functionality of + the underlying type is kept in place. + + Typical usage:: + + import sqlalchemy.types as types + + class MyType(types.TypeDecorator): + '''Prefixes Unicode values with "PREFIX:" on the way in and + strips it off on the way out. + ''' + + impl = types.Unicode + + def process_bind_param(self, value, dialect): + return "PREFIX:" + value + + def process_result_value(self, value, dialect): + return value[7:] + + def copy(self): + return MyType(self.impl.length) + + The class-level "impl" attribute is required, and can reference any + TypeEngine class. Alternatively, the load_dialect_impl() method + can be used to provide different type classes based on the dialect + given; in this case, the "impl" variable can reference + ``TypeEngine`` as a placeholder. + + Types that receive a Python type that isn't similar to the ultimate type + used may want to define the :meth:`TypeDecorator.coerce_compared_value` + method. This is used to give the expression system a hint when coercing + Python objects into bind parameters within expressions. Consider this + expression:: + + mytable.c.somecol + datetime.date(2009, 5, 15) + + Above, if "somecol" is an ``Integer`` variant, it makes sense that + we're doing date arithmetic, where above is usually interpreted + by databases as adding a number of days to the given date. + The expression system does the right thing by not attempting to + coerce the "date()" value into an integer-oriented bind parameter. + + However, in the case of ``TypeDecorator``, we are usually changing an + incoming Python type to something new - ``TypeDecorator`` by default will + "coerce" the non-typed side to be the same type as itself. 
Such as below,
+    we define an "epoch" type that stores a date value as an integer::
+
+        class MyEpochType(types.TypeDecorator):
+            impl = types.Integer
+
+            epoch = datetime.date(1970, 1, 1)
+
+            def process_bind_param(self, value, dialect):
+                return (value - self.epoch).days
+
+            def process_result_value(self, value, dialect):
+                return self.epoch + timedelta(days=value)
+
+    Our expression of ``somecol + date`` with the above type will coerce the
+    "date" on the right side to also be treated as ``MyEpochType``.
+
+    This behavior can be overridden via the
+    :meth:`~TypeDecorator.coerce_compared_value` method, which returns a type
+    that should be used for the value of the expression. Below we set it such
+    that an integer value will be treated as an ``Integer``, and any other
+    value is assumed to be a date and will be treated as a ``MyEpochType``::
+
+        def coerce_compared_value(self, op, value):
+            if isinstance(value, int):
+                return Integer()
+            else:
+                return self
+
+    .. warning::
+
+       Note that the **behavior of coerce_compared_value is not inherited
+       by default from that of the base type**.
+       If the :class:`.TypeDecorator` is augmenting a
+       type that requires special logic for certain types of operators,
+       this method **must** be overridden.  A key example is when decorating
+       the :class:`.postgresql.JSON` and :class:`.postgresql.JSONB` types;
+       the default rules of :meth:`.TypeEngine.coerce_compared_value` should
+       be used in order to deal with operators like index operations::
+
+           class MyJsonType(TypeDecorator):
+               impl = postgresql.JSON
+
+               def coerce_compared_value(self, op, value):
+                   return self.impl.coerce_compared_value(op, value)
+
+       Without the above step, index operations such as ``mycol['foo']``
+       will cause the index value ``'foo'`` to be JSON encoded.
+
+    """
+
+    __visit_name__ = "type_decorator"
+
+    def __init__(self, *args, **kwargs):
+        """Construct a :class:`.TypeDecorator`.
+
+        Arguments sent here are passed to the constructor
+        of the class assigned to the ``impl`` class level attribute,
+        assuming the ``impl`` is a callable, and the resulting
+        object is assigned to the ``self.impl`` instance attribute
+        (thus overriding the class attribute of the same name).
+
+        If the class level ``impl`` is not a callable (the unusual case),
+        it will be assigned to the same instance attribute 'as-is',
+        ignoring those arguments passed to the constructor.
+
+        Subclasses can override this to customize the generation
+        of ``self.impl`` entirely.
+
+        """
+
+        if not hasattr(self.__class__, 'impl'):
+            raise AssertionError("TypeDecorator implementations "
+                                 "require a class-level variable "
+                                 "'impl' which refers to the class of "
+                                 "type being decorated")
+        self.impl = to_instance(self.__class__.impl, *args, **kwargs)
+
+    coerce_to_is_types = (util.NoneType, )
+    """Specify those Python types which should be coerced at the expression
+    level to "IS <constant>" when compared using ``==`` (and same for
+    ``IS NOT`` in conjunction with ``!=``).
+
+    For most SQLAlchemy types, this includes ``NoneType``, as well as
+    ``bool``.
+
+    :class:`.TypeDecorator` modifies this list to only include ``NoneType``,
+    as typedecorator implementations that deal with boolean types are common.
+
+    Custom :class:`.TypeDecorator` classes can override this attribute to
+    return an empty tuple, in which case no values will be coerced to
+    constants.
+
+    .. versionadded:: 0.8.2
+        Added :attr:`.TypeDecorator.coerce_to_is_types` to allow for easier
+        control of ``__eq__()`` ``__ne__()`` operations.
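+
+    e.g., a hypothetical decorator that opts out of the coercion
+    entirely::
+
+        class AlwaysCompareType(TypeDecorator):
+            impl = Integer
+            coerce_to_is_types = ()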
+ + """ + + class Comparator(TypeEngine.Comparator): + __slots__ = () + + def operate(self, op, *other, **kwargs): + kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types + return super(TypeDecorator.Comparator, self).operate( + op, *other, **kwargs) + + def reverse_operate(self, op, other, **kwargs): + kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types + return super(TypeDecorator.Comparator, self).reverse_operate( + op, other, **kwargs) + + @property + def comparator_factory(self): + if TypeDecorator.Comparator in self.impl.comparator_factory.__mro__: + return self.impl.comparator_factory + else: + return type("TDComparator", + (TypeDecorator.Comparator, + self.impl.comparator_factory), + {}) + + def _gen_dialect_impl(self, dialect): + """ + #todo + """ + adapted = dialect.type_descriptor(self) + if adapted is not self: + return adapted + + # otherwise adapt the impl type, link + # to a copy of this TypeDecorator and return + # that. + typedesc = self.load_dialect_impl(dialect).dialect_impl(dialect) + tt = self.copy() + if not isinstance(tt, self.__class__): + raise AssertionError('Type object %s does not properly ' + 'implement the copy() method, it must ' + 'return an object of type %s' % + (self, self.__class__)) + tt.impl = typedesc + return tt + + @property + def _type_affinity(self): + """ + #todo + """ + return self.impl._type_affinity + + def type_engine(self, dialect): + """Return a dialect-specific :class:`.TypeEngine` instance + for this :class:`.TypeDecorator`. + + In most cases this returns a dialect-adapted form of + the :class:`.TypeEngine` type represented by ``self.impl``. + Makes usage of :meth:`dialect_impl` but also traverses + into wrapped :class:`.TypeDecorator` instances. + Behavior can be customized here by overriding + :meth:`load_dialect_impl`. + + """ + adapted = dialect.type_descriptor(self) + if not isinstance(adapted, type(self)): + return adapted + elif isinstance(self.impl, TypeDecorator): + return self.impl.type_engine(dialect) + else: + return self.load_dialect_impl(dialect) + + def load_dialect_impl(self, dialect): + """Return a :class:`.TypeEngine` object corresponding to a dialect. + + This is an end-user override hook that can be used to provide + differing types depending on the given dialect. It is used + by the :class:`.TypeDecorator` implementation of :meth:`type_engine` + to help determine what type should ultimately be returned + for a given :class:`.TypeDecorator`. + + By default returns ``self.impl``. + + """ + return self.impl + + def __getattr__(self, key): + """Proxy all other undefined accessors to the underlying + implementation.""" + return getattr(self.impl, key) + + def process_literal_param(self, value, dialect): + """Receive a literal parameter value to be rendered inline within + a statement. + + This method is used when the compiler renders a + literal value without using binds, typically within DDL + such as in the "server default" of a column or an expression + within a CHECK constraint. + + The returned string will be rendered into the output string. + + .. versionadded:: 0.9.0 + + """ + raise NotImplementedError() + + def process_bind_param(self, value, dialect): + """Receive a bound parameter value to be converted. + + Subclasses override this method to return the + value that should be passed along to the underlying + :class:`.TypeEngine` object, and from there to the + DBAPI ``execute()`` method. 
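+
+        A sketch of a typical override (the JSON serialization is just
+        an illustration, and assumes a module-level ``import json``)::
+
+            def process_bind_param(self, value, dialect):
+                if value is not None:
+                    value = json.dumps(value)
+                return value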
+ + The operation could be anything desired to perform custom + behavior, such as transforming or serializing data. + This could also be used as a hook for validating logic. + + This operation should be designed with the reverse operation + in mind, which would be the process_result_value method of + this class. + + :param value: Data to operate upon, of any type expected by + this method in the subclass. Can be ``None``. + :param dialect: the :class:`.Dialect` in use. + + """ + + raise NotImplementedError() + + def process_result_value(self, value, dialect): + """Receive a result-row column value to be converted. + + Subclasses should implement this method to operate on data + fetched from the database. + + Subclasses override this method to return the + value that should be passed back to the application, + given a value that is already processed by + the underlying :class:`.TypeEngine` object, originally + from the DBAPI cursor method ``fetchone()`` or similar. + + The operation could be anything desired to perform custom + behavior, such as transforming or serializing data. + This could also be used as a hook for validating logic. + + :param value: Data to operate upon, of any type expected by + this method in the subclass. Can be ``None``. + :param dialect: the :class:`.Dialect` in use. + + This operation should be designed to be reversible by + the "process_bind_param" method of this class. + + """ + + raise NotImplementedError() + + @util.memoized_property + def _has_bind_processor(self): + """memoized boolean, check if process_bind_param is implemented. + + Allows the base process_bind_param to raise + NotImplementedError without needing to test an expensive + exception throw. + + """ + + return self.__class__.process_bind_param.__code__ \ + is not TypeDecorator.process_bind_param.__code__ + + @util.memoized_property + def _has_literal_processor(self): + """memoized boolean, check if process_literal_param is implemented. + + + """ + + return self.__class__.process_literal_param.__code__ \ + is not TypeDecorator.process_literal_param.__code__ + + def literal_processor(self, dialect): + """Provide a literal processing function for the given + :class:`.Dialect`. + + Subclasses here will typically override + :meth:`.TypeDecorator.process_literal_param` instead of this method + directly. + + By default, this method makes use of + :meth:`.TypeDecorator.process_bind_param` if that method is + implemented, where :meth:`.TypeDecorator.process_literal_param` is + not. The rationale here is that :class:`.TypeDecorator` typically + deals with Python conversions of data that are above the layer of + database presentation. With the value converted by + :meth:`.TypeDecorator.process_bind_param`, the underlying type will + then handle whether it needs to be presented to the DBAPI as a bound + parameter or to the database as an inline SQL value. + + .. versionadded:: 0.9.0 + + """ + if self._has_literal_processor: + process_param = self.process_literal_param + elif self._has_bind_processor: + # the bind processor should normally be OK + # for TypeDecorator since it isn't doing DB-level + # handling, the handling here won't be different for bound vs. + # literals. 
+            process_param = self.process_bind_param
+        else:
+            process_param = None
+
+        if process_param:
+            impl_processor = self.impl.literal_processor(dialect)
+            if impl_processor:
+                def process(value):
+                    return impl_processor(process_param(value, dialect))
+            else:
+                def process(value):
+                    return process_param(value, dialect)
+
+            return process
+        else:
+            return self.impl.literal_processor(dialect)
+
+    def bind_processor(self, dialect):
+        """Provide a bound value processing function for the
+        given :class:`.Dialect`.
+
+        This is the method that fulfills the :class:`.TypeEngine`
+        contract for bound value conversion.  :class:`.TypeDecorator`
+        will wrap a user-defined implementation of
+        :meth:`process_bind_param` here.
+
+        User-defined code can override this method directly,
+        though it's likely best to use :meth:`process_bind_param` so that
+        the processing provided by ``self.impl`` is maintained.
+
+        :param dialect: Dialect instance in use.
+
+        This method is the reverse counterpart to the
+        :meth:`result_processor` method of this class.
+
+        """
+        if self._has_bind_processor:
+            process_param = self.process_bind_param
+            impl_processor = self.impl.bind_processor(dialect)
+            if impl_processor:
+                def process(value):
+                    return impl_processor(process_param(value, dialect))
+
+            else:
+                def process(value):
+                    return process_param(value, dialect)
+
+            return process
+        else:
+            return self.impl.bind_processor(dialect)
+
+    @util.memoized_property
+    def _has_result_processor(self):
+        """memoized boolean, check if process_result_value is implemented.
+
+        Allows the base process_result_value to raise
+        NotImplementedError without needing to test an expensive
+        exception throw.
+
+        """
+        return self.__class__.process_result_value.__code__ \
+            is not TypeDecorator.process_result_value.__code__
+
+    def result_processor(self, dialect, coltype):
+        """Provide a result value processing function for the given
+        :class:`.Dialect`.
+
+        This is the method that fulfills the :class:`.TypeEngine`
+        contract for result value conversion.  :class:`.TypeDecorator`
+        will wrap a user-defined implementation of
+        :meth:`process_result_value` here.
+
+        User-defined code can override this method directly,
+        though it's likely best to use :meth:`process_result_value` so that
+        the processing provided by ``self.impl`` is maintained.
+
+        :param dialect: Dialect instance in use.
+        :param coltype: An SQLAlchemy data type
+
+        This method is the reverse counterpart to the
+        :meth:`bind_processor` method of this class.
+
+        """
+        if self._has_result_processor:
+            process_value = self.process_result_value
+            impl_processor = self.impl.result_processor(dialect,
+                                                        coltype)
+            if impl_processor:
+                def process(value):
+                    return process_value(impl_processor(value), dialect)
+
+            else:
+                def process(value):
+                    return process_value(value, dialect)
+
+            return process
+        else:
+            return self.impl.result_processor(dialect, coltype)
+
+    def coerce_compared_value(self, op, value):
+        """Suggest a type for a 'coerced' Python value in an expression.
+
+        By default, returns self.  This method is called by
+        the expression system when an object using this type is
+        on the left or right side of an expression against a plain Python
+        object which does not yet have a SQLAlchemy type assigned::
+
+            expr = table.c.somecolumn + 35
+
+        Where above, if ``somecolumn`` uses this type, this method will
+        be called with the value ``operator.add``
+        and ``35``.  The return value is whatever SQLAlchemy type should
+        be used for ``35`` for this particular operation.
+ + """ + return self + + def copy(self): + """Produce a copy of this :class:`.TypeDecorator` instance. + + This is a shallow copy and is provided to fulfill part of + the :class:`.TypeEngine` contract. It usually does not + need to be overridden unless the user-defined :class:`.TypeDecorator` + has local state that should be deep-copied. + + """ + + instance = self.__class__.__new__(self.__class__) + instance.__dict__.update(self.__dict__) + return instance + + def get_dbapi_type(self, dbapi): + """Return the DBAPI type object represented by this + :class:`.TypeDecorator`. + + By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the + underlying "impl". + """ + return self.impl.get_dbapi_type(dbapi) + + def compare_values(self, x, y): + """Given two values, compare them for equality. + + By default this calls upon :meth:`.TypeEngine.compare_values` + of the underlying "impl", which in turn usually + uses the Python equals operator ``==``. + + This function is used by the ORM to compare + an original-loaded value with an intercepted + "changed" value, to determine if a net change + has occurred. + + """ + return self.impl.compare_values(x, y) + + def __repr__(self): + return util.generic_repr(self, to_inspect=self.impl) + + +class Variant(TypeDecorator): + """A wrapping type that selects among a variety of + implementations based on dialect in use. + + The :class:`.Variant` type is typically constructed + using the :meth:`.TypeEngine.with_variant` method. + + .. versionadded:: 0.7.2 + + .. seealso:: :meth:`.TypeEngine.with_variant` for an example of use. + + """ + + def __init__(self, base, mapping): + """Construct a new :class:`.Variant`. + + :param base: the base 'fallback' type + :param mapping: dictionary of string dialect names to + :class:`.TypeEngine` instances. + + """ + self.impl = base + self.mapping = mapping + + def load_dialect_impl(self, dialect): + if dialect.name in self.mapping: + return self.mapping[dialect.name] + else: + return self.impl + + def with_variant(self, type_, dialect_name): + """Return a new :class:`.Variant` which adds the given + type + dialect name to the mapping, in addition to the + mapping present in this :class:`.Variant`. + + :param type_: a :class:`.TypeEngine` that will be selected + as a variant from the originating type, when a dialect + of the given name is in use. + :param dialect_name: base name of the dialect which uses + this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.) 
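+
+        A usage sketch (the concrete types shown are illustrative)::
+
+            from sqlalchemy import String
+            from sqlalchemy.dialects import mysql
+
+            v = String(255).with_variant(
+                mysql.VARCHAR(255, charset='utf8'), 'mysql')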
+ + """ + + if dialect_name in self.mapping: + raise exc.ArgumentError( + "Dialect '%s' is already present in " + "the mapping for this Variant" % dialect_name) + mapping = self.mapping.copy() + mapping[dialect_name] = type_ + return Variant(self.impl, mapping) + + @property + def comparator_factory(self): + """express comparison behavior in terms of the base type""" + return self.impl.comparator_factory + + +def _reconstitute_comparator(expression): + return expression.comparator + + +def to_instance(typeobj, *arg, **kw): + if typeobj is None: + return NULLTYPE + + if util.callable(typeobj): + return typeobj(*arg, **kw) + else: + return typeobj + + +def adapt_type(typeobj, colspecs): + if isinstance(typeobj, type): + typeobj = typeobj() + for t in typeobj.__class__.__mro__[0:-1]: + try: + impltype = colspecs[t] + break + except KeyError: + pass + else: + # couldn't adapt - so just return the type itself + # (it may be a user-defined type) + return typeobj + # if we adapted the given generic type to a database-specific type, + # but it turns out the originally given "generic" type + # is actually a subclass of our resulting type, then we were already + # given a more specific type than that required; so use that. + if (issubclass(typeobj.__class__, impltype)): + return typeobj + return typeobj.adapt(impltype) diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/util.py b/lib/python3.4/site-packages/sqlalchemy/sql/util.py new file mode 100644 index 0000000..16a1421 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/util.py @@ -0,0 +1,612 @@ +# sql/util.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""High level utilities which build upon other modules here. + +""" + +from .. import exc, util +from .base import _from_objects, ColumnSet +from . import operators, visitors +from itertools import chain +from collections import deque + +from .elements import BindParameter, ColumnClause, ColumnElement, \ + Null, UnaryExpression, literal_column, Label, _label_reference, \ + _textual_label_reference +from .selectable import ScalarSelect, Join, FromClause, FromGrouping +from .schema import Column + +join_condition = util.langhelpers.public_factory( + Join._join_condition, + ".sql.util.join_condition") + +# names that are still being imported from the outside +from .annotation import _shallow_annotate, _deep_annotate, _deep_deannotate +from .elements import _find_columns +from .ddl import sort_tables + + +def find_join_source(clauses, join_to): + """Given a list of FROM clauses and a selectable, + return the first index and element from the list of + clauses which can be joined against the selectable. returns + None, None if no match is found. + + e.g.:: + + clause1 = table1.join(table2) + clause2 = table4.join(table5) + + join_to = table2.join(table3) + + find_join_source([clause1, clause2], join_to) == clause1 + + """ + + selectables = list(_from_objects(join_to)) + for i, f in enumerate(clauses): + for s in selectables: + if f.is_derived_from(s): + return i, f + else: + return None, None + + +def visit_binary_product(fn, expr): + """Produce a traversal of the given expression, delivering + column comparisons to the given function. 
+ + The function is of the form:: + + def my_fn(binary, left, right) + + For each binary expression located which has a + comparison operator, the product of "left" and + "right" will be delivered to that function, + in terms of that binary. + + Hence an expression like:: + + and_( + (a + b) == q + func.sum(e + f), + j == r + ) + + would have the traversal:: + + a q + a e + a f + b q + b e + b f + j r + + That is, every combination of "left" and + "right" that doesn't further contain + a binary comparison is passed as pairs. + + """ + stack = [] + + def visit(element): + if isinstance(element, ScalarSelect): + # we don't want to dig into correlated subqueries, + # those are just column elements by themselves + yield element + elif element.__visit_name__ == 'binary' and \ + operators.is_comparison(element.operator): + stack.insert(0, element) + for l in visit(element.left): + for r in visit(element.right): + fn(stack[0], l, r) + stack.pop(0) + for elem in element.get_children(): + visit(elem) + else: + if isinstance(element, ColumnClause): + yield element + for elem in element.get_children(): + for e in visit(elem): + yield e + list(visit(expr)) + + +def find_tables(clause, check_columns=False, + include_aliases=False, include_joins=False, + include_selects=False, include_crud=False): + """locate Table objects within the given expression.""" + + tables = [] + _visitors = {} + + if include_selects: + _visitors['select'] = _visitors['compound_select'] = tables.append + + if include_joins: + _visitors['join'] = tables.append + + if include_aliases: + _visitors['alias'] = tables.append + + if include_crud: + _visitors['insert'] = _visitors['update'] = \ + _visitors['delete'] = lambda ent: tables.append(ent.table) + + if check_columns: + def visit_column(column): + tables.append(column.table) + _visitors['column'] = visit_column + + _visitors['table'] = tables.append + + visitors.traverse(clause, {'column_collections': False}, _visitors) + return tables + + +def unwrap_order_by(clause): + """Break up an 'order by' expression into individual column-expressions, + without DESC/ASC/NULLS FIRST/NULLS LAST""" + + cols = util.column_set() + stack = deque([clause]) + while stack: + t = stack.popleft() + if isinstance(t, ColumnElement) and \ + ( + not isinstance(t, UnaryExpression) or + not operators.is_ordering_modifier(t.modifier) + ): + if isinstance(t, _label_reference): + t = t.element + if isinstance(t, (_textual_label_reference)): + continue + cols.add(t) + else: + for c in t.get_children(): + stack.append(c) + return cols + + +def clause_is_present(clause, search): + """Given a target clause and a second to search within, return True + if the target is plainly present in the search without any + subqueries or aliases involved. + + Basically descends through Joins. + + """ + + for elem in surface_selectables(search): + if clause == elem: # use == here so that Annotated's compare + return True + else: + return False + + +def surface_selectables(clause): + stack = [clause] + while stack: + elem = stack.pop() + yield elem + if isinstance(elem, Join): + stack.extend((elem.left, elem.right)) + elif isinstance(elem, FromGrouping): + stack.append(elem.element) + + +def selectables_overlap(left, right): + """Return True if left/right have some overlapping selectable""" + + return bool( + set(surface_selectables(left)).intersection( + surface_selectables(right) + ) + ) + + +def bind_values(clause): + """Return an ordered list of "bound" values in the given clause. + + E.g.:: + + >>> expr = and_( + ... 
table.c.foo==5, table.c.foo==7 + ... ) + >>> bind_values(expr) + [5, 7] + """ + + v = [] + + def visit_bindparam(bind): + v.append(bind.effective_value) + + visitors.traverse(clause, {}, {'bindparam': visit_bindparam}) + return v + + +def _quote_ddl_expr(element): + if isinstance(element, util.string_types): + element = element.replace("'", "''") + return "'%s'" % element + else: + return repr(element) + + +class _repr_params(object): + """A string view of bound parameters, truncating + display to the given number of 'multi' parameter sets. + + """ + + def __init__(self, params, batches): + self.params = params + self.batches = batches + + def __repr__(self): + if isinstance(self.params, (list, tuple)) and \ + len(self.params) > self.batches and \ + isinstance(self.params[0], (list, dict, tuple)): + msg = " ... displaying %i of %i total bound parameter sets ... " + return ' '.join(( + repr(self.params[:self.batches - 2])[0:-1], + msg % (self.batches, len(self.params)), + repr(self.params[-2:])[1:] + )) + else: + return repr(self.params) + + +def adapt_criterion_to_null(crit, nulls): + """given criterion containing bind params, convert selected elements + to IS NULL. + + """ + + def visit_binary(binary): + if isinstance(binary.left, BindParameter) \ + and binary.left._identifying_key in nulls: + # reverse order if the NULL is on the left side + binary.left = binary.right + binary.right = Null() + binary.operator = operators.is_ + binary.negate = operators.isnot + elif isinstance(binary.right, BindParameter) \ + and binary.right._identifying_key in nulls: + binary.right = Null() + binary.operator = operators.is_ + binary.negate = operators.isnot + + return visitors.cloned_traverse(crit, {}, {'binary': visit_binary}) + + +def splice_joins(left, right, stop_on=None): + if left is None: + return right + + stack = [(right, None)] + + adapter = ClauseAdapter(left) + ret = None + while stack: + (right, prevright) = stack.pop() + if isinstance(right, Join) and right is not stop_on: + right = right._clone() + right._reset_exported() + right.onclause = adapter.traverse(right.onclause) + stack.append((right.left, right)) + else: + right = adapter.traverse(right) + if prevright is not None: + prevright.left = right + if ret is None: + ret = right + + return ret + + +def reduce_columns(columns, *clauses, **kw): + """given a list of columns, return a 'reduced' set based on natural + equivalents. + + the set is reduced to the smallest list of columns which have no natural + equivalent present in the list. A "natural equivalent" means that two + columns will ultimately represent the same value because they are related + by a foreign key. + + \*clauses is an optional list of join clauses which will be traversed + to further identify columns that are "equivalent". + + \**kw may specify 'ignore_nonexistent_tables' to ignore foreign keys + whose tables are not yet configured, or columns that aren't yet present. + + This function is primarily used to determine the most minimal "primary + key" from a selectable, by reducing the set of primary key columns present + in the selectable to just those that are not repeated. 
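+
+    A sketch of the intent (table and column names hypothetical): given
+    ``parent.c.id`` and ``child.c.parent_id``, where the latter carries a
+    ForeignKey to the former::
+
+        reduce_columns([child.c.parent_id, parent.c.id])
+        # -> ColumnSet([parent.c.id])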
+ + """ + ignore_nonexistent_tables = kw.pop('ignore_nonexistent_tables', False) + only_synonyms = kw.pop('only_synonyms', False) + + columns = util.ordered_column_set(columns) + + omit = util.column_set() + for col in columns: + for fk in chain(*[c.foreign_keys for c in col.proxy_set]): + for c in columns: + if c is col: + continue + try: + fk_col = fk.column + except exc.NoReferencedColumnError: + # TODO: add specific coverage here + # to test/sql/test_selectable ReduceTest + if ignore_nonexistent_tables: + continue + else: + raise + except exc.NoReferencedTableError: + # TODO: add specific coverage here + # to test/sql/test_selectable ReduceTest + if ignore_nonexistent_tables: + continue + else: + raise + if fk_col.shares_lineage(c) and \ + (not only_synonyms or + c.name == col.name): + omit.add(col) + break + + if clauses: + def visit_binary(binary): + if binary.operator == operators.eq: + cols = util.column_set( + chain(*[c.proxy_set for c in columns.difference(omit)])) + if binary.left in cols and binary.right in cols: + for c in reversed(columns): + if c.shares_lineage(binary.right) and \ + (not only_synonyms or + c.name == binary.left.name): + omit.add(c) + break + for clause in clauses: + if clause is not None: + visitors.traverse(clause, {}, {'binary': visit_binary}) + + return ColumnSet(columns.difference(omit)) + + +def criterion_as_pairs(expression, consider_as_foreign_keys=None, + consider_as_referenced_keys=None, any_operator=False): + """traverse an expression and locate binary criterion pairs.""" + + if consider_as_foreign_keys and consider_as_referenced_keys: + raise exc.ArgumentError("Can only specify one of " + "'consider_as_foreign_keys' or " + "'consider_as_referenced_keys'") + + def col_is(a, b): + # return a is b + return a.compare(b) + + def visit_binary(binary): + if not any_operator and binary.operator is not operators.eq: + return + if not isinstance(binary.left, ColumnElement) or \ + not isinstance(binary.right, ColumnElement): + return + + if consider_as_foreign_keys: + if binary.left in consider_as_foreign_keys and \ + (col_is(binary.right, binary.left) or + binary.right not in consider_as_foreign_keys): + pairs.append((binary.right, binary.left)) + elif binary.right in consider_as_foreign_keys and \ + (col_is(binary.left, binary.right) or + binary.left not in consider_as_foreign_keys): + pairs.append((binary.left, binary.right)) + elif consider_as_referenced_keys: + if binary.left in consider_as_referenced_keys and \ + (col_is(binary.right, binary.left) or + binary.right not in consider_as_referenced_keys): + pairs.append((binary.left, binary.right)) + elif binary.right in consider_as_referenced_keys and \ + (col_is(binary.left, binary.right) or + binary.left not in consider_as_referenced_keys): + pairs.append((binary.right, binary.left)) + else: + if isinstance(binary.left, Column) and \ + isinstance(binary.right, Column): + if binary.left.references(binary.right): + pairs.append((binary.right, binary.left)) + elif binary.right.references(binary.left): + pairs.append((binary.left, binary.right)) + pairs = [] + visitors.traverse(expression, {}, {'binary': visit_binary}) + return pairs + + +class ClauseAdapter(visitors.ReplacingCloningVisitor): + """Clones and modifies clauses based on column correspondence. 
+ + E.g.:: + + table1 = Table('sometable', metadata, + Column('col1', Integer), + Column('col2', Integer) + ) + table2 = Table('someothertable', metadata, + Column('col1', Integer), + Column('col2', Integer) + ) + + condition = table1.c.col1 == table2.c.col1 + + make an alias of table1:: + + s = table1.alias('foo') + + calling ``ClauseAdapter(s).traverse(condition)`` converts + condition to read:: + + s.c.col1 == table2.c.col1 + + """ + + def __init__(self, selectable, equivalents=None, + include_fn=None, exclude_fn=None, + adapt_on_names=False, anonymize_labels=False): + self.__traverse_options__ = { + 'stop_on': [selectable], + 'anonymize_labels': anonymize_labels} + self.selectable = selectable + self.include_fn = include_fn + self.exclude_fn = exclude_fn + self.equivalents = util.column_dict(equivalents or {}) + self.adapt_on_names = adapt_on_names + + def _corresponding_column(self, col, require_embedded, + _seen=util.EMPTY_SET): + newcol = self.selectable.corresponding_column( + col, + require_embedded=require_embedded) + if newcol is None and col in self.equivalents and col not in _seen: + for equiv in self.equivalents[col]: + newcol = self._corresponding_column( + equiv, require_embedded=require_embedded, + _seen=_seen.union([col])) + if newcol is not None: + return newcol + if self.adapt_on_names and newcol is None: + newcol = self.selectable.c.get(col.name) + return newcol + + def replace(self, col): + if isinstance(col, FromClause) and \ + self.selectable.is_derived_from(col): + return self.selectable + elif not isinstance(col, ColumnElement): + return None + elif self.include_fn and not self.include_fn(col): + return None + elif self.exclude_fn and self.exclude_fn(col): + return None + else: + return self._corresponding_column(col, True) + + +class ColumnAdapter(ClauseAdapter): + """Extends ClauseAdapter with extra utility functions. + + Key aspects of ColumnAdapter include: + + * Expressions that are adapted are stored in a persistent + .columns collection; so that an expression E adapted into + an expression E1, will return the same object E1 when adapted + a second time. This is important in particular for things like + Label objects that are anonymized, so that the ColumnAdapter can + be used to present a consistent "adapted" view of things. + + * Exclusion of items from the persistent collection based on + include/exclude rules, but also independent of hash identity. + This because "annotated" items all have the same hash identity as their + parent. + + * "wrapping" capability is added, so that the replacement of an expression + E can proceed through a series of adapters. This differs from the + visitor's "chaining" feature in that the resulting object is passed + through all replacing functions unconditionally, rather than stopping + at the first one that returns non-None. + + * An adapt_required option, used by eager loading to indicate that + We don't trust a result row column that is not translated. 
+ This is to prevent a column from being interpreted as that + of the child row in a self-referential scenario, see + inheritance/test_basic.py->EagerTargetingTest.test_adapt_stringency + + """ + + def __init__(self, selectable, equivalents=None, + chain_to=None, adapt_required=False, + include_fn=None, exclude_fn=None, + adapt_on_names=False, + allow_label_resolve=True, + anonymize_labels=False): + ClauseAdapter.__init__(self, selectable, equivalents, + include_fn=include_fn, exclude_fn=exclude_fn, + adapt_on_names=adapt_on_names, + anonymize_labels=anonymize_labels) + + if chain_to: + self.chain(chain_to) + self.columns = util.populate_column_dict(self._locate_col) + if self.include_fn or self.exclude_fn: + self.columns = self._IncludeExcludeMapping(self, self.columns) + self.adapt_required = adapt_required + self.allow_label_resolve = allow_label_resolve + self._wrap = None + + class _IncludeExcludeMapping(object): + def __init__(self, parent, columns): + self.parent = parent + self.columns = columns + + def __getitem__(self, key): + if ( + self.parent.include_fn and not self.parent.include_fn(key) + ) or ( + self.parent.exclude_fn and self.parent.exclude_fn(key) + ): + if self.parent._wrap: + return self.parent._wrap.columns[key] + else: + return key + return self.columns[key] + + def wrap(self, adapter): + ac = self.__class__.__new__(self.__class__) + ac.__dict__.update(self.__dict__) + ac._wrap = adapter + ac.columns = util.populate_column_dict(ac._locate_col) + if ac.include_fn or ac.exclude_fn: + ac.columns = self._IncludeExcludeMapping(ac, ac.columns) + + return ac + + def traverse(self, obj): + return self.columns[obj] + + adapt_clause = traverse + adapt_list = ClauseAdapter.copy_and_process + + def _locate_col(self, col): + + c = ClauseAdapter.traverse(self, col) + + if self._wrap: + c2 = self._wrap._locate_col(c) + if c2 is not None: + c = c2 + + if self.adapt_required and c is col: + return None + + c._allow_label_resolve = self.allow_label_resolve + + return c + + def __getstate__(self): + d = self.__dict__.copy() + del d['columns'] + return d + + def __setstate__(self, state): + self.__dict__.update(state) + self.columns = util.PopulateDict(self._locate_col) diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/visitors.py b/lib/python3.4/site-packages/sqlalchemy/sql/visitors.py new file mode 100644 index 0000000..d12213e --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/visitors.py @@ -0,0 +1,328 @@ +# sql/visitors.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Visitor/traversal interface and library functions. + +SQLAlchemy schema and expression constructs rely on a Python-centric +version of the classic "visitor" pattern as the primary way in which +they apply functionality. The most common use of this pattern +is statement compilation, where individual expression classes match +up to rendering methods that produce a string result. Beyond this, +the visitor system is also used to inspect expressions for various +information and patterns, as well as for usage in +some kinds of expression transformation. Other kinds of transformation +use a non-visitor traversal system. + +For many examples of how the visit system is used, see the +sqlalchemy.sql.util and the sqlalchemy.sql.compiler modules. 
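+
+A minimal sketch of the visitor idea described above (the class and the
+``expr`` expression are assumed)::
+
+    from sqlalchemy.sql.visitors import ClauseVisitor
+
+    class ColumnCollector(ClauseVisitor):
+        def __init__(self):
+            self.cols = []
+
+        def visit_column(self, column):
+            self.cols.append(column)
+
+    collector = ColumnCollector()
+    collector.traverse(expr)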
+For an introduction to clause adaptation, see
+http://techspot.zzzeek.org/2008/01/23/expression-transformations/
+
+"""
+
+from collections import deque
+from .. import util
+import operator
+from .. import exc
+
+__all__ = ['VisitableType', 'Visitable', 'ClauseVisitor',
+           'CloningVisitor', 'ReplacingCloningVisitor', 'iterate',
+           'iterate_depthfirst', 'traverse_using', 'traverse',
+           'traverse_depthfirst',
+           'cloned_traverse', 'replacement_traverse']
+
+
+class VisitableType(type):
+    """Metaclass which assigns a `_compiler_dispatch` method to classes
+    having a `__visit_name__` attribute.
+
+    The _compiler_dispatch attribute becomes an instance method which
+    looks approximately like the following::
+
+        def _compiler_dispatch(self, visitor, **kw):
+            '''Look for an attribute named "visit_" + self.__visit_name__
+            on the visitor, and call it with the same kw params.'''
+            visit_attr = 'visit_%s' % self.__visit_name__
+            return getattr(visitor, visit_attr)(self, **kw)
+
+    Classes having no __visit_name__ attribute will remain unaffected.
+    """
+
+    def __init__(cls, clsname, bases, clsdict):
+        if clsname != 'Visitable' and \
+                hasattr(cls, '__visit_name__'):
+            _generate_dispatch(cls)
+
+        super(VisitableType, cls).__init__(clsname, bases, clsdict)
+
+
+def _generate_dispatch(cls):
+    """Return an optimized visit dispatch function for the cls
+    for use by the compiler.
+    """
+    if '__visit_name__' in cls.__dict__:
+        visit_name = cls.__visit_name__
+        if isinstance(visit_name, str):
+            # There is an optimization opportunity here because the
+            # string name of the class's __visit_name__ is known at
+            # this early stage (import time) so it can be pre-constructed.
+            getter = operator.attrgetter("visit_%s" % visit_name)
+
+            def _compiler_dispatch(self, visitor, **kw):
+                try:
+                    meth = getter(visitor)
+                except AttributeError:
+                    raise exc.UnsupportedCompilationError(visitor, cls)
+                else:
+                    return meth(self, **kw)
+        else:
+            # The optimization opportunity is lost for this case because the
+            # __visit_name__ is not yet a string. As a result, the visit
+            # string has to be recalculated with each compilation.
+            def _compiler_dispatch(self, visitor, **kw):
+                visit_attr = 'visit_%s' % self.__visit_name__
+                try:
+                    meth = getattr(visitor, visit_attr)
+                except AttributeError:
+                    raise exc.UnsupportedCompilationError(visitor, cls)
+                else:
+                    return meth(self, **kw)
+
+        _compiler_dispatch.__doc__ = \
+            """Look for an attribute named "visit_" + self.__visit_name__
+            on the visitor, and call it with the same kw params.
+            """
+        cls._compiler_dispatch = _compiler_dispatch
+
+
+class Visitable(util.with_metaclass(VisitableType, object)):
+    """Base class for visitable objects, applies the
+    ``VisitableType`` metaclass.
+
+    """
+
+
+class ClauseVisitor(object):
+    """Base class for visitor objects which can traverse using
+    the traverse() function.
+
+    """
+
+    __traverse_options__ = {}
+
+    def traverse_single(self, obj, **kw):
+        for v in self._visitor_iterator:
+            meth = getattr(v, "visit_%s" % obj.__visit_name__, None)
+            if meth:
+                return meth(obj, **kw)
+
+    def iterate(self, obj):
+        """traverse the given expression structure, returning an iterator
+        of all elements.
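+
+        A sketch (``MyVisitor`` is a hypothetical subclass and ``expr``
+        an assumed expression)::
+
+            for elem in MyVisitor().iterate(expr):
+                print(type(elem).__name__)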
+ + """ + return iterate(obj, self.__traverse_options__) + + def traverse(self, obj): + """traverse and visit the given expression structure.""" + + return traverse(obj, self.__traverse_options__, self._visitor_dict) + + @util.memoized_property + def _visitor_dict(self): + visitors = {} + + for name in dir(self): + if name.startswith('visit_'): + visitors[name[6:]] = getattr(self, name) + return visitors + + @property + def _visitor_iterator(self): + """iterate through this visitor and each 'chained' visitor.""" + + v = self + while v: + yield v + v = getattr(v, '_next', None) + + def chain(self, visitor): + """'chain' an additional ClauseVisitor onto this ClauseVisitor. + + the chained visitor will receive all visit events after this one. + + """ + tail = list(self._visitor_iterator)[-1] + tail._next = visitor + return self + + +class CloningVisitor(ClauseVisitor): + """Base class for visitor objects which can traverse using + the cloned_traverse() function. + + """ + + def copy_and_process(self, list_): + """Apply cloned traversal to the given list of elements, and return + the new list. + + """ + return [self.traverse(x) for x in list_] + + def traverse(self, obj): + """traverse and visit the given expression structure.""" + + return cloned_traverse( + obj, self.__traverse_options__, self._visitor_dict) + + +class ReplacingCloningVisitor(CloningVisitor): + """Base class for visitor objects which can traverse using + the replacement_traverse() function. + + """ + + def replace(self, elem): + """receive pre-copied elements during a cloning traversal. + + If the method returns a new element, the element is used + instead of creating a simple copy of the element. Traversal + will halt on the newly returned element if it is re-encountered. + """ + return None + + def traverse(self, obj): + """traverse and visit the given expression structure.""" + + def replace(elem): + for v in self._visitor_iterator: + e = v.replace(elem) + if e is not None: + return e + return replacement_traverse(obj, self.__traverse_options__, replace) + + +def iterate(obj, opts): + """traverse the given expression structure, returning an iterator. + + traversal is configured to be breadth-first. + + """ + # fasttrack for atomic elements like columns + children = obj.get_children(**opts) + if not children: + return [obj] + + traversal = deque() + stack = deque([obj]) + while stack: + t = stack.popleft() + traversal.append(t) + for c in t.get_children(**opts): + stack.append(c) + return iter(traversal) + + +def iterate_depthfirst(obj, opts): + """traverse the given expression structure, returning an iterator. + + traversal is configured to be depth-first. + + """ + # fasttrack for atomic elements like columns + children = obj.get_children(**opts) + if not children: + return [obj] + + stack = deque([obj]) + traversal = deque() + while stack: + t = stack.pop() + traversal.appendleft(t) + for c in t.get_children(**opts): + stack.append(c) + return iter(traversal) + + +def traverse_using(iterator, obj, visitors): + """visit the given expression structure using the given iterator of + objects. + + """ + for target in iterator: + meth = visitors.get(target.__visit_name__, None) + if meth: + meth(target) + return obj + + +def traverse(obj, opts, visitors): + """traverse and visit the given expression structure using the default + iterator. 
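+
+    A usage sketch (``expr`` is an assumed expression; ``'bindparam'``
+    is the visit name of BindParameter elements)::
+
+        binds = []
+        traverse(expr, {}, {'bindparam': binds.append})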
+ + """ + return traverse_using(iterate(obj, opts), obj, visitors) + + +def traverse_depthfirst(obj, opts, visitors): + """traverse and visit the given expression structure using the + depth-first iterator. + + """ + return traverse_using(iterate_depthfirst(obj, opts), obj, visitors) + + +def cloned_traverse(obj, opts, visitors): + """clone the given expression structure, allowing + modifications by visitors.""" + + cloned = {} + stop_on = set(opts.get('stop_on', [])) + + def clone(elem): + if elem in stop_on: + return elem + else: + if id(elem) not in cloned: + cloned[id(elem)] = newelem = elem._clone() + newelem._copy_internals(clone=clone) + meth = visitors.get(newelem.__visit_name__, None) + if meth: + meth(newelem) + return cloned[id(elem)] + + if obj is not None: + obj = clone(obj) + return obj + + +def replacement_traverse(obj, opts, replace): + """clone the given expression structure, allowing element + replacement by a given replacement function.""" + + cloned = {} + stop_on = set([id(x) for x in opts.get('stop_on', [])]) + + def clone(elem, **kw): + if id(elem) in stop_on or \ + 'no_replacement_traverse' in elem._annotations: + return elem + else: + newelem = replace(elem) + if newelem is not None: + stop_on.add(id(newelem)) + return newelem + else: + if elem not in cloned: + cloned[elem] = newelem = elem._clone() + newelem._copy_internals(clone=clone, **kw) + return cloned[elem] + + if obj is not None: + obj = clone(obj, **opts) + return obj diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/__init__.py b/lib/python3.4/site-packages/sqlalchemy/testing/__init__.py new file mode 100644 index 0000000..4e02227 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/__init__.py @@ -0,0 +1,36 @@ +# testing/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +from .warnings import assert_warnings + +from . import config + +from .exclusions import db_spec, _is_excluded, fails_if, skip_if, future,\ + fails_on, fails_on_everything_except, skip, only_on, exclude, \ + against as _against, _server_version, only_if, fails + + +def against(*queries): + return _against(config._current, *queries) + +from .assertions import emits_warning, emits_warning_on, uses_deprecated, \ + eq_, ne_, le_, is_, is_not_, startswith_, assert_raises, \ + assert_raises_message, AssertsCompiledSQL, ComparesTables, \ + AssertsExecutionResults, expect_deprecated, expect_warnings, \ + in_, not_in_ + +from .util import run_as_contextmanager, rowset, fail, \ + provide_metadata, adict, force_drop_names, \ + teardown_events + +crashes = skip + +from .config import db +from .config import requirements as requires + +from . import mock diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/assertions.py b/lib/python3.4/site-packages/sqlalchemy/testing/assertions.py new file mode 100644 index 0000000..492adcd --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/assertions.py @@ -0,0 +1,491 @@ +# testing/assertions.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from __future__ import absolute_import + +from . 
import util as testutil +from sqlalchemy import pool, orm, util +from sqlalchemy.engine import default, url +from sqlalchemy.util import decorator +from sqlalchemy import types as sqltypes, schema, exc as sa_exc +import warnings +import re +from .exclusions import db_spec, _is_excluded +from . import assertsql +from . import config +from .util import fail +import contextlib +from . import mock + + +def expect_warnings(*messages, **kw): + """Context manager which expects one or more warnings. + + With no arguments, squelches all SAWarnings emitted via + sqlalchemy.util.warn and sqlalchemy.util.warn_limited. Otherwise + pass string expressions that will match selected warnings via regex; + all non-matching warnings are sent through. + + The expect version **asserts** that the warnings were in fact seen. + + Note that the test suite sets SAWarning warnings to raise exceptions. + + """ + return _expect_warnings(sa_exc.SAWarning, messages, **kw) + + +@contextlib.contextmanager +def expect_warnings_on(db, *messages, **kw): + """Context manager which expects one or more warnings on specific + dialects. + + The expect version **asserts** that the warnings were in fact seen. + + """ + spec = db_spec(db) + + if isinstance(db, util.string_types) and not spec(config._current): + yield + else: + with expect_warnings(*messages, **kw): + yield + + +def emits_warning(*messages): + """Decorator form of expect_warnings(). + + Note that emits_warning does **not** assert that the warnings + were in fact seen. + + """ + + @decorator + def decorate(fn, *args, **kw): + with expect_warnings(assert_=False, *messages): + return fn(*args, **kw) + + return decorate + + +def expect_deprecated(*messages, **kw): + return _expect_warnings(sa_exc.SADeprecationWarning, messages, **kw) + + +def emits_warning_on(db, *messages): + """Mark a test as emitting a warning on a specific dialect. + + With no arguments, squelches all SAWarning failures. Or pass one or more + strings; these will be matched to the root of the warning description by + warnings.filterwarnings(). + + Note that emits_warning_on does **not** assert that the warnings + were in fact seen. + + """ + @decorator + def decorate(fn, *args, **kw): + with expect_warnings_on(db, assert_=False, *messages): + return fn(*args, **kw) + + return decorate + + +def uses_deprecated(*messages): + """Mark a test as immune from fatal deprecation warnings. + + With no arguments, squelches all SADeprecationWarning failures. + Or pass one or more strings; these will be matched to the root + of the warning description by warnings.filterwarnings(). + + As a special case, you may pass a function name prefixed with // + and it will be re-written as needed to match the standard warning + verbiage emitted by the sqlalchemy.util.deprecated decorator. + + Note that uses_deprecated does **not** assert that the warnings + were in fact seen. 
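+
+    A usage sketch (the test method and the deprecated call are
+    hypothetical)::
+
+        @uses_deprecated(r'.*the old_thing\(\) function is deprecated')
+        def test_old_thing(self):
+            old_thing()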
+ + """ + + @decorator + def decorate(fn, *args, **kw): + with expect_deprecated(*messages, assert_=False): + return fn(*args, **kw) + return decorate + + +@contextlib.contextmanager +def _expect_warnings(exc_cls, messages, regex=True, assert_=True): + + if regex: + filters = [re.compile(msg, re.I | re.S) for msg in messages] + else: + filters = messages + + seen = set(filters) + + real_warn = warnings.warn + + def our_warn(msg, exception, *arg, **kw): + if not issubclass(exception, exc_cls): + return real_warn(msg, exception, *arg, **kw) + + if not filters: + return + + for filter_ in filters: + if (regex and filter_.match(msg)) or \ + (not regex and filter_ == msg): + seen.discard(filter_) + break + else: + real_warn(msg, exception, *arg, **kw) + + with mock.patch("warnings.warn", our_warn): + yield + + if assert_: + assert not seen, "Warnings were not seen: %s" % \ + ", ".join("%r" % (s.pattern if regex else s) for s in seen) + + +def global_cleanup_assertions(): + """Check things that have to be finalized at the end of a test suite. + + Hardcoded at the moment, a modular system can be built here + to support things like PG prepared transactions, tables all + dropped, etc. + + """ + _assert_no_stray_pool_connections() + +_STRAY_CONNECTION_FAILURES = 0 + + +def _assert_no_stray_pool_connections(): + global _STRAY_CONNECTION_FAILURES + + # lazy gc on cPython means "do nothing." pool connections + # shouldn't be in cycles, should go away. + testutil.lazy_gc() + + # however, once in awhile, on an EC2 machine usually, + # there's a ref in there. usually just one. + if pool._refs: + + # OK, let's be somewhat forgiving. + _STRAY_CONNECTION_FAILURES += 1 + + print("Encountered a stray connection in test cleanup: %s" + % str(pool._refs)) + # then do a real GC sweep. We shouldn't even be here + # so a single sweep should really be doing it, otherwise + # there's probably a real unreachable cycle somewhere. + testutil.gc_collect() + + # if we've already had two of these occurrences, or + # after a hard gc sweep we still have pool._refs?! + # now we have to raise. + if pool._refs: + err = str(pool._refs) + + # but clean out the pool refs collection directly, + # reset the counter, + # so the error doesn't at least keep happening. 
+ pool._refs.clear() + _STRAY_CONNECTION_FAILURES = 0 + assert False, "Stray connection refused to leave "\ + "after gc.collect(): %s" % err + elif _STRAY_CONNECTION_FAILURES > 10: + assert False, "Encountered more than 10 stray connections" + _STRAY_CONNECTION_FAILURES = 0 + + +def eq_(a, b, msg=None): + """Assert a == b, with repr messaging on failure.""" + assert a == b, msg or "%r != %r" % (a, b) + + +def ne_(a, b, msg=None): + """Assert a != b, with repr messaging on failure.""" + assert a != b, msg or "%r == %r" % (a, b) + + +def le_(a, b, msg=None): + """Assert a <= b, with repr messaging on failure.""" + assert a <= b, msg or "%r != %r" % (a, b) + + +def is_(a, b, msg=None): + """Assert a is b, with repr messaging on failure.""" + assert a is b, msg or "%r is not %r" % (a, b) + + +def is_not_(a, b, msg=None): + """Assert a is not b, with repr messaging on failure.""" + assert a is not b, msg or "%r is %r" % (a, b) + + +def in_(a, b, msg=None): + """Assert a in b, with repr messaging on failure.""" + assert a in b, msg or "%r not in %r" % (a, b) + + +def not_in_(a, b, msg=None): + """Assert a in not b, with repr messaging on failure.""" + assert a not in b, msg or "%r is in %r" % (a, b) + + +def startswith_(a, fragment, msg=None): + """Assert a.startswith(fragment), with repr messaging on failure.""" + assert a.startswith(fragment), msg or "%r does not start with %r" % ( + a, fragment) + + +def assert_raises(except_cls, callable_, *args, **kw): + try: + callable_(*args, **kw) + success = False + except except_cls: + success = True + + # assert outside the block so it works for AssertionError too ! + assert success, "Callable did not raise an exception" + + +def assert_raises_message(except_cls, msg, callable_, *args, **kwargs): + try: + callable_(*args, **kwargs) + assert False, "Callable did not raise an exception" + except except_cls as e: + assert re.search( + msg, util.text_type(e), re.UNICODE), "%r !~ %s" % (msg, e) + print(util.text_type(e).encode('utf-8')) + + +class AssertsCompiledSQL(object): + def assert_compile(self, clause, result, params=None, + checkparams=None, dialect=None, + checkpositional=None, + check_prefetch=None, + use_default_dialect=False, + allow_dialect_select=False, + literal_binds=False): + if use_default_dialect: + dialect = default.DefaultDialect() + elif allow_dialect_select: + dialect = None + else: + if dialect is None: + dialect = getattr(self, '__dialect__', None) + + if dialect is None: + dialect = config.db.dialect + elif dialect == 'default': + dialect = default.DefaultDialect() + elif isinstance(dialect, util.string_types): + dialect = url.URL(dialect).get_dialect()() + + kw = {} + compile_kwargs = {} + + if params is not None: + kw['column_keys'] = list(params) + + if literal_binds: + compile_kwargs['literal_binds'] = True + + if isinstance(clause, orm.Query): + context = clause._compile_context() + context.statement.use_labels = True + clause = context.statement + + if compile_kwargs: + kw['compile_kwargs'] = compile_kwargs + + c = clause.compile(dialect=dialect, **kw) + + param_str = repr(getattr(c, 'params', {})) + + if util.py3k: + param_str = param_str.encode('utf-8').decode('ascii', 'ignore') + print( + ("\nSQL String:\n" + + util.text_type(c) + + param_str).encode('utf-8')) + else: + print( + "\nSQL String:\n" + + util.text_type(c).encode('utf-8') + + param_str) + + cc = re.sub(r'[\n\t]', '', util.text_type(c)) + + eq_(cc, result, "%r != %r on dialect %r" % (cc, result, dialect)) + + if checkparams is not None: + 
eq_(c.construct_params(params), checkparams) + if checkpositional is not None: + p = c.construct_params(params) + eq_(tuple([p[x] for x in c.positiontup]), checkpositional) + if check_prefetch is not None: + eq_(c.prefetch, check_prefetch) + + +class ComparesTables(object): + + def assert_tables_equal(self, table, reflected_table, strict_types=False): + assert len(table.c) == len(reflected_table.c) + for c, reflected_c in zip(table.c, reflected_table.c): + eq_(c.name, reflected_c.name) + assert reflected_c is reflected_table.c[c.name] + eq_(c.primary_key, reflected_c.primary_key) + eq_(c.nullable, reflected_c.nullable) + + if strict_types: + msg = "Type '%s' doesn't correspond to type '%s'" + assert isinstance(reflected_c.type, type(c.type)), \ + msg % (reflected_c.type, c.type) + else: + self.assert_types_base(reflected_c, c) + + if isinstance(c.type, sqltypes.String): + eq_(c.type.length, reflected_c.type.length) + + eq_( + set([f.column.name for f in c.foreign_keys]), + set([f.column.name for f in reflected_c.foreign_keys]) + ) + if c.server_default: + assert isinstance(reflected_c.server_default, + schema.FetchedValue) + + assert len(table.primary_key) == len(reflected_table.primary_key) + for c in table.primary_key: + assert reflected_table.primary_key.columns[c.name] is not None + + def assert_types_base(self, c1, c2): + assert c1.type._compare_type_affinity(c2.type),\ + "On column %r, type '%s' doesn't correspond to type '%s'" % \ + (c1.name, c1.type, c2.type) + + +class AssertsExecutionResults(object): + def assert_result(self, result, class_, *objects): + result = list(result) + print(repr(result)) + self.assert_list(result, class_, objects) + + def assert_list(self, result, class_, list): + self.assert_(len(result) == len(list), + "result list is not the same size as test list, " + + "for class " + class_.__name__) + for i in range(0, len(list)): + self.assert_row(class_, result[i], list[i]) + + def assert_row(self, class_, rowobj, desc): + self.assert_(rowobj.__class__ is class_, + "item class is not " + repr(class_)) + for key, value in desc.items(): + if isinstance(value, tuple): + if isinstance(value[1], list): + self.assert_list(getattr(rowobj, key), value[0], value[1]) + else: + self.assert_row(value[0], getattr(rowobj, key), value[1]) + else: + self.assert_(getattr(rowobj, key) == value, + "attribute %s value %s does not match %s" % ( + key, getattr(rowobj, key), value)) + + def assert_unordered_result(self, result, cls, *expected): + """As assert_result, but the order of objects is not considered. + + The algorithm is very expensive but not a big deal for the small + numbers of rows that the test suite manipulates. 
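+
+        A usage sketch (``User`` and the query are assumed)::
+
+            self.assert_unordered_result(
+                session.query(User).all(), User,
+                {'name': 'ed'},
+                {'name': 'wendy'},
+            )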
+ """ + + class immutabledict(dict): + def __hash__(self): + return id(self) + + found = util.IdentitySet(result) + expected = set([immutabledict(e) for e in expected]) + + for wrong in util.itertools_filterfalse(lambda o: + isinstance(o, cls), found): + fail('Unexpected type "%s", expected "%s"' % ( + type(wrong).__name__, cls.__name__)) + + if len(found) != len(expected): + fail('Unexpected object count "%s", expected "%s"' % ( + len(found), len(expected))) + + NOVALUE = object() + + def _compare_item(obj, spec): + for key, value in spec.items(): + if isinstance(value, tuple): + try: + self.assert_unordered_result( + getattr(obj, key), value[0], *value[1]) + except AssertionError: + return False + else: + if getattr(obj, key, NOVALUE) != value: + return False + return True + + for expected_item in expected: + for found_item in found: + if _compare_item(found_item, expected_item): + found.remove(found_item) + break + else: + fail( + "Expected %s instance with attributes %s not found." % ( + cls.__name__, repr(expected_item))) + return True + + def sql_execution_asserter(self, db=None): + if db is None: + from . import db as db + + return assertsql.assert_engine(db) + + def assert_sql_execution(self, db, callable_, *rules): + with self.sql_execution_asserter(db) as asserter: + callable_() + asserter.assert_(*rules) + + def assert_sql(self, db, callable_, rules): + + newrules = [] + for rule in rules: + if isinstance(rule, dict): + newrule = assertsql.AllOf(*[ + assertsql.CompiledSQL(k, v) for k, v in rule.items() + ]) + else: + newrule = assertsql.CompiledSQL(*rule) + newrules.append(newrule) + + self.assert_sql_execution(db, callable_, *newrules) + + def assert_sql_count(self, db, callable_, count): + self.assert_sql_execution( + db, callable_, assertsql.CountStatements(count)) + + @contextlib.contextmanager + def assert_execution(self, *rules): + assertsql.asserter.add_rules(rules) + try: + yield + assertsql.asserter.statement_complete() + finally: + assertsql.asserter.clear_rules() + + def assert_statement_count(self, count): + return self.assert_execution(assertsql.CountStatements(count)) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/assertsql.py b/lib/python3.4/site-packages/sqlalchemy/testing/assertsql.py new file mode 100644 index 0000000..10f7ca7 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/assertsql.py @@ -0,0 +1,372 @@ +# testing/assertsql.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from ..engine.default import DefaultDialect +from .. import util +import re +import collections +import contextlib +from .. 
import event +from sqlalchemy.schema import _DDLCompiles +from sqlalchemy.engine.util import _distill_params +from sqlalchemy.engine import url + + +class AssertRule(object): + + is_consumed = False + errormessage = None + consume_statement = True + + def process_statement(self, execute_observed): + pass + + def no_more_statements(self): + assert False, 'All statements are complete, but pending '\ + 'assertion rules remain' + + +class SQLMatchRule(AssertRule): + pass + + +class CursorSQL(SQLMatchRule): + consume_statement = False + + def __init__(self, statement, params=None): + self.statement = statement + self.params = params + + def process_statement(self, execute_observed): + stmt = execute_observed.statements[0] + if self.statement != stmt.statement or ( + self.params is not None and self.params != stmt.parameters): + self.errormessage = \ + "Testing for exact SQL %s parameters %s received %s %s" % ( + self.statement, self.params, + stmt.statement, stmt.parameters + ) + else: + execute_observed.statements.pop(0) + self.is_consumed = True + if not execute_observed.statements: + self.consume_statement = True + + +class CompiledSQL(SQLMatchRule): + + def __init__(self, statement, params=None, dialect='default'): + self.statement = statement + self.params = params + self.dialect = dialect + + def _compare_sql(self, execute_observed, received_statement): + stmt = re.sub(r'[\n\t]', '', self.statement) + return received_statement == stmt + + def _compile_dialect(self, execute_observed): + if self.dialect == 'default': + return DefaultDialect() + else: + # ugh + if self.dialect == 'postgresql': + params = {'implicit_returning': True} + else: + params = {} + return url.URL(self.dialect).get_dialect()(**params) + + def _received_statement(self, execute_observed): + """reconstruct the statement and params in terms + of a target dialect, which for CompiledSQL is just DefaultDialect.""" + + context = execute_observed.context + compare_dialect = self._compile_dialect(execute_observed) + if isinstance(context.compiled.statement, _DDLCompiles): + compiled = \ + context.compiled.statement.compile(dialect=compare_dialect) + else: + compiled = ( + context.compiled.statement.compile( + dialect=compare_dialect, + column_keys=context.compiled.column_keys, + inline=context.compiled.inline) + ) + _received_statement = re.sub(r'[\n\t]', '', util.text_type(compiled)) + parameters = execute_observed.parameters + + if not parameters: + _received_parameters = [compiled.construct_params()] + else: + _received_parameters = [ + compiled.construct_params(m) for m in parameters] + + return _received_statement, _received_parameters + + def process_statement(self, execute_observed): + context = execute_observed.context + + _received_statement, _received_parameters = \ + self._received_statement(execute_observed) + params = self._all_params(context) + + equivalent = self._compare_sql(execute_observed, _received_statement) + + if equivalent: + if params is not None: + all_params = list(params) + all_received = list(_received_parameters) + while all_params and all_received: + param = dict(all_params.pop(0)) + + for idx, received in enumerate(list(all_received)): + # do a positive compare only + for param_key in param: + # a key in param did not match current + # 'received' + if param_key not in received or \ + received[param_key] != param[param_key]: + break + else: + # all keys in param matched 'received'; + # onto next param + del all_received[idx] + break + else: + # param did not match any entry + # in all_received + 
equivalent = False + break + if all_params or all_received: + equivalent = False + + if equivalent: + self.is_consumed = True + self.errormessage = None + else: + self.errormessage = self._failure_message(params) % { + 'received_statement': _received_statement, + 'received_parameters': _received_parameters + } + + def _all_params(self, context): + if self.params: + if util.callable(self.params): + params = self.params(context) + else: + params = self.params + if not isinstance(params, list): + params = [params] + return params + else: + return None + + def _failure_message(self, expected_params): + return ( + 'Testing for compiled statement %r partial params %r, ' + 'received %%(received_statement)r with params ' + '%%(received_parameters)r' % ( + self.statement.replace('%', '%%'), expected_params + ) + ) + + +class RegexSQL(CompiledSQL): + def __init__(self, regex, params=None): + SQLMatchRule.__init__(self) + self.regex = re.compile(regex) + self.orig_regex = regex + self.params = params + self.dialect = 'default' + + def _failure_message(self, expected_params): + return ( + 'Testing for compiled statement ~%r partial params %r, ' + 'received %%(received_statement)r with params ' + '%%(received_parameters)r' % ( + self.orig_regex, expected_params + ) + ) + + def _compare_sql(self, execute_observed, received_statement): + return bool(self.regex.match(received_statement)) + + +class DialectSQL(CompiledSQL): + def _compile_dialect(self, execute_observed): + return execute_observed.context.dialect + + def _compare_no_space(self, real_stmt, received_stmt): + stmt = re.sub(r'[\n\t]', '', real_stmt) + return received_stmt == stmt + + def _received_statement(self, execute_observed): + received_stmt, received_params = super(DialectSQL, self).\ + _received_statement(execute_observed) + + # TODO: why do we need this part? + for real_stmt in execute_observed.statements: + if self._compare_no_space(real_stmt.statement, received_stmt): + break + else: + raise AssertionError( + "Can't locate compiled statement %r in list of " + "statements actually invoked" % received_stmt) + + return received_stmt, execute_observed.context.compiled_parameters + + def _compare_sql(self, execute_observed, received_statement): + stmt = re.sub(r'[\n\t]', '', self.statement) + # convert our comparison statement to have the + # paramstyle of the received + paramstyle = execute_observed.context.dialect.paramstyle + if paramstyle == 'pyformat': + stmt = re.sub( + r':([\w_]+)', r"%(\1)s", stmt) + else: + # positional params + repl = None + if paramstyle == 'qmark': + repl = "?" 
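+                # sketch: e.g. a comparison statement "x = :x_1" is
+                # rewritten to "x = ?" so it can be matched against a
+                # qmark-style received statement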
+ elif paramstyle == 'format': + repl = r"%s" + elif paramstyle == 'numeric': + repl = None + stmt = re.sub(r':([\w_]+)', repl, stmt) + + return received_statement == stmt + + +class CountStatements(AssertRule): + + def __init__(self, count): + self.count = count + self._statement_count = 0 + + def process_statement(self, execute_observed): + self._statement_count += 1 + + def no_more_statements(self): + if self.count != self._statement_count: + assert False, 'desired statement count %d does not match %d' \ + % (self.count, self._statement_count) + + +class AllOf(AssertRule): + + def __init__(self, *rules): + self.rules = set(rules) + + def process_statement(self, execute_observed): + for rule in list(self.rules): + rule.errormessage = None + rule.process_statement(execute_observed) + if rule.is_consumed: + self.rules.discard(rule) + if not self.rules: + self.is_consumed = True + break + elif not rule.errormessage: + # rule is not done yet + self.errormessage = None + break + else: + self.errormessage = list(self.rules)[0].errormessage + + +class Or(AllOf): + + def process_statement(self, execute_observed): + for rule in self.rules: + rule.process_statement(execute_observed) + if rule.is_consumed: + self.is_consumed = True + break + else: + self.errormessage = list(self.rules)[0].errormessage + + +class SQLExecuteObserved(object): + def __init__(self, context, clauseelement, multiparams, params): + self.context = context + self.clauseelement = clauseelement + self.parameters = _distill_params(multiparams, params) + self.statements = [] + + +class SQLCursorExecuteObserved( + collections.namedtuple( + "SQLCursorExecuteObserved", + ["statement", "parameters", "context", "executemany"]) +): + pass + + +class SQLAsserter(object): + def __init__(self): + self.accumulated = [] + + def _close(self): + self._final = self.accumulated + del self.accumulated + + def assert_(self, *rules): + rules = list(rules) + observed = list(self._final) + + while observed and rules: + rule = rules[0] + rule.process_statement(observed[0]) + if rule.is_consumed: + rules.pop(0) + elif rule.errormessage: + assert False, rule.errormessage + + if rule.consume_statement: + observed.pop(0) + + if not observed and rules: + rules[0].no_more_statements() + elif not rules and observed: + assert False, "Additional SQL statements remain" + + +@contextlib.contextmanager +def assert_engine(engine): + asserter = SQLAsserter() + + orig = [] + + @event.listens_for(engine, "before_execute") + def connection_execute(conn, clauseelement, multiparams, params): + # grab the original statement + params before any cursor + # execution + orig[:] = clauseelement, multiparams, params + + @event.listens_for(engine, "after_cursor_execute") + def cursor_execute(conn, cursor, statement, parameters, + context, executemany): + if not context: + return + # then grab real cursor statements and associate them all + # around a single context + if asserter.accumulated and \ + asserter.accumulated[-1].context is context: + obs = asserter.accumulated[-1] + else: + obs = SQLExecuteObserved(context, orig[0], orig[1], orig[2]) + asserter.accumulated.append(obs) + obs.statements.append( + SQLCursorExecuteObserved( + statement, parameters, context, executemany) + ) + + try: + yield asserter + finally: + event.remove(engine, "after_cursor_execute", cursor_execute) + event.remove(engine, "before_execute", connection_execute) + asserter._close() diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/config.py 
b/lib/python3.4/site-packages/sqlalchemy/testing/config.py new file mode 100644 index 0000000..da59976 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/config.py @@ -0,0 +1,92 @@ +# testing/config.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import collections + +requirements = None +db = None +db_url = None +db_opts = None +file_config = None +test_schema = None +test_schema_2 = None +_current = None +_skip_test_exception = None + + +class Config(object): + def __init__(self, db, db_opts, options, file_config): + self.db = db + self.db_opts = db_opts + self.options = options + self.file_config = file_config + self.test_schema = "test_schema" + self.test_schema_2 = "test_schema_2" + + _stack = collections.deque() + _configs = {} + + @classmethod + def register(cls, db, db_opts, options, file_config): + """add a config as one of the global configs. + + If there are no configs set up yet, this config also + gets set as the "_current". + """ + cfg = Config(db, db_opts, options, file_config) + + cls._configs[cfg.db.name] = cfg + cls._configs[(cfg.db.name, cfg.db.dialect)] = cfg + cls._configs[cfg.db] = cfg + return cfg + + @classmethod + def set_as_current(cls, config, namespace): + global db, _current, db_url, test_schema, test_schema_2, db_opts + _current = config + db_url = config.db.url + db_opts = config.db_opts + test_schema = config.test_schema + test_schema_2 = config.test_schema_2 + namespace.db = db = config.db + + @classmethod + def push_engine(cls, db, namespace): + assert _current, "Can't push without a default Config set up" + cls.push( + Config( + db, _current.db_opts, _current.options, _current.file_config), + namespace + ) + + @classmethod + def push(cls, config, namespace): + cls._stack.append(_current) + cls.set_as_current(config, namespace) + + @classmethod + def reset(cls, namespace): + if cls._stack: + cls.set_as_current(cls._stack[0], namespace) + cls._stack.clear() + + @classmethod + def all_configs(cls): + for cfg in set(cls._configs.values()): + yield cfg + + @classmethod + def all_dbs(cls): + for cfg in cls.all_configs(): + yield cfg.db + + def skip_test(self, msg): + skip_test(msg) + + +def skip_test(msg): + raise _skip_test_exception(msg) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/distutils_run.py b/lib/python3.4/site-packages/sqlalchemy/testing/distutils_run.py new file mode 100644 index 0000000..38de887 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/distutils_run.py @@ -0,0 +1,11 @@ +"""Quick and easy way to get setup.py test to run py.test without any +custom setuptools/distutils code. + +""" +import unittest +import pytest + + +class TestSuite(unittest.TestCase): + def test_sqlalchemy(self): + pytest.main(["-n", "4", "-q"]) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/engines.py b/lib/python3.4/site-packages/sqlalchemy/testing/engines.py new file mode 100644 index 0000000..def9f3c --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/engines.py @@ -0,0 +1,346 @@ +# testing/engines.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from __future__ import absolute_import + +import weakref +from . import config +from .util import decorator +from .. 
import event, pool +import re +import warnings + + +class ConnectionKiller(object): + + def __init__(self): + self.proxy_refs = weakref.WeakKeyDictionary() + self.testing_engines = weakref.WeakKeyDictionary() + self.conns = set() + + def add_engine(self, engine): + self.testing_engines[engine] = True + + def connect(self, dbapi_conn, con_record): + self.conns.add((dbapi_conn, con_record)) + + def checkout(self, dbapi_con, con_record, con_proxy): + self.proxy_refs[con_proxy] = True + + def invalidate(self, dbapi_con, con_record, exception): + self.conns.discard((dbapi_con, con_record)) + + def _safe(self, fn): + try: + fn() + except Exception as e: + warnings.warn( + "testing_reaper couldn't " + "rollback/close connection: %s" % e) + + def rollback_all(self): + for rec in list(self.proxy_refs): + if rec is not None and rec.is_valid: + self._safe(rec.rollback) + + def close_all(self): + for rec in list(self.proxy_refs): + if rec is not None and rec.is_valid: + self._safe(rec._close) + + def _after_test_ctx(self): + # this can cause a deadlock with pg8000 - pg8000 acquires + # prepared statement lock inside of rollback() - if async gc + # is collecting in finalize_fairy, deadlock. + # not sure if this should be if pypy/jython only. + # note that firebird/fdb definitely needs this though + for conn, rec in list(self.conns): + self._safe(conn.rollback) + + def _stop_test_ctx(self): + if config.options.low_connections: + self._stop_test_ctx_minimal() + else: + self._stop_test_ctx_aggressive() + + def _stop_test_ctx_minimal(self): + self.close_all() + + self.conns = set() + + for rec in list(self.testing_engines): + if rec is not config.db: + rec.dispose() + + def _stop_test_ctx_aggressive(self): + self.close_all() + for conn, rec in list(self.conns): + self._safe(conn.close) + rec.connection = None + + self.conns = set() + for rec in list(self.testing_engines): + rec.dispose() + + def assert_all_closed(self): + for rec in self.proxy_refs: + if rec.is_valid: + assert False + +testing_reaper = ConnectionKiller() + + +def drop_all_tables(metadata, bind): + testing_reaper.close_all() + if hasattr(bind, 'close'): + bind.close() + + if not config.db.dialect.supports_alter: + from . 
import assertions + with assertions.expect_warnings( + "Can't sort tables", assert_=False): + metadata.drop_all(bind) + else: + metadata.drop_all(bind) + + +@decorator +def assert_conns_closed(fn, *args, **kw): + try: + fn(*args, **kw) + finally: + testing_reaper.assert_all_closed() + + +@decorator +def rollback_open_connections(fn, *args, **kw): + """Decorator that rolls back all open connections after fn execution.""" + + try: + fn(*args, **kw) + finally: + testing_reaper.rollback_all() + + +@decorator +def close_first(fn, *args, **kw): + """Decorator that closes all connections before fn execution.""" + + testing_reaper.close_all() + fn(*args, **kw) + + +@decorator +def close_open_connections(fn, *args, **kw): + """Decorator that closes all connections after fn execution.""" + try: + fn(*args, **kw) + finally: + testing_reaper.close_all() + + +def all_dialects(exclude=None): + import sqlalchemy.databases as d + for name in d.__all__: + # TEMPORARY + if exclude and name in exclude: + continue + mod = getattr(d, name, None) + if not mod: + mod = getattr(__import__( + 'sqlalchemy.databases.%s' % name).databases, name) + yield mod.dialect() + + +class ReconnectFixture(object): + + def __init__(self, dbapi): + self.dbapi = dbapi + self.connections = [] + + def __getattr__(self, key): + return getattr(self.dbapi, key) + + def connect(self, *args, **kwargs): + conn = self.dbapi.connect(*args, **kwargs) + self.connections.append(conn) + return conn + + def _safe(self, fn): + try: + fn() + except Exception as e: + warnings.warn( + "ReconnectFixture couldn't " + "close connection: %s" % e) + + def shutdown(self): + # TODO: this doesn't cover all cases + # as nicely as we'd like, namely MySQLdb. + # would need to implement R. Brewer's + # proxy server idea to get better + # coverage. + for c in list(self.connections): + self._safe(c.close) + self.connections = [] + + +def reconnecting_engine(url=None, options=None): + url = url or config.db.url + dbapi = config.db.dialect.dbapi + if not options: + options = {} + options['module'] = ReconnectFixture(dbapi) + engine = testing_engine(url, options) + _dispose = engine.dispose + + def dispose(): + engine.dialect.dbapi.shutdown() + _dispose() + + engine.test_shutdown = engine.dialect.dbapi.shutdown + engine.dispose = dispose + return engine + + +def testing_engine(url=None, options=None): + """Produce an engine configured by --options with optional overrides.""" + + from sqlalchemy import create_engine + from sqlalchemy.engine.url import make_url + + if not options: + use_reaper = True + else: + use_reaper = options.pop('use_reaper', True) + + url = url or config.db.url + + url = make_url(url) + if options is None: + if config.db is None or url.drivername == config.db.url.drivername: + options = config.db_opts + else: + options = {} + + engine = create_engine(url, **options) + engine._has_events = True # enable event blocks, helps with profiling + + if isinstance(engine.pool, pool.QueuePool): + engine.pool._timeout = 0 + engine.pool._max_overflow = 0 + if use_reaper: + event.listen(engine.pool, 'connect', testing_reaper.connect) + event.listen(engine.pool, 'checkout', testing_reaper.checkout) + event.listen(engine.pool, 'invalidate', testing_reaper.invalidate) + testing_reaper.add_engine(engine) + + return engine + + +def mock_engine(dialect_name=None): + """Provides a mocking engine based on the current testing.db. + + This is normally used to test DDL generation flow as emitted + by an Engine. 
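+
+    A minimal usage sketch (illustrative only; the table ``t`` is assumed,
+    and the exact DDL string depends on the dialect and table definition)::
+
+        eng = mock_engine('sqlite')
+        metadata.create_all(eng)
+        eng.assert_sql(
+            ['CREATE TABLE t (id INTEGER NOT NULL, PRIMARY KEY (id))'])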
+ + It should not be used in other cases, as assert_compile() and + assert_sql_execution() are much better choices with fewer + moving parts. + + """ + + from sqlalchemy import create_engine + + if not dialect_name: + dialect_name = config.db.name + + buffer = [] + + def executor(sql, *a, **kw): + buffer.append(sql) + + def assert_sql(stmts): + recv = [re.sub(r'[\n\t]', '', str(s)) for s in buffer] + assert recv == stmts, recv + + def print_sql(): + d = engine.dialect + return "\n".join( + str(s.compile(dialect=d)) + for s in engine.mock + ) + + engine = create_engine(dialect_name + '://', + strategy='mock', executor=executor) + assert not hasattr(engine, 'mock') + engine.mock = buffer + engine.assert_sql = assert_sql + engine.print_sql = print_sql + return engine + + +class DBAPIProxyCursor(object): + """Proxy a DBAPI cursor. + + Tests can provide subclasses of this to intercept + DBAPI-level cursor operations. + + """ + + def __init__(self, engine, conn, *args, **kwargs): + self.engine = engine + self.connection = conn + self.cursor = conn.cursor(*args, **kwargs) + + def execute(self, stmt, parameters=None, **kw): + if parameters: + return self.cursor.execute(stmt, parameters, **kw) + else: + return self.cursor.execute(stmt, **kw) + + def executemany(self, stmt, params, **kw): + return self.cursor.executemany(stmt, params, **kw) + + def __getattr__(self, key): + return getattr(self.cursor, key) + + +class DBAPIProxyConnection(object): + """Proxy a DBAPI connection. + + Tests can provide subclasses of this to intercept + DBAPI-level connection operations. + + """ + + def __init__(self, engine, cursor_cls): + self.conn = self._sqla_unwrap = engine.pool._creator() + self.engine = engine + self.cursor_cls = cursor_cls + + def cursor(self, *args, **kwargs): + return self.cursor_cls(self.engine, self.conn, *args, **kwargs) + + def close(self): + self.conn.close() + + def __getattr__(self, key): + return getattr(self.conn, key) + + +def proxying_engine(conn_cls=DBAPIProxyConnection, + cursor_cls=DBAPIProxyCursor): + """Produce an engine that provides proxy hooks for + common methods. + + """ + def mock_conn(): + return conn_cls(config.db, cursor_cls) + return testing_engine(options={'creator': mock_conn}) + + diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/entities.py b/lib/python3.4/site-packages/sqlalchemy/testing/entities.py new file mode 100644 index 0000000..a5d04de --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/entities.py @@ -0,0 +1,101 @@ +# testing/entities.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import sqlalchemy as sa +from sqlalchemy import exc as sa_exc + +_repr_stack = set() + + +class BasicEntity(object): + + def __init__(self, **kw): + for key, value in kw.items(): + setattr(self, key, value) + + def __repr__(self): + if id(self) in _repr_stack: + return object.__repr__(self) + _repr_stack.add(id(self)) + try: + return "%s(%s)" % ( + (self.__class__.__name__), + ', '.join(["%s=%r" % (key, getattr(self, key)) + for key in sorted(self.__dict__.keys()) + if not key.startswith('_')])) + finally: + _repr_stack.remove(id(self)) + +_recursion_stack = set() + + +class ComparableEntity(BasicEntity): + + def __hash__(self): + return hash(self.__class__) + + def __ne__(self, other): + return not self.__eq__(other) + + def __eq__(self, other): + """'Deep, sparse compare. 
+ + Deeply compare two entities, following the non-None attributes of the + non-persisted object, if possible. + + """ + if other is self: + return True + elif not self.__class__ == other.__class__: + return False + + if id(self) in _recursion_stack: + return True + _recursion_stack.add(id(self)) + + try: + # pick the entity that's not SA persisted as the source + try: + self_key = sa.orm.attributes.instance_state(self).key + except sa.orm.exc.NO_STATE: + self_key = None + + if other is None: + a = self + b = other + elif self_key is not None: + a = other + b = self + else: + a = self + b = other + + for attr in list(a.__dict__): + if attr.startswith('_'): + continue + value = getattr(a, attr) + + try: + # handle lazy loader errors + battr = getattr(b, attr) + except (AttributeError, sa_exc.UnboundExecutionError): + return False + + if hasattr(value, '__iter__'): + if hasattr(value, '__getitem__') and not hasattr( + value, 'keys'): + if list(value) != list(battr): + return False + else: + if set(value) != set(battr): + return False + else: + if value is not None and value != battr: + return False + return True + finally: + _recursion_stack.remove(id(self)) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/exclusions.py b/lib/python3.4/site-packages/sqlalchemy/testing/exclusions.py new file mode 100644 index 0000000..11c9e1b --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/exclusions.py @@ -0,0 +1,441 @@ +# testing/exclusions.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +import operator +from ..util import decorator +from . import config +from .. import util +import inspect +import contextlib +from sqlalchemy.util.compat import inspect_getargspec + + +def skip_if(predicate, reason=None): + rule = compound() + pred = _as_predicate(predicate, reason) + rule.skips.add(pred) + return rule + + +def fails_if(predicate, reason=None): + rule = compound() + pred = _as_predicate(predicate, reason) + rule.fails.add(pred) + return rule + + +class compound(object): + def __init__(self): + self.fails = set() + self.skips = set() + self.tags = set() + + def __add__(self, other): + return self.add(other) + + def add(self, *others): + copy = compound() + copy.fails.update(self.fails) + copy.skips.update(self.skips) + copy.tags.update(self.tags) + for other in others: + copy.fails.update(other.fails) + copy.skips.update(other.skips) + copy.tags.update(other.tags) + return copy + + def not_(self): + copy = compound() + copy.fails.update(NotPredicate(fail) for fail in self.fails) + copy.skips.update(NotPredicate(skip) for skip in self.skips) + copy.tags.update(self.tags) + return copy + + @property + def enabled(self): + return self.enabled_for_config(config._current) + + def enabled_for_config(self, config): + for predicate in self.skips.union(self.fails): + if predicate(config): + return False + else: + return True + + def matching_config_reasons(self, config): + return [ + predicate._as_string(config) for predicate + in self.skips.union(self.fails) + if predicate(config) + ] + + def include_test(self, include_tags, exclude_tags): + return bool( + not self.tags.intersection(exclude_tags) and + (not include_tags or self.tags.intersection(include_tags)) + ) + + def _extend(self, other): + self.skips.update(other.skips) + self.fails.update(other.fails) + self.tags.update(other.tags) + + def __call__(self, fn): + if 
hasattr(fn, '_sa_exclusion_extend'):
+            fn._sa_exclusion_extend._extend(self)
+            return fn
+
+        @decorator
+        def decorate(fn, *args, **kw):
+            return self._do(config._current, fn, *args, **kw)
+        decorated = decorate(fn)
+        decorated._sa_exclusion_extend = self
+        return decorated
+
+    @contextlib.contextmanager
+    def fail_if(self):
+        all_fails = compound()
+        all_fails.fails.update(self.skips.union(self.fails))
+
+        try:
+            yield
+        except Exception as ex:
+            all_fails._expect_failure(config._current, ex)
+        else:
+            all_fails._expect_success(config._current)
+
+    def _do(self, config, fn, *args, **kw):
+        for skip in self.skips:
+            if skip(config):
+                msg = "'%s' : %s" % (
+                    fn.__name__,
+                    skip._as_string(config)
+                )
+                config.skip_test(msg)
+
+        try:
+            return_value = fn(*args, **kw)
+        except Exception as ex:
+            self._expect_failure(config, ex, name=fn.__name__)
+        else:
+            self._expect_success(config, name=fn.__name__)
+            return return_value
+
+    def _expect_failure(self, config, ex, name='block'):
+        for fail in self.fails:
+            if fail(config):
+                print(("%s failed as expected (%s): %s " % (
+                    name, fail._as_string(config), str(ex))))
+                break
+        else:
+            util.raise_from_cause(ex)
+
+    def _expect_success(self, config, name='block'):
+        if not self.fails:
+            return
+        for fail in self.fails:
+            if not fail(config):
+                break
+        else:
+            raise AssertionError(
+                "Unexpected success for '%s' (%s)" %
+                (
+                    name,
+                    " and ".join(
+                        fail._as_string(config)
+                        for fail in self.fails
+                    )
+                )
+            )
+
+
+def requires_tag(tagname):
+    return tags([tagname])
+
+
+def tags(tagnames):
+    comp = compound()
+    comp.tags.update(tagnames)
+    return comp
+
+
+def only_if(predicate, reason=None):
+    predicate = _as_predicate(predicate)
+    return skip_if(NotPredicate(predicate), reason)
+
+
+def succeeds_if(predicate, reason=None):
+    predicate = _as_predicate(predicate)
+    return fails_if(NotPredicate(predicate), reason)
+
+
+class Predicate(object):
+    @classmethod
+    def as_predicate(cls, predicate, description=None):
+        if isinstance(predicate, compound):
+            return cls.as_predicate(predicate.enabled_for_config, description)
+        elif isinstance(predicate, Predicate):
+            if description and predicate.description is None:
+                predicate.description = description
+            return predicate
+        elif isinstance(predicate, (list, set)):
+            return OrPredicate(
+                [cls.as_predicate(pred) for pred in predicate],
+                description)
+        elif isinstance(predicate, tuple):
+            return SpecPredicate(*predicate)
+        elif isinstance(predicate, util.string_types):
+            tokens = predicate.split(" ", 2)
+            op = spec = None
+            db = tokens.pop(0)
+            if tokens:
+                op = tokens.pop(0)
+            if tokens:
+                spec = tuple(int(d) for d in tokens.pop(0).split("."))
+            return SpecPredicate(db, op, spec, description=description)
+        elif util.callable(predicate):
+            return LambdaPredicate(predicate, description)
+        else:
+            assert False, "unknown predicate type: %s" % predicate
+
+    def _format_description(self, config, negate=False):
+        bool_ = self(config)
+        if negate:
+            bool_ = not bool_
+        return self.description % {
+            "driver": config.db.url.get_driver_name(),
+            "database": config.db.url.get_backend_name(),
+            "doesnt_support": "doesn't support" if bool_ else "does support",
+            "does_support": "does support" if bool_ else "doesn't support"
+        }
+
+    def _as_string(self, config=None, negate=False):
+        raise NotImplementedError()
+
+
+class BooleanPredicate(Predicate):
+    def __init__(self, value, description=None):
+        self.value = value
+        self.description = description or "boolean %s" % value
+
+    def __call__(self, config):
+        return self.value
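+
+    # BooleanPredicate ignores the config it is handed and reports a
+    # fixed value; the open()/closed()/fails() helpers later in this
+    # module build their always-on/always-off rules from it.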
+ + def _as_string(self, config, negate=False): + return self._format_description(config, negate=negate) + + +class SpecPredicate(Predicate): + def __init__(self, db, op=None, spec=None, description=None): + self.db = db + self.op = op + self.spec = spec + self.description = description + + _ops = { + '<': operator.lt, + '>': operator.gt, + '==': operator.eq, + '!=': operator.ne, + '<=': operator.le, + '>=': operator.ge, + 'in': operator.contains, + 'between': lambda val, pair: val >= pair[0] and val <= pair[1], + } + + def __call__(self, config): + engine = config.db + + if "+" in self.db: + dialect, driver = self.db.split('+') + else: + dialect, driver = self.db, None + + if dialect and engine.name != dialect: + return False + if driver is not None and engine.driver != driver: + return False + + if self.op is not None: + assert driver is None, "DBAPI version specs not supported yet" + + version = _server_version(engine) + oper = hasattr(self.op, '__call__') and self.op \ + or self._ops[self.op] + return oper(version, self.spec) + else: + return True + + def _as_string(self, config, negate=False): + if self.description is not None: + return self._format_description(config) + elif self.op is None: + if negate: + return "not %s" % self.db + else: + return "%s" % self.db + else: + if negate: + return "not %s %s %s" % ( + self.db, + self.op, + self.spec + ) + else: + return "%s %s %s" % ( + self.db, + self.op, + self.spec + ) + + +class LambdaPredicate(Predicate): + def __init__(self, lambda_, description=None, args=None, kw=None): + spec = inspect_getargspec(lambda_) + if not spec[0]: + self.lambda_ = lambda db: lambda_() + else: + self.lambda_ = lambda_ + self.args = args or () + self.kw = kw or {} + if description: + self.description = description + elif lambda_.__doc__: + self.description = lambda_.__doc__ + else: + self.description = "custom function" + + def __call__(self, config): + return self.lambda_(config) + + def _as_string(self, config, negate=False): + return self._format_description(config) + + +class NotPredicate(Predicate): + def __init__(self, predicate, description=None): + self.predicate = predicate + self.description = description + + def __call__(self, config): + return not self.predicate(config) + + def _as_string(self, config, negate=False): + if self.description: + return self._format_description(config, not negate) + else: + return self.predicate._as_string(config, not negate) + + +class OrPredicate(Predicate): + def __init__(self, predicates, description=None): + self.predicates = predicates + self.description = description + + def __call__(self, config): + for pred in self.predicates: + if pred(config): + return True + return False + + def _eval_str(self, config, negate=False): + if negate: + conjunction = " and " + else: + conjunction = " or " + return conjunction.join(p._as_string(config, negate=negate) + for p in self.predicates) + + def _negation_str(self, config): + if self.description is not None: + return "Not " + self._format_description(config) + else: + return self._eval_str(config, negate=True) + + def _as_string(self, config, negate=False): + if negate: + return self._negation_str(config) + else: + if self.description is not None: + return self._format_description(config) + else: + return self._eval_str(config) + + +_as_predicate = Predicate.as_predicate + + +def _is_excluded(db, op, spec): + return SpecPredicate(db, op, spec)(config._current) + + +def _server_version(engine): + """Return a server_version_info tuple.""" + + # force metadata to be 
retrieved + conn = engine.connect() + version = getattr(engine.dialect, 'server_version_info', ()) + conn.close() + return version + + +def db_spec(*dbs): + return OrPredicate( + [Predicate.as_predicate(db) for db in dbs] + ) + + +def open(): + return skip_if(BooleanPredicate(False, "mark as execute")) + + +def closed(): + return skip_if(BooleanPredicate(True, "marked as skip")) + + +def fails(reason=None): + return fails_if(BooleanPredicate(True, reason or "expected to fail")) + + +@decorator +def future(fn, *arg): + return fails_if(LambdaPredicate(fn), "Future feature") + + +def fails_on(db, reason=None): + return fails_if(SpecPredicate(db), reason) + + +def fails_on_everything_except(*dbs): + return succeeds_if( + OrPredicate([ + SpecPredicate(db) for db in dbs + ]) + ) + + +def skip(db, reason=None): + return skip_if(SpecPredicate(db), reason) + + +def only_on(dbs, reason=None): + return only_if( + OrPredicate([Predicate.as_predicate(db) for db in util.to_list(dbs)]) + ) + + +def exclude(db, op, spec, reason=None): + return skip_if(SpecPredicate(db, op, spec), reason) + + +def against(config, *queries): + assert queries, "no queries sent!" + return OrPredicate([ + Predicate.as_predicate(query) + for query in queries + ])(config) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/fixtures.py b/lib/python3.4/site-packages/sqlalchemy/testing/fixtures.py new file mode 100644 index 0000000..d6712b4 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/fixtures.py @@ -0,0 +1,386 @@ +# testing/fixtures.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from . import config +from . import assertions, schema +from .util import adict +from .. import util +from .engines import drop_all_tables +from .entities import BasicEntity, ComparableEntity +import sys +import sqlalchemy as sa +from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta + +# whether or not we use unittest changes things dramatically, +# as far as how py.test collection works. + + +class TestBase(object): + # A sequence of database names to always run, regardless of the + # constraints below. + __whitelist__ = () + + # A sequence of requirement names matching testing.requires decorators + __requires__ = () + + # A sequence of dialect names to exclude from the test class. + __unsupported_on__ = () + + # If present, test class is only runnable for the *single* specified + # dialect. If you need multiple, use __unsupported_on__ and invert. + __only_on__ = None + + # A sequence of no-arg callables. If any are True, the entire testcase is + # skipped. 
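+    # For example (an illustrative, hypothetical guard):
+    #
+    #     __skip_if__ = (lambda: sys.platform == 'win32', )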
+ __skip_if__ = None + + def assert_(self, val, msg=None): + assert val, msg + + # apparently a handful of tests are doing this....OK + def setup(self): + if hasattr(self, "setUp"): + self.setUp() + + def teardown(self): + if hasattr(self, "tearDown"): + self.tearDown() + + +class TablesTest(TestBase): + + # 'once', None + run_setup_bind = 'once' + + # 'once', 'each', None + run_define_tables = 'once' + + # 'once', 'each', None + run_create_tables = 'once' + + # 'once', 'each', None + run_inserts = 'each' + + # 'each', None + run_deletes = 'each' + + # 'once', None + run_dispose_bind = None + + bind = None + metadata = None + tables = None + other = None + + @classmethod + def setup_class(cls): + cls._init_class() + + cls._setup_once_tables() + + cls._setup_once_inserts() + + @classmethod + def _init_class(cls): + if cls.run_define_tables == 'each': + if cls.run_create_tables == 'once': + cls.run_create_tables = 'each' + assert cls.run_inserts in ('each', None) + + cls.other = adict() + cls.tables = adict() + + cls.bind = cls.setup_bind() + cls.metadata = sa.MetaData() + cls.metadata.bind = cls.bind + + @classmethod + def _setup_once_inserts(cls): + if cls.run_inserts == 'once': + cls._load_fixtures() + cls.insert_data() + + @classmethod + def _setup_once_tables(cls): + if cls.run_define_tables == 'once': + cls.define_tables(cls.metadata) + if cls.run_create_tables == 'once': + cls.metadata.create_all(cls.bind) + cls.tables.update(cls.metadata.tables) + + def _setup_each_tables(self): + if self.run_define_tables == 'each': + self.tables.clear() + if self.run_create_tables == 'each': + drop_all_tables(self.metadata, self.bind) + self.metadata.clear() + self.define_tables(self.metadata) + if self.run_create_tables == 'each': + self.metadata.create_all(self.bind) + self.tables.update(self.metadata.tables) + elif self.run_create_tables == 'each': + drop_all_tables(self.metadata, self.bind) + self.metadata.create_all(self.bind) + + def _setup_each_inserts(self): + if self.run_inserts == 'each': + self._load_fixtures() + self.insert_data() + + def _teardown_each_tables(self): + # no need to run deletes if tables are recreated on setup + if self.run_define_tables != 'each' and self.run_deletes == 'each': + with self.bind.connect() as conn: + for table in reversed(self.metadata.sorted_tables): + try: + conn.execute(table.delete()) + except sa.exc.DBAPIError as ex: + util.print_( + ("Error emptying table %s: %r" % (table, ex)), + file=sys.stderr) + + def setup(self): + self._setup_each_tables() + self._setup_each_inserts() + + def teardown(self): + self._teardown_each_tables() + + @classmethod + def _teardown_once_metadata_bind(cls): + if cls.run_create_tables: + drop_all_tables(cls.metadata, cls.bind) + + if cls.run_dispose_bind == 'once': + cls.dispose_bind(cls.bind) + + cls.metadata.bind = None + + if cls.run_setup_bind is not None: + cls.bind = None + + @classmethod + def teardown_class(cls): + cls._teardown_once_metadata_bind() + + @classmethod + def setup_bind(cls): + return config.db + + @classmethod + def dispose_bind(cls, bind): + if hasattr(bind, 'dispose'): + bind.dispose() + elif hasattr(bind, 'close'): + bind.close() + + @classmethod + def define_tables(cls, metadata): + pass + + @classmethod + def fixtures(cls): + return {} + + @classmethod + def insert_data(cls): + pass + + def sql_count_(self, count, fn): + self.assert_sql_count(self.bind, fn, count) + + def sql_eq_(self, callable_, statements): + self.assert_sql(self.bind, callable_, statements) + + @classmethod + def 
_load_fixtures(cls): + """Insert rows as represented by the fixtures() method.""" + headers, rows = {}, {} + for table, data in cls.fixtures().items(): + if len(data) < 2: + continue + if isinstance(table, util.string_types): + table = cls.tables[table] + headers[table] = data[0] + rows[table] = data[1:] + for table in cls.metadata.sorted_tables: + if table not in headers: + continue + cls.bind.execute( + table.insert(), + [dict(zip(headers[table], column_values)) + for column_values in rows[table]]) + +from sqlalchemy import event + + +class RemovesEvents(object): + @util.memoized_property + def _event_fns(self): + return set() + + def event_listen(self, target, name, fn): + self._event_fns.add((target, name, fn)) + event.listen(target, name, fn) + + def teardown(self): + for key in self._event_fns: + event.remove(*key) + super_ = super(RemovesEvents, self) + if hasattr(super_, "teardown"): + super_.teardown() + + +class _ORMTest(object): + + @classmethod + def teardown_class(cls): + sa.orm.session.Session.close_all() + sa.orm.clear_mappers() + + +class ORMTest(_ORMTest, TestBase): + pass + + +class MappedTest(_ORMTest, TablesTest, assertions.AssertsExecutionResults): + # 'once', 'each', None + run_setup_classes = 'once' + + # 'once', 'each', None + run_setup_mappers = 'each' + + classes = None + + @classmethod + def setup_class(cls): + cls._init_class() + + if cls.classes is None: + cls.classes = adict() + + cls._setup_once_tables() + cls._setup_once_classes() + cls._setup_once_mappers() + cls._setup_once_inserts() + + @classmethod + def teardown_class(cls): + cls._teardown_once_class() + cls._teardown_once_metadata_bind() + + def setup(self): + self._setup_each_tables() + self._setup_each_classes() + self._setup_each_mappers() + self._setup_each_inserts() + + def teardown(self): + sa.orm.session.Session.close_all() + self._teardown_each_mappers() + self._teardown_each_classes() + self._teardown_each_tables() + + @classmethod + def _teardown_once_class(cls): + cls.classes.clear() + _ORMTest.teardown_class() + + @classmethod + def _setup_once_classes(cls): + if cls.run_setup_classes == 'once': + cls._with_register_classes(cls.setup_classes) + + @classmethod + def _setup_once_mappers(cls): + if cls.run_setup_mappers == 'once': + cls._with_register_classes(cls.setup_mappers) + + def _setup_each_mappers(self): + if self.run_setup_mappers == 'each': + self._with_register_classes(self.setup_mappers) + + def _setup_each_classes(self): + if self.run_setup_classes == 'each': + self._with_register_classes(self.setup_classes) + + @classmethod + def _with_register_classes(cls, fn): + """Run a setup method, framing the operation with a Base class + that will catch new subclasses to be established within + the "classes" registry. 
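+
+        e.g. a class named "User" defined inside the setup method (by
+        subclassing cls.Basic or cls.Comparable) becomes reachable
+        afterwards as cls.classes.User (name illustrative).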
+ + """ + cls_registry = cls.classes + + class FindFixture(type): + def __init__(cls, classname, bases, dict_): + cls_registry[classname] = cls + return type.__init__(cls, classname, bases, dict_) + + class _Base(util.with_metaclass(FindFixture, object)): + pass + + class Basic(BasicEntity, _Base): + pass + + class Comparable(ComparableEntity, _Base): + pass + + cls.Basic = Basic + cls.Comparable = Comparable + fn() + + def _teardown_each_mappers(self): + # some tests create mappers in the test bodies + # and will define setup_mappers as None - + # clear mappers in any case + if self.run_setup_mappers != 'once': + sa.orm.clear_mappers() + + def _teardown_each_classes(self): + if self.run_setup_classes != 'once': + self.classes.clear() + + @classmethod + def setup_classes(cls): + pass + + @classmethod + def setup_mappers(cls): + pass + + +class DeclarativeMappedTest(MappedTest): + run_setup_classes = 'once' + run_setup_mappers = 'once' + + @classmethod + def _setup_once_tables(cls): + pass + + @classmethod + def _with_register_classes(cls, fn): + cls_registry = cls.classes + + class FindFixtureDeclarative(DeclarativeMeta): + def __init__(cls, classname, bases, dict_): + cls_registry[classname] = cls + return DeclarativeMeta.__init__( + cls, classname, bases, dict_) + + class DeclarativeBasic(object): + __table_cls__ = schema.Table + + _DeclBase = declarative_base(metadata=cls.metadata, + metaclass=FindFixtureDeclarative, + cls=DeclarativeBasic) + cls.DeclarativeBasic = _DeclBase + fn() + + if cls.metadata.tables and cls.run_create_tables: + cls.metadata.create_all(config.db) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/mock.py b/lib/python3.4/site-packages/sqlalchemy/testing/mock.py new file mode 100644 index 0000000..674f085 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/mock.py @@ -0,0 +1,21 @@ +# testing/mock.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Import stub for mock library. +""" +from __future__ import absolute_import +from ..util import py33 + +if py33: + from unittest.mock import MagicMock, Mock, call, patch, ANY +else: + try: + from mock import MagicMock, Mock, call, patch, ANY + except ImportError: + raise ImportError( + "SQLAlchemy's test suite requires the " + "'mock' library as of 0.8.2.") diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/pickleable.py b/lib/python3.4/site-packages/sqlalchemy/testing/pickleable.py new file mode 100644 index 0000000..d6814a1 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/pickleable.py @@ -0,0 +1,143 @@ +# testing/pickleable.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Classes used in pickling tests, need to be at the module level for +unpickling. +""" + +from . import fixtures + + +class User(fixtures.ComparableEntity): + pass + + +class Order(fixtures.ComparableEntity): + pass + + +class Dingaling(fixtures.ComparableEntity): + pass + + +class EmailUser(User): + pass + + +class Address(fixtures.ComparableEntity): + pass + + +# TODO: these are kind of arbitrary.... 
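+# An illustrative round-trip these module-level classes exist to support
+# (hypothetical snippet, not part of this module):
+#
+#     import pickle
+#     u = User(id=1, name='ed')
+#     assert pickle.loads(pickle.dumps(u)) == u
+#
+# The same class defined inside a test function would fail to unpickle,
+# since pickle stores only the module-qualified class name and re-imports
+# it on load.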
+class Child1(fixtures.ComparableEntity): + pass + + +class Child2(fixtures.ComparableEntity): + pass + + +class Parent(fixtures.ComparableEntity): + pass + + +class Screen(object): + + def __init__(self, obj, parent=None): + self.obj = obj + self.parent = parent + + +class Foo(object): + + def __init__(self, moredata): + self.data = 'im data' + self.stuff = 'im stuff' + self.moredata = moredata + + __hash__ = object.__hash__ + + def __eq__(self, other): + return other.data == self.data and \ + other.stuff == self.stuff and \ + other.moredata == self.moredata + + +class Bar(object): + + def __init__(self, x, y): + self.x = x + self.y = y + + __hash__ = object.__hash__ + + def __eq__(self, other): + return other.__class__ is self.__class__ and \ + other.x == self.x and \ + other.y == self.y + + def __str__(self): + return "Bar(%d, %d)" % (self.x, self.y) + + +class OldSchool: + + def __init__(self, x, y): + self.x = x + self.y = y + + def __eq__(self, other): + return other.__class__ is self.__class__ and \ + other.x == self.x and \ + other.y == self.y + + +class OldSchoolWithoutCompare: + + def __init__(self, x, y): + self.x = x + self.y = y + + +class BarWithoutCompare(object): + + def __init__(self, x, y): + self.x = x + self.y = y + + def __str__(self): + return "Bar(%d, %d)" % (self.x, self.y) + + +class NotComparable(object): + + def __init__(self, data): + self.data = data + + def __hash__(self): + return id(self) + + def __eq__(self, other): + return NotImplemented + + def __ne__(self, other): + return NotImplemented + + +class BrokenComparable(object): + + def __init__(self, data): + self.data = data + + def __hash__(self): + return id(self) + + def __eq__(self, other): + raise NotImplementedError + + def __ne__(self, other): + raise NotImplementedError diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/plugin/__init__.py b/lib/python3.4/site-packages/sqlalchemy/testing/plugin/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/plugin/bootstrap.py b/lib/python3.4/site-packages/sqlalchemy/testing/plugin/bootstrap.py new file mode 100644 index 0000000..497fcb7 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/plugin/bootstrap.py @@ -0,0 +1,44 @@ +""" +Bootstrapper for nose/pytest plugins. + +The entire rationale for this system is to get the modules in plugin/ +imported without importing all of the supporting library, so that we can +set up things for testing before coverage starts. + +The rationale for all of plugin/ being *in* the supporting library in the +first place is so that the testing and plugin suite is available to other +libraries, mainly external SQLAlchemy and Alembic dialects, to make use +of the same test environment and standard suites available to +SQLAlchemy/Alembic themselves without the need to ship/install a separate +package outside of SQLAlchemy. + +NOTE: copied/adapted from SQLAlchemy master for backwards compatibility; +this should be removable when Alembic targets SQLAlchemy 1.0.0. 
+ +""" + +import os +import sys + +bootstrap_file = locals()['bootstrap_file'] +to_bootstrap = locals()['to_bootstrap'] + + +def load_file_as_module(name): + path = os.path.join(os.path.dirname(bootstrap_file), "%s.py" % name) + if sys.version_info >= (3, 3): + from importlib import machinery + mod = machinery.SourceFileLoader(name, path).load_module() + else: + import imp + mod = imp.load_source(name, path) + return mod + +if to_bootstrap == "pytest": + sys.modules["sqla_plugin_base"] = load_file_as_module("plugin_base") + sys.modules["sqla_pytestplugin"] = load_file_as_module("pytestplugin") +elif to_bootstrap == "nose": + sys.modules["sqla_plugin_base"] = load_file_as_module("plugin_base") + sys.modules["sqla_noseplugin"] = load_file_as_module("noseplugin") +else: + raise Exception("unknown bootstrap: %s" % to_bootstrap) # noqa diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/plugin/noseplugin.py b/lib/python3.4/site-packages/sqlalchemy/testing/plugin/noseplugin.py new file mode 100644 index 0000000..9fc5848 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/plugin/noseplugin.py @@ -0,0 +1,107 @@ +# plugin/noseplugin.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Enhance nose with extra options and behaviors for running SQLAlchemy tests. + +Must be run via ./sqla_nose.py so that it is imported in the expected +way (e.g. as a package-less import). + +""" + +try: + # installed by bootstrap.py + import sqla_plugin_base as plugin_base +except ImportError: + # assume we're a package, use traditional import + from . import plugin_base + + +import os +import sys + +from nose.plugins import Plugin +import nose +fixtures = None + +py3k = sys.version_info >= (3, 0) + + +class NoseSQLAlchemy(Plugin): + enabled = True + + name = 'sqla_testing' + score = 100 + + def options(self, parser, env=os.environ): + Plugin.options(self, parser, env) + opt = parser.add_option + + def make_option(name, **kw): + callback_ = kw.pop("callback", None) + if callback_: + def wrap_(option, opt_str, value, parser): + callback_(opt_str, value, parser) + kw["callback"] = wrap_ + opt(name, **kw) + + plugin_base.setup_options(make_option) + plugin_base.read_config() + + def configure(self, options, conf): + super(NoseSQLAlchemy, self).configure(options, conf) + plugin_base.pre_begin(options) + + plugin_base.set_coverage_flag(options.enable_plugin_coverage) + + plugin_base.set_skip_test(nose.SkipTest) + + def begin(self): + global fixtures + from sqlalchemy.testing import fixtures # noqa + + plugin_base.post_begin() + + def describeTest(self, test): + return "" + + def wantFunction(self, fn): + return False + + def wantMethod(self, fn): + if py3k: + if not hasattr(fn.__self__, 'cls'): + return False + cls = fn.__self__.cls + else: + cls = fn.im_class + return plugin_base.want_method(cls, fn) + + def wantClass(self, cls): + return plugin_base.want_class(cls) + + def beforeTest(self, test): + if not hasattr(test.test, 'cls'): + return + plugin_base.before_test( + test, + test.test.cls.__module__, + test.test.cls, test.test.method.__name__) + + def afterTest(self, test): + plugin_base.after_test(test) + + def startContext(self, ctx): + if not isinstance(ctx, type) \ + or not issubclass(ctx, fixtures.TestBase): + return + plugin_base.start_test_class(ctx) + + def stopContext(self, ctx): + if not isinstance(ctx, type) \ + or not 
issubclass(ctx, fixtures.TestBase): + return + plugin_base.stop_test_class(ctx) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/plugin/plugin_base.py b/lib/python3.4/site-packages/sqlalchemy/testing/plugin/plugin_base.py new file mode 100644 index 0000000..84258df --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/plugin/plugin_base.py @@ -0,0 +1,552 @@ +# plugin/plugin_base.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Testing extensions. + +this module is designed to work as a testing-framework-agnostic library, +so that we can continue to support nose and also begin adding new +functionality via py.test. + +""" + +from __future__ import absolute_import + +import sys +import re + +py3k = sys.version_info >= (3, 0) + +if py3k: + import configparser +else: + import ConfigParser as configparser + +# late imports +fixtures = None +engines = None +exclusions = None +warnings = None +profiling = None +assertions = None +requirements = None +config = None +testing = None +util = None +file_config = None + + +logging = None +include_tags = set() +exclude_tags = set() +options = None + + +def setup_options(make_option): + make_option("--log-info", action="callback", type="string", callback=_log, + help="turn on info logging for (multiple OK)") + make_option("--log-debug", action="callback", + type="string", callback=_log, + help="turn on debug logging for (multiple OK)") + make_option("--db", action="append", type="string", dest="db", + help="Use prefab database uri. Multiple OK, " + "first one is run by default.") + make_option('--dbs', action='callback', callback=_list_dbs, + help="List available prefab dbs") + make_option("--dburi", action="append", type="string", dest="dburi", + help="Database uri. Multiple OK, " + "first one is run by default.") + make_option("--dropfirst", action="store_true", dest="dropfirst", + help="Drop all tables in the target database first") + make_option("--backend-only", action="store_true", dest="backend_only", + help="Run only tests marked with __backend__") + make_option("--low-connections", action="store_true", + dest="low_connections", + help="Use a low number of distinct connections - " + "i.e. for Oracle TNS") + make_option("--reversetop", action="store_true", + dest="reversetop", default=False, + help="Use a random-ordering set implementation in the ORM " + "(helps reveal dependency issues)") + make_option("--requirements", action="callback", type="string", + callback=_requirements_opt, + help="requirements class for testing, overrides setup.cfg") + make_option("--with-cdecimal", action="store_true", + dest="cdecimal", default=False, + help="Monkeypatch the cdecimal library into Python 'decimal' " + "for all tests") + make_option("--include-tag", action="callback", callback=_include_tag, + type="string", + help="Include tests with tag ") + make_option("--exclude-tag", action="callback", callback=_exclude_tag, + type="string", + help="Exclude tests with tag ") + make_option("--write-profiles", action="store_true", + dest="write_profiles", default=False, + help="Write/update failing profiling data.") + make_option("--force-write-profiles", action="store_true", + dest="force_write_profiles", default=False, + help="Unconditionally write/update profiling data.") + + +def configure_follower(follower_ident): + """Configure required state for a follower. 
+ + This invokes in the parent process and typically includes + database creation. + + """ + from sqlalchemy.testing import provision + provision.FOLLOWER_IDENT = follower_ident + + +def memoize_important_follower_config(dict_): + """Store important configuration we will need to send to a follower. + + This invokes in the parent process after normal config is set up. + + This is necessary as py.test seems to not be using forking, so we + start with nothing in memory, *but* it isn't running our argparse + callables, so we have to just copy all of that over. + + """ + dict_['memoized_config'] = { + 'include_tags': include_tags, + 'exclude_tags': exclude_tags + } + + +def restore_important_follower_config(dict_): + """Restore important configuration needed by a follower. + + This invokes in the follower process. + + """ + global include_tags, exclude_tags + include_tags.update(dict_['memoized_config']['include_tags']) + exclude_tags.update(dict_['memoized_config']['exclude_tags']) + + +def read_config(): + global file_config + file_config = configparser.ConfigParser() + file_config.read(['setup.cfg', 'test.cfg']) + + +def pre_begin(opt): + """things to set up early, before coverage might be setup.""" + global options + options = opt + for fn in pre_configure: + fn(options, file_config) + + +def set_coverage_flag(value): + options.has_coverage = value + +_skip_test_exception = None + + +def set_skip_test(exc): + global _skip_test_exception + _skip_test_exception = exc + + +def post_begin(): + """things to set up later, once we know coverage is running.""" + # Lazy setup of other options (post coverage) + for fn in post_configure: + fn(options, file_config) + + # late imports, has to happen after config as well + # as nose plugins like coverage + global util, fixtures, engines, exclusions, \ + assertions, warnings, profiling,\ + config, testing + from sqlalchemy import testing # noqa + from sqlalchemy.testing import fixtures, engines, exclusions # noqa + from sqlalchemy.testing import assertions, warnings, profiling # noqa + from sqlalchemy.testing import config # noqa + from sqlalchemy import util # noqa + warnings.setup_filters() + + +def _log(opt_str, value, parser): + global logging + if not logging: + import logging + logging.basicConfig() + + if opt_str.endswith('-info'): + logging.getLogger(value).setLevel(logging.INFO) + elif opt_str.endswith('-debug'): + logging.getLogger(value).setLevel(logging.DEBUG) + + +def _list_dbs(*args): + print("Available --db options (use --dburi to override)") + for macro in sorted(file_config.options('db')): + print("%20s\t%s" % (macro, file_config.get('db', macro))) + sys.exit(0) + + +def _requirements_opt(opt_str, value, parser): + _setup_requirements(value) + + +def _exclude_tag(opt_str, value, parser): + exclude_tags.add(value.replace('-', '_')) + + +def _include_tag(opt_str, value, parser): + include_tags.add(value.replace('-', '_')) + +pre_configure = [] +post_configure = [] + + +def pre(fn): + pre_configure.append(fn) + return fn + + +def post(fn): + post_configure.append(fn) + return fn + + +@pre +def _setup_options(opt, file_config): + global options + options = opt + + +@pre +def _monkeypatch_cdecimal(options, file_config): + if options.cdecimal: + import cdecimal + sys.modules['decimal'] = cdecimal + + +@post +def _init_skiptest(options, file_config): + from sqlalchemy.testing import config + + config._skip_test_exception = _skip_test_exception + + +@post +def _engine_uri(options, file_config): + from sqlalchemy.testing import config + from 
sqlalchemy import testing + from sqlalchemy.testing import provision + + if options.dburi: + db_urls = list(options.dburi) + else: + db_urls = [] + + if options.db: + for db_token in options.db: + for db in re.split(r'[,\s]+', db_token): + if db not in file_config.options('db'): + raise RuntimeError( + "Unknown URI specifier '%s'. " + "Specify --dbs for known uris." + % db) + else: + db_urls.append(file_config.get('db', db)) + + if not db_urls: + db_urls.append(file_config.get('db', 'default')) + + for db_url in db_urls: + cfg = provision.setup_config( + db_url, options, file_config, provision.FOLLOWER_IDENT) + + if not config._current: + cfg.set_as_current(cfg, testing) + + +@post +def _requirements(options, file_config): + + requirement_cls = file_config.get('sqla_testing', "requirement_cls") + _setup_requirements(requirement_cls) + + +def _setup_requirements(argument): + from sqlalchemy.testing import config + from sqlalchemy import testing + + if config.requirements is not None: + return + + modname, clsname = argument.split(":") + + # importlib.import_module() only introduced in 2.7, a little + # late + mod = __import__(modname) + for component in modname.split(".")[1:]: + mod = getattr(mod, component) + req_cls = getattr(mod, clsname) + + config.requirements = testing.requires = req_cls() + + +@post +def _prep_testing_database(options, file_config): + from sqlalchemy.testing import config, util + from sqlalchemy.testing.exclusions import against + from sqlalchemy import schema, inspect + + if options.dropfirst: + for cfg in config.Config.all_configs(): + e = cfg.db + inspector = inspect(e) + try: + view_names = inspector.get_view_names() + except NotImplementedError: + pass + else: + for vname in view_names: + e.execute(schema._DropView( + schema.Table(vname, schema.MetaData()) + )) + + if config.requirements.schemas.enabled_for_config(cfg): + try: + view_names = inspector.get_view_names( + schema="test_schema") + except NotImplementedError: + pass + else: + for vname in view_names: + e.execute(schema._DropView( + schema.Table(vname, schema.MetaData(), + schema="test_schema") + )) + + util.drop_all_tables(e, inspector) + + if config.requirements.schemas.enabled_for_config(cfg): + util.drop_all_tables(e, inspector, schema=cfg.test_schema) + + if against(cfg, "postgresql"): + from sqlalchemy.dialects import postgresql + for enum in inspector.get_enums("*"): + e.execute(postgresql.DropEnumType( + postgresql.ENUM( + name=enum['name'], + schema=enum['schema']))) + + +@post +def _reverse_topological(options, file_config): + if options.reversetop: + from sqlalchemy.orm.util import randomize_unitofwork + randomize_unitofwork() + + +@post +def _post_setup_options(opt, file_config): + from sqlalchemy.testing import config + config.options = options + config.file_config = file_config + + +@post +def _setup_profiling(options, file_config): + from sqlalchemy.testing import profiling + profiling._profile_stats = profiling.ProfileStatsFile( + file_config.get('sqla_testing', 'profile_file')) + + +def want_class(cls): + if not issubclass(cls, fixtures.TestBase): + return False + elif cls.__name__.startswith('_'): + return False + elif config.options.backend_only and not getattr(cls, '__backend__', + False): + return False + else: + return True + + +def want_method(cls, fn): + if not fn.__name__.startswith("test_"): + return False + elif fn.__module__ is None: + return False + elif include_tags: + return ( + hasattr(cls, '__tags__') and + exclusions.tags(cls.__tags__).include_test( + include_tags, 
exclude_tags) + ) or ( + hasattr(fn, '_sa_exclusion_extend') and + fn._sa_exclusion_extend.include_test( + include_tags, exclude_tags) + ) + elif exclude_tags and hasattr(cls, '__tags__'): + return exclusions.tags(cls.__tags__).include_test( + include_tags, exclude_tags) + elif exclude_tags and hasattr(fn, '_sa_exclusion_extend'): + return fn._sa_exclusion_extend.include_test(include_tags, exclude_tags) + else: + return True + + +def generate_sub_tests(cls, module): + if getattr(cls, '__backend__', False): + for cfg in _possible_configs_for_cls(cls): + name = "%s_%s_%s" % (cls.__name__, cfg.db.name, cfg.db.driver) + subcls = type( + name, + (cls, ), + { + "__only_on__": ("%s+%s" % (cfg.db.name, cfg.db.driver)), + } + ) + setattr(module, name, subcls) + yield subcls + else: + yield cls + + +def start_test_class(cls): + _do_skips(cls) + _setup_engine(cls) + + +def stop_test_class(cls): + #from sqlalchemy import inspect + #assert not inspect(testing.db).get_table_names() + engines.testing_reaper._stop_test_ctx() + if not options.low_connections: + assertions.global_cleanup_assertions() + _restore_engine() + + +def _restore_engine(): + config._current.reset(testing) + + +def _setup_engine(cls): + if getattr(cls, '__engine_options__', None): + eng = engines.testing_engine(options=cls.__engine_options__) + config._current.push_engine(eng, testing) + + +def before_test(test, test_module_name, test_class, test_name): + + # like a nose id, e.g.: + # "test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause" + name = test_class.__name__ + + suffix = "_%s_%s" % (config.db.name, config.db.driver) + if name.endswith(suffix): + name = name[0:-(len(suffix))] + + id_ = "%s.%s.%s" % (test_module_name, name, test_name) + + profiling._current_test = id_ + + +def after_test(test): + engines.testing_reaper._after_test_ctx() + + +def _possible_configs_for_cls(cls, reasons=None): + all_configs = set(config.Config.all_configs()) + + if cls.__unsupported_on__: + spec = exclusions.db_spec(*cls.__unsupported_on__) + for config_obj in list(all_configs): + if spec(config_obj): + all_configs.remove(config_obj) + + if getattr(cls, '__only_on__', None): + spec = exclusions.db_spec(*util.to_list(cls.__only_on__)) + for config_obj in list(all_configs): + if not spec(config_obj): + all_configs.remove(config_obj) + + if hasattr(cls, '__requires__'): + requirements = config.requirements + for config_obj in list(all_configs): + for requirement in cls.__requires__: + check = getattr(requirements, requirement) + + skip_reasons = check.matching_config_reasons(config_obj) + if skip_reasons: + all_configs.remove(config_obj) + if reasons is not None: + reasons.extend(skip_reasons) + break + + if hasattr(cls, '__prefer_requires__'): + non_preferred = set() + requirements = config.requirements + for config_obj in list(all_configs): + for requirement in cls.__prefer_requires__: + check = getattr(requirements, requirement) + + if not check.enabled_for_config(config_obj): + non_preferred.add(config_obj) + if all_configs.difference(non_preferred): + all_configs.difference_update(non_preferred) + + return all_configs + + +def _do_skips(cls): + reasons = [] + all_configs = _possible_configs_for_cls(cls, reasons) + + if getattr(cls, '__skip_if__', False): + for c in getattr(cls, '__skip_if__'): + if c(): + config.skip_test("'%s' skipped by %s" % ( + cls.__name__, c.__name__) + ) + + if not all_configs: + if getattr(cls, '__backend__', False): + msg = "'%s' unsupported for implementation '%s'" % ( + cls.__name__, cls.__only_on__) 
+ else: + msg = "'%s' unsupported on any DB implementation %s%s" % ( + cls.__name__, + ", ".join( + "'%s(%s)+%s'" % ( + config_obj.db.name, + ".".join( + str(dig) for dig in + config_obj.db.dialect.server_version_info), + config_obj.db.driver + ) + for config_obj in config.Config.all_configs() + ), + ", ".join(reasons) + ) + config.skip_test(msg) + elif hasattr(cls, '__prefer_backends__'): + non_preferred = set() + spec = exclusions.db_spec(*util.to_list(cls.__prefer_backends__)) + for config_obj in all_configs: + if not spec(config_obj): + non_preferred.add(config_obj) + if all_configs.difference(non_preferred): + all_configs.difference_update(non_preferred) + + if config._current not in all_configs: + _setup_config(all_configs.pop(), cls) + + +def _setup_config(config_obj, ctx): + config._current.push(config_obj, testing) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/plugin/pytestplugin.py b/lib/python3.4/site-packages/sqlalchemy/testing/plugin/pytestplugin.py new file mode 100644 index 0000000..5bb6b96 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/plugin/pytestplugin.py @@ -0,0 +1,181 @@ +try: + # installed by bootstrap.py + import sqla_plugin_base as plugin_base +except ImportError: + # assume we're a package, use traditional import + from . import plugin_base + +import pytest +import argparse +import inspect +import collections +import itertools + +try: + import xdist # noqa + has_xdist = True +except ImportError: + has_xdist = False + + +def pytest_addoption(parser): + group = parser.getgroup("sqlalchemy") + + def make_option(name, **kw): + callback_ = kw.pop("callback", None) + if callback_: + class CallableAction(argparse.Action): + def __call__(self, parser, namespace, + values, option_string=None): + callback_(option_string, values, parser) + kw["action"] = CallableAction + + group.addoption(name, **kw) + + plugin_base.setup_options(make_option) + plugin_base.read_config() + + +def pytest_configure(config): + if hasattr(config, "slaveinput"): + plugin_base.restore_important_follower_config(config.slaveinput) + plugin_base.configure_follower( + config.slaveinput["follower_ident"] + ) + + plugin_base.pre_begin(config.option) + + plugin_base.set_coverage_flag(bool(getattr(config.option, + "cov_source", False))) + + plugin_base.set_skip_test(pytest.skip.Exception) + + +def pytest_sessionstart(session): + plugin_base.post_begin() + +if has_xdist: + import uuid + + def pytest_configure_node(node): + # the master for each node fills slaveinput dictionary + # which pytest-xdist will transfer to the subprocess + + plugin_base.memoize_important_follower_config(node.slaveinput) + + node.slaveinput["follower_ident"] = "test_%s" % uuid.uuid4().hex[0:12] + from sqlalchemy.testing import provision + provision.create_follower_db(node.slaveinput["follower_ident"]) + + def pytest_testnodedown(node, error): + from sqlalchemy.testing import provision + provision.drop_follower_db(node.slaveinput["follower_ident"]) + + +def pytest_collection_modifyitems(session, config, items): + # look for all those classes that specify __backend__ and + # expand them out into per-database test cases. + + # this is much easier to do within pytest_pycollect_makeitem, however + # pytest is iterating through cls.__dict__ as makeitem is + # called which causes a "dictionary changed size" error on py3k. + # I'd submit a pullreq for them to turn it into a list first, but + # it's to suit the rather odd use case here which is that we are adding + # new classes to a module on the fly. 
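+
+    # e.g. a collected class "SomeTest" with __backend__ = True is
+    # re-expanded here into per-database cases such as
+    # "SomeTest_sqlite_pysqlite" and "SomeTest_postgresql_psycopg2"
+    # (names illustrative), as produced by plugin_base.generate_sub_tests().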
+ + rebuilt_items = collections.defaultdict(list) + items[:] = [ + item for item in + items if isinstance(item.parent, pytest.Instance) + and not item.parent.parent.name.startswith("_")] + test_classes = set(item.parent for item in items) + for test_class in test_classes: + for sub_cls in plugin_base.generate_sub_tests( + test_class.cls, test_class.parent.module): + if sub_cls is not test_class.cls: + list_ = rebuilt_items[test_class.cls] + + for inst in pytest.Class( + sub_cls.__name__, + parent=test_class.parent.parent).collect(): + list_.extend(inst.collect()) + + newitems = [] + for item in items: + if item.parent.cls in rebuilt_items: + newitems.extend(rebuilt_items[item.parent.cls]) + rebuilt_items[item.parent.cls][:] = [] + else: + newitems.append(item) + + # seems like the functions attached to a test class aren't sorted already? + # is that true and why's that? (when using unittest, they're sorted) + items[:] = sorted(newitems, key=lambda item: ( + item.parent.parent.parent.name, + item.parent.parent.name, + item.name + )) + + +def pytest_pycollect_makeitem(collector, name, obj): + if inspect.isclass(obj) and plugin_base.want_class(obj): + return pytest.Class(name, parent=collector) + elif inspect.isfunction(obj) and \ + isinstance(collector, pytest.Instance) and \ + plugin_base.want_method(collector.cls, obj): + return pytest.Function(name, parent=collector) + else: + return [] + +_current_class = None + + +def pytest_runtest_setup(item): + # here we seem to get called only based on what we collected + # in pytest_collection_modifyitems. So to do class-based stuff + # we have to tear that out. + global _current_class + + if not isinstance(item, pytest.Function): + return + + # ... so we're doing a little dance here to figure it out... + if _current_class is None: + class_setup(item.parent.parent) + _current_class = item.parent.parent + + # this is needed for the class-level, to ensure that the + # teardown runs after the class is completed with its own + # class-level teardown... + def finalize(): + global _current_class + class_teardown(item.parent.parent) + _current_class = None + item.parent.parent.addfinalizer(finalize) + + test_setup(item) + + +def pytest_runtest_teardown(item): + # ...but this works better as the hook here rather than + # using a finalizer, as the finalizer seems to get in the way + # of the test reporting failures correctly (you get a bunch of + # py.test assertion stuff instead) + test_teardown(item) + + +def test_setup(item): + plugin_base.before_test(item, item.parent.module.__name__, + item.parent.cls, item.name) + + +def test_teardown(item): + plugin_base.after_test(item) + + +def class_setup(item): + plugin_base.start_test_class(item.cls) + + +def class_teardown(item): + plugin_base.stop_test_class(item.cls) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/profiling.py b/lib/python3.4/site-packages/sqlalchemy/testing/profiling.py new file mode 100644 index 0000000..a88cd21 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/profiling.py @@ -0,0 +1,265 @@ +# testing/profiling.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Profiling support for unit and performance tests. + +These are special purpose profiling methods which operate +in a more fine-grained way than nose's profiling plugin. + +""" + +import os +import sys +from .util import gc_collect +from . 
import config +import pstats +import collections +import contextlib + +try: + import cProfile +except ImportError: + cProfile = None +from ..util import jython, pypy, win32, update_wrapper + +_current_test = None + +# ProfileStatsFile instance, set up in plugin_base +_profile_stats = None + + +class ProfileStatsFile(object): + """"Store per-platform/fn profiling results in a file. + + We're still targeting Py2.5, 2.4 on 0.7 with no dependencies, + so no json lib :( need to roll something silly + + """ + + def __init__(self, filename): + self.force_write = ( + config.options is not None and + config.options.force_write_profiles + ) + self.write = self.force_write or ( + config.options is not None and + config.options.write_profiles + ) + self.fname = os.path.abspath(filename) + self.short_fname = os.path.split(self.fname)[-1] + self.data = collections.defaultdict( + lambda: collections.defaultdict(dict)) + self._read() + if self.write: + # rewrite for the case where features changed, + # etc. + self._write() + + @property + def platform_key(self): + + dbapi_key = config.db.name + "_" + config.db.driver + + # keep it at 2.7, 3.1, 3.2, etc. for now. + py_version = '.'.join([str(v) for v in sys.version_info[0:2]]) + + platform_tokens = [py_version] + platform_tokens.append(dbapi_key) + if jython: + platform_tokens.append("jython") + if pypy: + platform_tokens.append("pypy") + if win32: + platform_tokens.append("win") + platform_tokens.append( + "nativeunicode" + if config.db.dialect.convert_unicode + else "dbapiunicode" + ) + _has_cext = config.requirements._has_cextensions() + platform_tokens.append(_has_cext and "cextensions" or "nocextensions") + return "_".join(platform_tokens) + + def has_stats(self): + test_key = _current_test + return ( + test_key in self.data and + self.platform_key in self.data[test_key] + ) + + def result(self, callcount): + test_key = _current_test + per_fn = self.data[test_key] + per_platform = per_fn[self.platform_key] + + if 'counts' not in per_platform: + per_platform['counts'] = counts = [] + else: + counts = per_platform['counts'] + + if 'current_count' not in per_platform: + per_platform['current_count'] = current_count = 0 + else: + current_count = per_platform['current_count'] + + has_count = len(counts) > current_count + + if not has_count: + counts.append(callcount) + if self.write: + self._write() + result = None + else: + result = per_platform['lineno'], counts[current_count] + per_platform['current_count'] += 1 + return result + + def replace(self, callcount): + test_key = _current_test + per_fn = self.data[test_key] + per_platform = per_fn[self.platform_key] + counts = per_platform['counts'] + current_count = per_platform['current_count'] + if current_count < len(counts): + counts[current_count - 1] = callcount + else: + counts[-1] = callcount + if self.write: + self._write() + + def _header(self): + return ( + "# %s\n" + "# This file is written out on a per-environment basis.\n" + "# For each test in aaa_profiling, the corresponding " + "function and \n" + "# environment is located within this file. " + "If it doesn't exist,\n" + "# the test is skipped.\n" + "# If a callcount does exist, it is compared " + "to what we received. 
\n" + "# assertions are raised if the counts do not match.\n" + "# \n" + "# To add a new callcount test, apply the function_call_count \n" + "# decorator and re-run the tests using the --write-profiles \n" + "# option - this file will be rewritten including the new count.\n" + "# \n" + ) % (self.fname) + + def _read(self): + try: + profile_f = open(self.fname) + except IOError: + return + for lineno, line in enumerate(profile_f): + line = line.strip() + if not line or line.startswith("#"): + continue + + test_key, platform_key, counts = line.split() + per_fn = self.data[test_key] + per_platform = per_fn[platform_key] + c = [int(count) for count in counts.split(",")] + per_platform['counts'] = c + per_platform['lineno'] = lineno + 1 + per_platform['current_count'] = 0 + profile_f.close() + + def _write(self): + print(("Writing profile file %s" % self.fname)) + profile_f = open(self.fname, "w") + profile_f.write(self._header()) + for test_key in sorted(self.data): + + per_fn = self.data[test_key] + profile_f.write("\n# TEST: %s\n\n" % test_key) + for platform_key in sorted(per_fn): + per_platform = per_fn[platform_key] + c = ",".join(str(count) for count in per_platform['counts']) + profile_f.write("%s %s %s\n" % (test_key, platform_key, c)) + profile_f.close() + + +def function_call_count(variance=0.05): + """Assert a target for a test case's function call count. + + The main purpose of this assertion is to detect changes in + callcounts for various functions - the actual number is not as important. + Callcounts are stored in a file keyed to Python version and OS platform + information. This file is generated automatically for new tests, + and versioned so that unexpected changes in callcounts will be detected. + + """ + + def decorate(fn): + def wrap(*args, **kw): + with count_functions(variance=variance): + return fn(*args, **kw) + return update_wrapper(wrap, fn) + return decorate + + +@contextlib.contextmanager +def count_functions(variance=0.05): + if cProfile is None: + raise SkipTest("cProfile is not installed") + + if not _profile_stats.has_stats() and not _profile_stats.write: + config.skip_test( + "No profiling stats available on this " + "platform for this function. Run tests with " + "--write-profiles to add statistics to %s for " + "this platform." % _profile_stats.short_fname) + + gc_collect() + + pr = cProfile.Profile() + pr.enable() + #began = time.time() + yield + #ended = time.time() + pr.disable() + + #s = compat.StringIO() + stats = pstats.Stats(pr, stream=sys.stdout) + + #timespent = ended - began + callcount = stats.total_calls + + expected = _profile_stats.result(callcount) + + if expected is None: + expected_count = None + else: + line_no, expected_count = expected + + print(("Pstats calls: %d Expected %s" % ( + callcount, + expected_count + ) + )) + stats.sort_stats("cumulative") + stats.print_stats() + + if expected_count: + deviance = int(callcount * variance) + failed = abs(callcount - expected_count) > deviance + + if failed or _profile_stats.force_write: + if _profile_stats.write: + _profile_stats.replace(callcount) + else: + raise AssertionError( + "Adjusted function call count %s not within %s%% " + "of expected %s, platform %s. Rerun with " + "--write-profiles to " + "regenerate this callcount." 
+ % ( + callcount, (variance * 100), + expected_count, _profile_stats.platform_key)) + + diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/provision.py b/lib/python3.4/site-packages/sqlalchemy/testing/provision.py new file mode 100644 index 0000000..2797411 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/provision.py @@ -0,0 +1,317 @@ +from sqlalchemy.engine import url as sa_url +from sqlalchemy import text +from sqlalchemy import exc +from sqlalchemy.util import compat +from . import config, engines +import time +import logging +import os +log = logging.getLogger(__name__) + +FOLLOWER_IDENT = None + + +class register(object): + def __init__(self): + self.fns = {} + + @classmethod + def init(cls, fn): + return register().for_db("*")(fn) + + def for_db(self, dbname): + def decorate(fn): + self.fns[dbname] = fn + return self + return decorate + + def __call__(self, cfg, *arg): + if isinstance(cfg, compat.string_types): + url = sa_url.make_url(cfg) + elif isinstance(cfg, sa_url.URL): + url = cfg + else: + url = cfg.db.url + backend = url.get_backend_name() + if backend in self.fns: + return self.fns[backend](cfg, *arg) + else: + return self.fns['*'](cfg, *arg) + + +def create_follower_db(follower_ident): + + for cfg in _configs_for_db_operation(): + _create_db(cfg, cfg.db, follower_ident) + + +def configure_follower(follower_ident): + for cfg in config.Config.all_configs(): + _configure_follower(cfg, follower_ident) + + +def setup_config(db_url, options, file_config, follower_ident): + if follower_ident: + db_url = _follower_url_from_main(db_url, follower_ident) + db_opts = {} + _update_db_opts(db_url, db_opts) + eng = engines.testing_engine(db_url, db_opts) + _post_configure_engine(db_url, eng, follower_ident) + eng.connect().close() + cfg = config.Config.register(eng, db_opts, options, file_config) + if follower_ident: + _configure_follower(cfg, follower_ident) + return cfg + + +def drop_follower_db(follower_ident): + for cfg in _configs_for_db_operation(): + _drop_db(cfg, cfg.db, follower_ident) + + +def _configs_for_db_operation(): + hosts = set() + + for cfg in config.Config.all_configs(): + cfg.db.dispose() + + for cfg in config.Config.all_configs(): + url = cfg.db.url + backend = url.get_backend_name() + host_conf = ( + backend, + url.username, url.host, url.database) + + if host_conf not in hosts: + yield cfg + hosts.add(host_conf) + + for cfg in config.Config.all_configs(): + cfg.db.dispose() + + +@register.init +def _create_db(cfg, eng, ident): + raise NotImplementedError("no DB creation routine for cfg: %s" % eng.url) + + +@register.init +def _drop_db(cfg, eng, ident): + raise NotImplementedError("no DB drop routine for cfg: %s" % eng.url) + + +@register.init +def _update_db_opts(db_url, db_opts): + pass + + +@register.init +def _configure_follower(cfg, ident): + pass + + +@register.init +def _post_configure_engine(url, engine, follower_ident): + pass + + +@register.init +def _follower_url_from_main(url, ident): + url = sa_url.make_url(url) + url.database = ident + return url + + +@_update_db_opts.for_db("mssql") +def _mssql_update_db_opts(db_url, db_opts): + db_opts['legacy_schema_aliasing'] = False + + +@_follower_url_from_main.for_db("sqlite") +def _sqlite_follower_url_from_main(url, ident): + url = sa_url.make_url(url) + if not url.database or url.database == ':memory:': + return url + else: + return sa_url.make_url("sqlite:///%s.db" % ident) + + +@_post_configure_engine.for_db("sqlite") +def _sqlite_post_configure_engine(url, engine, 
follower_ident): + from sqlalchemy import event + + @event.listens_for(engine, "connect") + def connect(dbapi_connection, connection_record): + # use file DBs in all cases, memory acts kind of strangely + # as an attached + if not follower_ident: + dbapi_connection.execute( + 'ATTACH DATABASE "test_schema.db" AS test_schema') + else: + dbapi_connection.execute( + 'ATTACH DATABASE "%s_test_schema.db" AS test_schema' + % follower_ident) + + +@_create_db.for_db("postgresql") +def _pg_create_db(cfg, eng, ident): + with eng.connect().execution_options( + isolation_level="AUTOCOMMIT") as conn: + try: + _pg_drop_db(cfg, conn, ident) + except Exception: + pass + currentdb = conn.scalar("select current_database()") + for attempt in range(3): + try: + conn.execute( + "CREATE DATABASE %s TEMPLATE %s" % (ident, currentdb)) + except exc.OperationalError as err: + if attempt != 2 and "accessed by other users" in str(err): + time.sleep(.2) + continue + else: + raise + else: + break + + +@_create_db.for_db("mysql") +def _mysql_create_db(cfg, eng, ident): + with eng.connect() as conn: + try: + _mysql_drop_db(cfg, conn, ident) + except Exception: + pass + conn.execute("CREATE DATABASE %s" % ident) + conn.execute("CREATE DATABASE %s_test_schema" % ident) + conn.execute("CREATE DATABASE %s_test_schema_2" % ident) + + +@_configure_follower.for_db("mysql") +def _mysql_configure_follower(config, ident): + config.test_schema = "%s_test_schema" % ident + config.test_schema_2 = "%s_test_schema_2" % ident + + +@_create_db.for_db("sqlite") +def _sqlite_create_db(cfg, eng, ident): + pass + + +@_drop_db.for_db("postgresql") +def _pg_drop_db(cfg, eng, ident): + with eng.connect().execution_options( + isolation_level="AUTOCOMMIT") as conn: + conn.execute( + text( + "select pg_terminate_backend(pid) from pg_stat_activity " + "where usename=current_user and pid != pg_backend_pid() " + "and datname=:dname" + ), dname=ident) + conn.execute("DROP DATABASE %s" % ident) + + +@_drop_db.for_db("sqlite") +def _sqlite_drop_db(cfg, eng, ident): + if ident: + os.remove("%s_test_schema.db" % ident) + else: + os.remove("%s.db" % ident) + + +@_drop_db.for_db("mysql") +def _mysql_drop_db(cfg, eng, ident): + with eng.connect() as conn: + try: + conn.execute("DROP DATABASE %s_test_schema" % ident) + except Exception: + pass + try: + conn.execute("DROP DATABASE %s_test_schema_2" % ident) + except Exception: + pass + try: + conn.execute("DROP DATABASE %s" % ident) + except Exception: + pass + + +@_create_db.for_db("oracle") +def _oracle_create_db(cfg, eng, ident): + # NOTE: make sure you've run "ALTER DATABASE default tablespace users" or + # similar, so that the default tablespace is not "system"; reflection will + # fail otherwise + with eng.connect() as conn: + conn.execute("create user %s identified by xe" % ident) + conn.execute("create user %s_ts1 identified by xe" % ident) + conn.execute("create user %s_ts2 identified by xe" % ident) + conn.execute("grant dba to %s" % (ident, )) + conn.execute("grant unlimited tablespace to %s" % ident) + conn.execute("grant unlimited tablespace to %s_ts1" % ident) + conn.execute("grant unlimited tablespace to %s_ts2" % ident) + +@_configure_follower.for_db("oracle") +def _oracle_configure_follower(config, ident): + config.test_schema = "%s_ts1" % ident + config.test_schema_2 = "%s_ts2" % ident + + +def _ora_drop_ignore(conn, dbname): + try: + conn.execute("drop user %s cascade" % dbname) + log.info("Reaped db: %s" % dbname) + return True + except exc.DatabaseError as err: + log.warn("couldn't drop 
db: %s" % err) + return False + + +@_drop_db.for_db("oracle") +def _oracle_drop_db(cfg, eng, ident): + with eng.connect() as conn: + # cx_Oracle seems to occasionally leak open connections when a large + # suite it run, even if we confirm we have zero references to + # connection objects. + # while there is a "kill session" command in Oracle, + # it unfortunately does not release the connection sufficiently. + _ora_drop_ignore(conn, ident) + _ora_drop_ignore(conn, "%s_ts1" % ident) + _ora_drop_ignore(conn, "%s_ts2" % ident) + + +def reap_oracle_dbs(eng): + log.info("Reaping Oracle dbs...") + with eng.connect() as conn: + to_reap = conn.execute( + "select u.username from all_users u where username " + "like 'TEST_%' and not exists (select username " + "from v$session where username=u.username)") + all_names = set([username.lower() for (username, ) in to_reap]) + to_drop = set() + for name in all_names: + if name.endswith("_ts1") or name.endswith("_ts2"): + continue + else: + to_drop.add(name) + if "%s_ts1" % name in all_names: + to_drop.add("%s_ts1" % name) + if "%s_ts2" % name in all_names: + to_drop.add("%s_ts2" % name) + + dropped = total = 0 + for total, username in enumerate(to_drop, 1): + if _ora_drop_ignore(conn, username): + dropped += 1 + log.info( + "Dropped %d out of %d stale databases detected", dropped, total) + + +@_follower_url_from_main.for_db("oracle") +def _oracle_follower_url_from_main(url, ident): + url = sa_url.make_url(url) + url.username = ident + url.password = 'xe' + return url + + diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/replay_fixture.py b/lib/python3.4/site-packages/sqlalchemy/testing/replay_fixture.py new file mode 100644 index 0000000..b50f52e --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/replay_fixture.py @@ -0,0 +1,172 @@ +from . import fixtures +from . import profiling +from .. import util +import types +from collections import deque +import contextlib +from . import config +from sqlalchemy import MetaData +from sqlalchemy import create_engine +from sqlalchemy.orm import Session + + +class ReplayFixtureTest(fixtures.TestBase): + + @contextlib.contextmanager + def _dummy_ctx(self, *arg, **kw): + yield + + def test_invocation(self): + + dbapi_session = ReplayableSession() + creator = config.db.pool._creator + recorder = lambda: dbapi_session.recorder(creator()) + engine = create_engine( + config.db.url, creator=recorder, + use_native_hstore=False) + self.metadata = MetaData(engine) + self.engine = engine + self.session = Session(engine) + + self.setup_engine() + try: + self._run_steps(ctx=self._dummy_ctx) + finally: + self.teardown_engine() + engine.dispose() + + player = lambda: dbapi_session.player() + engine = create_engine( + config.db.url, creator=player, + use_native_hstore=False) + + self.metadata = MetaData(engine) + self.engine = engine + self.session = Session(engine) + + self.setup_engine() + try: + self._run_steps(ctx=profiling.count_functions) + finally: + self.session.close() + engine.dispose() + + def setup_engine(self): + pass + + def teardown_engine(self): + pass + + def _run_steps(self, ctx): + raise NotImplementedError() + + +class ReplayableSession(object): + """A simple record/playback tool. + + This is *not* a mock testing class. It only records a session for later + playback and makes no assertions on call consistency whatsoever. It's + unlikely to be suitable for anything other than DB-API recording. 
+ + """ + + Callable = object() + NoAttribute = object() + + if util.py2k: + Natives = set([getattr(types, t) + for t in dir(types) if not t.startswith('_')]).\ + difference([getattr(types, t) + for t in ('FunctionType', 'BuiltinFunctionType', + 'MethodType', 'BuiltinMethodType', + 'LambdaType', 'UnboundMethodType',)]) + else: + Natives = set([getattr(types, t) + for t in dir(types) if not t.startswith('_')]).\ + union([type(t) if not isinstance(t, type) + else t for t in __builtins__.values()]).\ + difference([getattr(types, t) + for t in ('FunctionType', 'BuiltinFunctionType', + 'MethodType', 'BuiltinMethodType', + 'LambdaType', )]) + + def __init__(self): + self.buffer = deque() + + def recorder(self, base): + return self.Recorder(self.buffer, base) + + def player(self): + return self.Player(self.buffer) + + class Recorder(object): + def __init__(self, buffer, subject): + self._buffer = buffer + self._subject = subject + + def __call__(self, *args, **kw): + subject, buffer = [object.__getattribute__(self, x) + for x in ('_subject', '_buffer')] + + result = subject(*args, **kw) + if type(result) not in ReplayableSession.Natives: + buffer.append(ReplayableSession.Callable) + return type(self)(buffer, result) + else: + buffer.append(result) + return result + + @property + def _sqla_unwrap(self): + return self._subject + + def __getattribute__(self, key): + try: + return object.__getattribute__(self, key) + except AttributeError: + pass + + subject, buffer = [object.__getattribute__(self, x) + for x in ('_subject', '_buffer')] + try: + result = type(subject).__getattribute__(subject, key) + except AttributeError: + buffer.append(ReplayableSession.NoAttribute) + raise + else: + if type(result) not in ReplayableSession.Natives: + buffer.append(ReplayableSession.Callable) + return type(self)(buffer, result) + else: + buffer.append(result) + return result + + class Player(object): + def __init__(self, buffer): + self._buffer = buffer + + def __call__(self, *args, **kw): + buffer = object.__getattribute__(self, '_buffer') + result = buffer.popleft() + if result is ReplayableSession.Callable: + return self + else: + return result + + @property + def _sqla_unwrap(self): + return None + + def __getattribute__(self, key): + try: + return object.__getattribute__(self, key) + except AttributeError: + pass + buffer = object.__getattribute__(self, '_buffer') + result = buffer.popleft() + if result is ReplayableSession.Callable: + return self + elif result is ReplayableSession.NoAttribute: + raise AttributeError(key) + else: + return result diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/requirements.py b/lib/python3.4/site-packages/sqlalchemy/testing/requirements.py new file mode 100644 index 0000000..5d31c4d --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/requirements.py @@ -0,0 +1,709 @@ +# testing/requirements.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Global database feature support policy. + +Provides decorators to mark tests requiring specific feature support from the +target database. + +External dialect test suites should subclass SuiteRequirements +to provide specific inclusion/exclusions. + +""" + +from . import exclusions +from .. 
import util + + +class Requirements(object): + pass + + +class SuiteRequirements(Requirements): + + @property + def create_table(self): + """target platform can emit basic CreateTable DDL.""" + + return exclusions.open() + + @property + def drop_table(self): + """target platform can emit basic DropTable DDL.""" + + return exclusions.open() + + @property + def foreign_keys(self): + """Target database must support foreign keys.""" + + return exclusions.open() + + @property + def on_update_cascade(self): + """"target database must support ON UPDATE..CASCADE behavior in + foreign keys.""" + + return exclusions.open() + + @property + def non_updating_cascade(self): + """target database must *not* support ON UPDATE..CASCADE behavior in + foreign keys.""" + return exclusions.closed() + + @property + def deferrable_fks(self): + return exclusions.closed() + + @property + def on_update_or_deferrable_fks(self): + # TODO: exclusions should be composable, + # somehow only_if([x, y]) isn't working here, negation/conjunctions + # getting confused. + return exclusions.only_if( + lambda: self.on_update_cascade.enabled or + self.deferrable_fks.enabled + ) + + @property + def self_referential_foreign_keys(self): + """Target database must support self-referential foreign keys.""" + + return exclusions.open() + + @property + def foreign_key_ddl(self): + """Target database must support the DDL phrases for FOREIGN KEY.""" + + return exclusions.open() + + @property + def named_constraints(self): + """target database must support names for constraints.""" + + return exclusions.open() + + @property + def subqueries(self): + """Target database must support subqueries.""" + + return exclusions.open() + + @property + def offset(self): + """target database can render OFFSET, or an equivalent, in a + SELECT. + """ + + return exclusions.open() + + @property + def bound_limit_offset(self): + """target database can render LIMIT and/or OFFSET using a bound + parameter + """ + + return exclusions.open() + + @property + def boolean_col_expressions(self): + """Target database must support boolean expressions as columns""" + + return exclusions.closed() + + @property + def nullsordering(self): + """Target backends that support nulls ordering.""" + + return exclusions.closed() + + @property + def standalone_binds(self): + """target database/driver supports bound parameters as column expressions + without being in the context of a typed column. + + """ + return exclusions.closed() + + @property + def intersect(self): + """Target database must support INTERSECT or equivalent.""" + return exclusions.closed() + + @property + def except_(self): + """Target database must support EXCEPT or equivalent (i.e. MINUS).""" + return exclusions.closed() + + @property + def window_functions(self): + """Target database must support window functions.""" + return exclusions.closed() + + @property + def autoincrement_insert(self): + """target platform generates new surrogate integer primary key values + when insert() is executed, excluding the pk column.""" + + return exclusions.open() + + @property + def fetch_rows_post_commit(self): + """target platform will allow cursor.fetchone() to proceed after a + COMMIT. + + Typically this refers to an INSERT statement with RETURNING which + is invoked within "autocommit". If the row can be returned + after the autocommit, then this rule can be open. + + """ + + return exclusions.open() + + @property + def empty_inserts(self): + """target platform supports INSERT with no values, i.e. 
+ INSERT DEFAULT VALUES or equivalent.""" + + return exclusions.only_if( + lambda config: config.db.dialect.supports_empty_insert or + config.db.dialect.supports_default_values, + "empty inserts not supported" + ) + + @property + def insert_from_select(self): + """target platform supports INSERT from a SELECT.""" + + return exclusions.open() + + @property + def returning(self): + """target platform supports RETURNING.""" + + return exclusions.only_if( + lambda config: config.db.dialect.implicit_returning, + "%(database)s %(does_support)s 'returning'" + ) + + @property + def duplicate_names_in_cursor_description(self): + """target platform supports a SELECT statement that has + the same name repeated more than once in the columns list.""" + + return exclusions.open() + + @property + def denormalized_names(self): + """Target database must have 'denormalized', i.e. + UPPERCASE as case insensitive names.""" + + return exclusions.skip_if( + lambda config: not config.db.dialect.requires_name_normalize, + "Backend does not require denormalized names." + ) + + @property + def multivalues_inserts(self): + """target database must support multiple VALUES clauses in an + INSERT statement.""" + + return exclusions.skip_if( + lambda config: not config.db.dialect.supports_multivalues_insert, + "Backend does not support multirow inserts." + ) + + @property + def implements_get_lastrowid(self): + """"target dialect implements the executioncontext.get_lastrowid() + method without reliance on RETURNING. + + """ + return exclusions.open() + + @property + def emulated_lastrowid(self): + """"target dialect retrieves cursor.lastrowid, or fetches + from a database-side function after an insert() construct executes, + within the get_lastrowid() method. + + Only dialects that "pre-execute", or need RETURNING to get last + inserted id, would return closed/fail/skip for this. + + """ + return exclusions.closed() + + @property + def dbapi_lastrowid(self): + """"target platform includes a 'lastrowid' accessor on the DBAPI + cursor object. + + """ + return exclusions.closed() + + @property + def views(self): + """Target database must support VIEWs.""" + + return exclusions.closed() + + @property + def schemas(self): + """Target database must support external schemas, and have one + named 'test_schema'.""" + + return exclusions.closed() + + @property + def sequences(self): + """Target database must support SEQUENCEs.""" + + return exclusions.only_if([ + lambda config: config.db.dialect.supports_sequences + ], "no sequence support") + + @property + def sequences_optional(self): + """Target database supports sequences, but also optionally + as a means of generating new PK values.""" + + return exclusions.only_if([ + lambda config: config.db.dialect.supports_sequences and + config.db.dialect.sequences_optional + ], "no sequence support, or sequences not optional") + + @property + def reflects_pk_names(self): + return exclusions.closed() + + @property + def table_reflection(self): + return exclusions.open() + + @property + def view_column_reflection(self): + """target database must support retrieval of the columns in a view, + similarly to how a table is inspected. + + This does not include the full CREATE VIEW definition. + + """ + return self.views + + @property + def view_reflection(self): + """target database must support inspection of the full CREATE VIEW definition. 
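+    (e.g. as retrieved via ``Inspector.get_view_definition()``.)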
+ """ + return self.views + + @property + def schema_reflection(self): + return self.schemas + + @property + def primary_key_constraint_reflection(self): + return exclusions.open() + + @property + def foreign_key_constraint_reflection(self): + return exclusions.open() + + @property + def temp_table_reflection(self): + return exclusions.open() + + @property + def temp_table_names(self): + """target dialect supports listing of temporary table names""" + return exclusions.closed() + + @property + def temporary_tables(self): + """target database supports temporary tables""" + return exclusions.open() + + @property + def temporary_views(self): + """target database supports temporary views""" + return exclusions.closed() + + @property + def index_reflection(self): + return exclusions.open() + + @property + def unique_constraint_reflection(self): + """target dialect supports reflection of unique constraints""" + return exclusions.open() + + @property + def duplicate_key_raises_integrity_error(self): + """target dialect raises IntegrityError when reporting an INSERT + with a primary key violation. (hint: it should) + + """ + return exclusions.open() + + @property + def unbounded_varchar(self): + """Target database must support VARCHAR with no length""" + + return exclusions.open() + + @property + def unicode_data(self): + """Target database/dialect must support Python unicode objects with + non-ASCII characters represented, delivered as bound parameters + as well as in result rows. + + """ + return exclusions.open() + + @property + def unicode_ddl(self): + """Target driver must support some degree of non-ascii symbol + names. + """ + return exclusions.closed() + + @property + def datetime_literals(self): + """target dialect supports rendering of a date, time, or datetime as a + literal string, e.g. via the TypeEngine.literal_processor() method. + + """ + + return exclusions.closed() + + @property + def datetime(self): + """target dialect supports representation of Python + datetime.datetime() objects.""" + + return exclusions.open() + + @property + def datetime_microseconds(self): + """target dialect supports representation of Python + datetime.datetime() with microsecond objects.""" + + return exclusions.open() + + @property + def datetime_historic(self): + """target dialect supports representation of Python + datetime.datetime() objects with historic (pre 1970) values.""" + + return exclusions.closed() + + @property + def date(self): + """target dialect supports representation of Python + datetime.date() objects.""" + + return exclusions.open() + + @property + def date_coerces_from_datetime(self): + """target dialect accepts a datetime object as the target + of a date column.""" + + return exclusions.open() + + @property + def date_historic(self): + """target dialect supports representation of Python + datetime.datetime() objects with historic (pre 1970) values.""" + + return exclusions.closed() + + @property + def time(self): + """target dialect supports representation of Python + datetime.time() objects.""" + + return exclusions.open() + + @property + def time_microseconds(self): + """target dialect supports representation of Python + datetime.time() with microsecond objects.""" + + return exclusions.open() + + @property + def binary_comparisons(self): + """target database/driver can allow BLOB/BINARY fields to be compared + against a bound parameter value. + """ + + return exclusions.open() + + @property + def binary_literals(self): + """target backend supports simple binary literals, e.g. 
an + expression like:: + + SELECT CAST('foo' AS BINARY) + + Where ``BINARY`` is the type emitted from :class:`.LargeBinary`, + e.g. it could be ``BLOB`` or similar. + + Basically fails on Oracle. + + """ + + return exclusions.open() + + @property + def precision_numerics_general(self): + """target backend has general support for moderately high-precision + numerics.""" + return exclusions.open() + + @property + def precision_numerics_enotation_small(self): + """target backend supports Decimal() objects using E notation + to represent very small values.""" + return exclusions.closed() + + @property + def precision_numerics_enotation_large(self): + """target backend supports Decimal() objects using E notation + to represent very large values.""" + return exclusions.closed() + + @property + def precision_numerics_many_significant_digits(self): + """target backend supports values with many digits on both sides, + such as 319438950232418390.273596, 87673.594069654243 + + """ + return exclusions.closed() + + @property + def precision_numerics_retains_significant_digits(self): + """A precision numeric type will return empty significant digits, + i.e. a value such as 10.000 will come back in Decimal form with + the .000 maintained.""" + + return exclusions.closed() + + @property + def precision_generic_float_type(self): + """target backend will return native floating point numbers with at + least seven decimal places when using the generic Float type. + + """ + return exclusions.open() + + @property + def floats_to_four_decimals(self): + """target backend can return a floating-point number with four + significant digits (such as 15.7563) accurately + (i.e. without FP inaccuracies, such as 15.75629997253418). + + """ + return exclusions.open() + + @property + def fetch_null_from_numeric(self): + """target backend doesn't crash when you try to select a NUMERIC + value that has a value of NULL. + + Added to support Pyodbc bug #351. + """ + + return exclusions.open() + + @property + def text_type(self): + """Target database must support an unbounded Text() " + "type such as TEXT or CLOB""" + + return exclusions.open() + + @property + def empty_strings_varchar(self): + """target database can persist/return an empty string with a + varchar. + + """ + return exclusions.open() + + @property + def empty_strings_text(self): + """target database can persist/return an empty string with an + unbounded text.""" + + return exclusions.open() + + @property + def selectone(self): + """target driver must support the literal statement 'select 1'""" + return exclusions.open() + + @property + def savepoints(self): + """Target database must support savepoints.""" + + return exclusions.closed() + + @property + def two_phase_transactions(self): + """Target database must support two-phase transactions.""" + + return exclusions.closed() + + @property + def update_from(self): + """Target must support UPDATE..FROM syntax""" + return exclusions.closed() + + @property + def update_where_target_in_subquery(self): + """Target must support UPDATE where the same table is present in a + subquery in the WHERE clause. 
+ + This is an ANSI-standard syntax that apparently MySQL can't handle, + such as: + + UPDATE documents SET flag=1 WHERE documents.title IN + (SELECT max(documents.title) AS title + FROM documents GROUP BY documents.user_id + ) + """ + return exclusions.open() + + @property + def mod_operator_as_percent_sign(self): + """target database must use a plain percent '%' as the 'modulus' + operator.""" + return exclusions.closed() + + @property + def percent_schema_names(self): + """target backend supports weird identifiers with percent signs + in them, e.g. 'some % column'. + + this is a very weird use case but often has problems because of + DBAPIs that use python formatting. It's not a critical use + case either. + + """ + return exclusions.closed() + + @property + def order_by_label_with_expression(self): + """target backend supports ORDER BY a column label within an + expression. + + Basically this:: + + select data as foo from test order by foo || 'bar' + + Lots of databases including Postgresql don't support this, + so this is off by default. + + """ + return exclusions.closed() + + @property + def unicode_connections(self): + """Target driver must support non-ASCII characters being passed at + all. + """ + return exclusions.open() + + @property + def graceful_disconnects(self): + """Target driver must raise a DBAPI-level exception, such as + InterfaceError, when the underlying connection has been closed + and the execute() method is called. + """ + return exclusions.open() + + @property + def skip_mysql_on_windows(self): + """Catchall for a large variety of MySQL on Windows failures""" + return exclusions.open() + + @property + def ad_hoc_engines(self): + """Test environment must allow ad-hoc engine/connection creation. + + DBs that scale poorly for many connections, even when closed, i.e. + Oracle, may use the "--low-connections" option which flags this + requirement as not present. + + """ + return exclusions.skip_if( + lambda config: config.options.low_connections) + + @property + def timing_intensive(self): + return exclusions.requires_tag("timing_intensive") + + @property + def memory_intensive(self): + return exclusions.requires_tag("memory_intensive") + + @property + def threading_with_mock(self): + """Mark tests that use threading and mock at the same time - stability + issues have been observed with coverage + python 3.3 + + """ + return exclusions.skip_if( + lambda config: util.py3k and config.options.has_coverage, + "Stability issues with coverage + py3k" + ) + + @property + def no_coverage(self): + """Test should be skipped if coverage is enabled. + + This is to block tests that exercise libraries that seem to be + sensitive to coverage, such as Postgresql notice logging. 
+ + """ + return exclusions.skip_if( + lambda config: config.options.has_coverage, + "Issues observed when coverage is enabled" + ) + + def _has_mysql_on_windows(self, config): + return False + + def _has_mysql_fully_case_sensitive(self, config): + return False + + @property + def sqlite(self): + return exclusions.skip_if(lambda: not self._has_sqlite()) + + @property + def cextensions(self): + return exclusions.skip_if( + lambda: not self._has_cextensions(), "C extensions not installed" + ) + + def _has_sqlite(self): + from sqlalchemy import create_engine + try: + create_engine('sqlite://') + return True + except ImportError: + return False + + def _has_cextensions(self): + try: + from sqlalchemy import cresultproxy, cprocessors + return True + except ImportError: + return False diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/runner.py b/lib/python3.4/site-packages/sqlalchemy/testing/runner.py new file mode 100644 index 0000000..b58aa01 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/runner.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python +# testing/runner.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +""" +Nose test runner module. + +This script is a front-end to "nosetests" which +installs SQLAlchemy's testing plugin into the local environment. + +The script is intended to be used by third-party dialects and extensions +that run within SQLAlchemy's testing framework. The runner can +be invoked via:: + + python -m sqlalchemy.testing.runner + +The script is then essentially the same as the "nosetests" script, including +all of the usual Nose options. The test environment requires that a +setup.cfg is locally present including various required options. + +Note that when using this runner, Nose's "coverage" plugin will not be +able to provide coverage for SQLAlchemy itself, since SQLAlchemy is +imported into sys.modules before coverage is started. The special +script sqla_nose.py is provided as a top-level script which loads the +plugin in a special (somewhat hacky) way so that coverage against +SQLAlchemy itself is possible. + +""" + +from .plugin.noseplugin import NoseSQLAlchemy + +import nose + + +def main(): + nose.main(addplugins=[NoseSQLAlchemy()]) + + +def setup_py_test(): + """Runner to use for the 'test_suite' entry of your setup.py. + + Prevents any name clash shenanigans from the command line + argument "test" that the "setup.py test" command sends + to nose. + + """ + nose.main(addplugins=[NoseSQLAlchemy()], argv=['runner']) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/schema.py b/lib/python3.4/site-packages/sqlalchemy/testing/schema.py new file mode 100644 index 0000000..2cd6e4c --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/schema.py @@ -0,0 +1,98 @@ +# testing/schema.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from . import exclusions +from .. import schema, event +from . 
import config
+
+__all__ = 'Table', 'Column',
+
+table_options = {}
+
+
+def Table(*args, **kw):
+    """A schema.Table wrapper/hook for dialect-specific tweaks."""
+
+    test_opts = dict([(k, kw.pop(k)) for k in list(kw)
+                      if k.startswith('test_')])
+
+    kw.update(table_options)
+
+    if exclusions.against(config._current, 'mysql'):
+        if 'mysql_engine' not in kw and 'mysql_type' not in kw:
+            if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
+                kw['mysql_engine'] = 'InnoDB'
+            else:
+                kw['mysql_engine'] = 'MyISAM'
+
+    # Apply some default cascading rules for self-referential foreign keys.
+    # MySQL InnoDB has some issues around selecting self-refs too.
+    if exclusions.against(config._current, 'firebird'):
+        table_name = args[0]
+        unpack = (config.db.dialect.
+                  identifier_preparer.unformat_identifiers)
+
+        # Only going after ForeignKeys in Columns.  May need to
+        # expand to ForeignKeyConstraint too.
+        fks = [fk
+               for col in args if isinstance(col, schema.Column)
+               for fk in col.foreign_keys]
+
+        for fk in fks:
+            # root around in raw spec
+            ref = fk._colspec
+            if isinstance(ref, schema.Column):
+                name = ref.table.name
+            else:
+                # take just the table name: on FB there cannot be
+                # a schema, so the first element is always the
+                # table name, possibly followed by the field name
+                name = unpack(ref)[0]
+            if name == table_name:
+                if fk.ondelete is None:
+                    fk.ondelete = 'CASCADE'
+                if fk.onupdate is None:
+                    fk.onupdate = 'CASCADE'
+
+    return schema.Table(*args, **kw)
+
+
+def Column(*args, **kw):
+    """A schema.Column wrapper/hook for dialect-specific tweaks."""
+
+    test_opts = dict([(k, kw.pop(k)) for k in list(kw)
+                      if k.startswith('test_')])
+
+    if not config.requirements.foreign_key_ddl.enabled_for_config(config):
+        args = [arg for arg in args if not isinstance(arg, schema.ForeignKey)]
+
+    col = schema.Column(*args, **kw)
+    if 'test_needs_autoincrement' in test_opts and \
+            kw.get('primary_key', False):
+
+        # allow any test suite to pick up on this
+        col.info['test_needs_autoincrement'] = True
+
+        # hardcoded rule for firebird, oracle; this should
+        # be moved out
+        if exclusions.against(config._current, 'firebird', 'oracle'):
+            def add_seq(c, tbl):
+                c._init_items(
+                    schema.Sequence(_truncate_name(
+                        config.db.dialect, tbl.name + '_' + c.name + '_seq'),
+                        optional=True)
+                )
+            event.listen(col, 'after_parent_attach', add_seq, propagate=True)
+    return col
+
+
+def _truncate_name(dialect, name):
+    if len(name) > dialect.max_identifier_length:
+        return name[0:max(dialect.max_identifier_length - 6, 0)] + \
+            "_" + hex(hash(name) % 64)[2:]
+    else:
+        return name
diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/suite/__init__.py b/lib/python3.4/site-packages/sqlalchemy/testing/suite/__init__.py
new file mode 100644
index 0000000..9eeffd4
--- /dev/null
+++ b/lib/python3.4/site-packages/sqlalchemy/testing/suite/__init__.py
@@ -0,0 +1,10 @@
+
+from sqlalchemy.testing.suite.test_dialect import *
+from sqlalchemy.testing.suite.test_ddl import *
+from sqlalchemy.testing.suite.test_insert import *
+from sqlalchemy.testing.suite.test_sequence import *
+from sqlalchemy.testing.suite.test_select import *
+from sqlalchemy.testing.suite.test_results import *
+from sqlalchemy.testing.suite.test_update_delete import *
+from sqlalchemy.testing.suite.test_reflection import *
+from sqlalchemy.testing.suite.test_types import *
diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_ddl.py b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_ddl.py
new file mode 100644 index
0000000..1d8010c --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_ddl.py @@ -0,0 +1,65 @@ + + +from .. import fixtures, config, util +from ..config import requirements +from ..assertions import eq_ + +from sqlalchemy import Table, Column, Integer, String + + +class TableDDLTest(fixtures.TestBase): + __backend__ = True + + def _simple_fixture(self): + return Table('test_table', self.metadata, + Column('id', Integer, primary_key=True, + autoincrement=False), + Column('data', String(50)) + ) + + def _underscore_fixture(self): + return Table('_test_table', self.metadata, + Column('id', Integer, primary_key=True, + autoincrement=False), + Column('_data', String(50)) + ) + + def _simple_roundtrip(self, table): + with config.db.begin() as conn: + conn.execute(table.insert().values((1, 'some data'))) + result = conn.execute(table.select()) + eq_( + result.first(), + (1, 'some data') + ) + + @requirements.create_table + @util.provide_metadata + def test_create_table(self): + table = self._simple_fixture() + table.create( + config.db, checkfirst=False + ) + self._simple_roundtrip(table) + + @requirements.drop_table + @util.provide_metadata + def test_drop_table(self): + table = self._simple_fixture() + table.create( + config.db, checkfirst=False + ) + table.drop( + config.db, checkfirst=False + ) + + @requirements.create_table + @util.provide_metadata + def test_underscore_names(self): + table = self._underscore_fixture() + table.create( + config.db, checkfirst=False + ) + self._simple_roundtrip(table) + +__all__ = ('TableDDLTest', ) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_dialect.py b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_dialect.py new file mode 100644 index 0000000..00884a2 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_dialect.py @@ -0,0 +1,41 @@ +from .. import fixtures, config +from ..config import requirements +from sqlalchemy import exc +from sqlalchemy import Integer, String +from .. import assert_raises +from ..schema import Table, Column + + +class ExceptionTest(fixtures.TablesTest): + """Test basic exception wrapping. + + DBAPIs vary a lot in exception behavior so to actually anticipate + specific exceptions from real round trips, we need to be conservative. + + """ + run_deletes = 'each' + + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table('manual_pk', metadata, + Column('id', Integer, primary_key=True, autoincrement=False), + Column('data', String(50)) + ) + + @requirements.duplicate_key_raises_integrity_error + def test_integrity_error(self): + + with config.db.begin() as conn: + conn.execute( + self.tables.manual_pk.insert(), + {'id': 1, 'data': 'd1'} + ) + + assert_raises( + exc.IntegrityError, + conn.execute, + self.tables.manual_pk.insert(), + {'id': 1, 'data': 'd1'} + ) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_insert.py b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_insert.py new file mode 100644 index 0000000..70e8a6b --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_insert.py @@ -0,0 +1,269 @@ +from .. import fixtures, config +from ..config import requirements +from .. import exclusions +from ..assertions import eq_ +from .. 
import engines + +from sqlalchemy import Integer, String, select, literal_column, literal + +from ..schema import Table, Column + + +class LastrowidTest(fixtures.TablesTest): + run_deletes = 'each' + + __backend__ = True + + __requires__ = 'implements_get_lastrowid', 'autoincrement_insert' + + __engine_options__ = {"implicit_returning": False} + + @classmethod + def define_tables(cls, metadata): + Table('autoinc_pk', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50)) + ) + + Table('manual_pk', metadata, + Column('id', Integer, primary_key=True, autoincrement=False), + Column('data', String(50)) + ) + + def _assert_round_trip(self, table, conn): + row = conn.execute(table.select()).first() + eq_( + row, + (config.db.dialect.default_sequence_base, "some data") + ) + + def test_autoincrement_on_insert(self): + + config.db.execute( + self.tables.autoinc_pk.insert(), + data="some data" + ) + self._assert_round_trip(self.tables.autoinc_pk, config.db) + + def test_last_inserted_id(self): + + r = config.db.execute( + self.tables.autoinc_pk.insert(), + data="some data" + ) + pk = config.db.scalar(select([self.tables.autoinc_pk.c.id])) + eq_( + r.inserted_primary_key, + [pk] + ) + + # failed on pypy1.9 but seems to be OK on pypy 2.1 + # @exclusions.fails_if(lambda: util.pypy, + # "lastrowid not maintained after " + # "connection close") + @requirements.dbapi_lastrowid + def test_native_lastrowid_autoinc(self): + r = config.db.execute( + self.tables.autoinc_pk.insert(), + data="some data" + ) + lastrowid = r.lastrowid + pk = config.db.scalar(select([self.tables.autoinc_pk.c.id])) + eq_( + lastrowid, pk + ) + + +class InsertBehaviorTest(fixtures.TablesTest): + run_deletes = 'each' + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table('autoinc_pk', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50)) + ) + Table('manual_pk', metadata, + Column('id', Integer, primary_key=True, autoincrement=False), + Column('data', String(50)) + ) + Table('includes_defaults', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50)), + Column('x', Integer, default=5), + Column('y', Integer, + default=literal_column("2", type_=Integer) + literal(2))) + + def test_autoclose_on_insert(self): + if requirements.returning.enabled: + engine = engines.testing_engine( + options={'implicit_returning': False}) + else: + engine = config.db + + r = engine.execute( + self.tables.autoinc_pk.insert(), + data="some data" + ) + assert r._soft_closed + assert not r.closed + assert r.is_insert + assert not r.returns_rows + + @requirements.returning + def test_autoclose_on_insert_implicit_returning(self): + r = config.db.execute( + self.tables.autoinc_pk.insert(), + data="some data" + ) + assert r._soft_closed + assert not r.closed + assert r.is_insert + assert not r.returns_rows + + @requirements.empty_inserts + def test_empty_insert(self): + r = config.db.execute( + self.tables.autoinc_pk.insert(), + ) + assert r._soft_closed + assert not r.closed + + r = config.db.execute( + self.tables.autoinc_pk.select(). 
+ where(self.tables.autoinc_pk.c.id != None) + ) + + assert len(r.fetchall()) + + @requirements.insert_from_select + def test_insert_from_select(self): + table = self.tables.manual_pk + config.db.execute( + table.insert(), + [ + dict(id=1, data="data1"), + dict(id=2, data="data2"), + dict(id=3, data="data3"), + ] + ) + + config.db.execute( + table.insert(inline=True). + from_select(("id", "data",), + select([table.c.id + 5, table.c.data]). + where(table.c.data.in_(["data2", "data3"])) + ), + ) + + eq_( + config.db.execute( + select([table.c.data]).order_by(table.c.data) + ).fetchall(), + [("data1", ), ("data2", ), ("data2", ), + ("data3", ), ("data3", )] + ) + + @requirements.insert_from_select + def test_insert_from_select_with_defaults(self): + table = self.tables.includes_defaults + config.db.execute( + table.insert(), + [ + dict(id=1, data="data1"), + dict(id=2, data="data2"), + dict(id=3, data="data3"), + ] + ) + + config.db.execute( + table.insert(inline=True). + from_select(("id", "data",), + select([table.c.id + 5, table.c.data]). + where(table.c.data.in_(["data2", "data3"])) + ), + ) + + eq_( + config.db.execute( + select([table]).order_by(table.c.data, table.c.id) + ).fetchall(), + [(1, 'data1', 5, 4), (2, 'data2', 5, 4), + (7, 'data2', 5, 4), (3, 'data3', 5, 4), (8, 'data3', 5, 4)] + ) + + +class ReturningTest(fixtures.TablesTest): + run_create_tables = 'each' + __requires__ = 'returning', 'autoincrement_insert' + __backend__ = True + + __engine_options__ = {"implicit_returning": True} + + def _assert_round_trip(self, table, conn): + row = conn.execute(table.select()).first() + eq_( + row, + (config.db.dialect.default_sequence_base, "some data") + ) + + @classmethod + def define_tables(cls, metadata): + Table('autoinc_pk', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50)) + ) + + @requirements.fetch_rows_post_commit + def test_explicit_returning_pk_autocommit(self): + engine = config.db + table = self.tables.autoinc_pk + r = engine.execute( + table.insert().returning( + table.c.id), + data="some data" + ) + pk = r.first()[0] + fetched_pk = config.db.scalar(select([table.c.id])) + eq_(fetched_pk, pk) + + def test_explicit_returning_pk_no_autocommit(self): + engine = config.db + table = self.tables.autoinc_pk + with engine.begin() as conn: + r = conn.execute( + table.insert().returning( + table.c.id), + data="some data" + ) + pk = r.first()[0] + fetched_pk = config.db.scalar(select([table.c.id])) + eq_(fetched_pk, pk) + + def test_autoincrement_on_insert_implcit_returning(self): + + config.db.execute( + self.tables.autoinc_pk.insert(), + data="some data" + ) + self._assert_round_trip(self.tables.autoinc_pk, config.db) + + def test_last_inserted_id_implicit_returning(self): + + r = config.db.execute( + self.tables.autoinc_pk.insert(), + data="some data" + ) + pk = config.db.scalar(select([self.tables.autoinc_pk.c.id])) + eq_( + r.inserted_primary_key, + [pk] + ) + + +__all__ = ('LastrowidTest', 'InsertBehaviorTest', 'ReturningTest') diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_reflection.py b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_reflection.py new file mode 100644 index 0000000..1874f62 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_reflection.py @@ -0,0 +1,656 @@ + + +import sqlalchemy as sa +from sqlalchemy import exc as sa_exc +from sqlalchemy import types as sql_types +from sqlalchemy import inspect +from sqlalchemy import MetaData, 
Integer, String +from sqlalchemy.engine.reflection import Inspector +from sqlalchemy.testing import engines, fixtures +from sqlalchemy.testing.schema import Table, Column +from sqlalchemy.testing import eq_, assert_raises_message +from sqlalchemy import testing +from .. import config +import operator +from sqlalchemy.schema import DDL, Index +from sqlalchemy import event + +metadata, users = None, None + + +class HasTableTest(fixtures.TablesTest): + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table('test_table', metadata, + Column('id', Integer, primary_key=True), + Column('data', String(50)) + ) + + def test_has_table(self): + with config.db.begin() as conn: + assert config.db.dialect.has_table(conn, "test_table") + assert not config.db.dialect.has_table(conn, "nonexistent_table") + + +class ComponentReflectionTest(fixtures.TablesTest): + run_inserts = run_deletes = None + + __backend__ = True + + @classmethod + def setup_bind(cls): + if config.requirements.independent_connections.enabled: + from sqlalchemy import pool + return engines.testing_engine( + options=dict(poolclass=pool.StaticPool)) + else: + return config.db + + @classmethod + def define_tables(cls, metadata): + cls.define_reflected_tables(metadata, None) + if testing.requires.schemas.enabled: + cls.define_reflected_tables(metadata, testing.config.test_schema) + + @classmethod + def define_reflected_tables(cls, metadata, schema): + if schema: + schema_prefix = schema + "." + else: + schema_prefix = "" + + if testing.requires.self_referential_foreign_keys.enabled: + users = Table('users', metadata, + Column('user_id', sa.INT, primary_key=True), + Column('test1', sa.CHAR(5), nullable=False), + Column('test2', sa.Float(5), nullable=False), + Column('parent_user_id', sa.Integer, + sa.ForeignKey('%susers.user_id' % + schema_prefix)), + schema=schema, + test_needs_fk=True, + ) + else: + users = Table('users', metadata, + Column('user_id', sa.INT, primary_key=True), + Column('test1', sa.CHAR(5), nullable=False), + Column('test2', sa.Float(5), nullable=False), + schema=schema, + test_needs_fk=True, + ) + + Table("dingalings", metadata, + Column('dingaling_id', sa.Integer, primary_key=True), + Column('address_id', sa.Integer, + sa.ForeignKey('%semail_addresses.address_id' % + schema_prefix)), + Column('data', sa.String(30)), + schema=schema, + test_needs_fk=True, + ) + Table('email_addresses', metadata, + Column('address_id', sa.Integer), + Column('remote_user_id', sa.Integer, + sa.ForeignKey(users.c.user_id)), + Column('email_address', sa.String(20)), + sa.PrimaryKeyConstraint('address_id', name='email_ad_pk'), + schema=schema, + test_needs_fk=True, + ) + + if testing.requires.index_reflection.enabled: + cls.define_index(metadata, users) + if testing.requires.view_column_reflection.enabled: + cls.define_views(metadata, schema) + if not schema and testing.requires.temp_table_reflection.enabled: + cls.define_temp_tables(metadata) + + @classmethod + def define_temp_tables(cls, metadata): + # cheat a bit, we should fix this with some dialect-level + # temp table fixture + if testing.against("oracle"): + kw = { + 'prefixes': ["GLOBAL TEMPORARY"], + 'oracle_on_commit': 'PRESERVE ROWS' + } + else: + kw = { + 'prefixes': ["TEMPORARY"], + } + + user_tmp = Table( + "user_tmp", metadata, + Column("id", sa.INT, primary_key=True), + Column('name', sa.VARCHAR(50)), + Column('foo', sa.INT), + sa.UniqueConstraint('name', name='user_tmp_uq'), + sa.Index("user_tmp_ix", "foo"), + **kw + ) + if 
testing.requires.view_reflection.enabled and \ + testing.requires.temporary_views.enabled: + event.listen( + user_tmp, "after_create", + DDL("create temporary view user_tmp_v as " + "select * from user_tmp") + ) + event.listen( + user_tmp, "before_drop", + DDL("drop view user_tmp_v") + ) + + @classmethod + def define_index(cls, metadata, users): + Index("users_t_idx", users.c.test1, users.c.test2) + Index("users_all_idx", users.c.user_id, users.c.test2, users.c.test1) + + @classmethod + def define_views(cls, metadata, schema): + for table_name in ('users', 'email_addresses'): + fullname = table_name + if schema: + fullname = "%s.%s" % (schema, table_name) + view_name = fullname + '_v' + query = "CREATE VIEW %s AS SELECT * FROM %s" % ( + view_name, fullname) + + event.listen( + metadata, + "after_create", + DDL(query) + ) + event.listen( + metadata, + "before_drop", + DDL("DROP VIEW %s" % view_name) + ) + + @testing.requires.schema_reflection + def test_get_schema_names(self): + insp = inspect(testing.db) + + self.assert_(testing.config.test_schema in insp.get_schema_names()) + + @testing.requires.schema_reflection + def test_dialect_initialize(self): + engine = engines.testing_engine() + assert not hasattr(engine.dialect, 'default_schema_name') + inspect(engine) + assert hasattr(engine.dialect, 'default_schema_name') + + @testing.requires.schema_reflection + def test_get_default_schema_name(self): + insp = inspect(testing.db) + eq_(insp.default_schema_name, testing.db.dialect.default_schema_name) + + @testing.provide_metadata + def _test_get_table_names(self, schema=None, table_type='table', + order_by=None): + meta = self.metadata + users, addresses, dingalings = self.tables.users, \ + self.tables.email_addresses, self.tables.dingalings + insp = inspect(meta.bind) + + if table_type == 'view': + table_names = insp.get_view_names(schema) + table_names.sort() + answer = ['email_addresses_v', 'users_v'] + eq_(sorted(table_names), answer) + else: + table_names = insp.get_table_names(schema, + order_by=order_by) + if order_by == 'foreign_key': + answer = ['users', 'email_addresses', 'dingalings'] + eq_(table_names, answer) + else: + answer = ['dingalings', 'email_addresses', 'users'] + eq_(sorted(table_names), answer) + + @testing.requires.temp_table_names + def test_get_temp_table_names(self): + insp = inspect(self.bind) + temp_table_names = insp.get_temp_table_names() + eq_(sorted(temp_table_names), ['user_tmp']) + + @testing.requires.view_reflection + @testing.requires.temp_table_names + @testing.requires.temporary_views + def test_get_temp_view_names(self): + insp = inspect(self.bind) + temp_table_names = insp.get_temp_view_names() + eq_(sorted(temp_table_names), ['user_tmp_v']) + + @testing.requires.table_reflection + def test_get_table_names(self): + self._test_get_table_names() + + @testing.requires.table_reflection + @testing.requires.foreign_key_constraint_reflection + def test_get_table_names_fks(self): + self._test_get_table_names(order_by='foreign_key') + + @testing.requires.table_reflection + @testing.requires.schemas + def test_get_table_names_with_schema(self): + self._test_get_table_names(testing.config.test_schema) + + @testing.requires.view_column_reflection + def test_get_view_names(self): + self._test_get_table_names(table_type='view') + + @testing.requires.view_column_reflection + @testing.requires.schemas + def test_get_view_names_with_schema(self): + self._test_get_table_names( + testing.config.test_schema, table_type='view') + + @testing.requires.table_reflection + 
@testing.requires.view_column_reflection + def test_get_tables_and_views(self): + self._test_get_table_names() + self._test_get_table_names(table_type='view') + + def _test_get_columns(self, schema=None, table_type='table'): + meta = MetaData(testing.db) + users, addresses, dingalings = self.tables.users, \ + self.tables.email_addresses, self.tables.dingalings + table_names = ['users', 'email_addresses'] + if table_type == 'view': + table_names = ['users_v', 'email_addresses_v'] + insp = inspect(meta.bind) + for table_name, table in zip(table_names, (users, + addresses)): + schema_name = schema + cols = insp.get_columns(table_name, schema=schema_name) + self.assert_(len(cols) > 0, len(cols)) + + # should be in order + + for i, col in enumerate(table.columns): + eq_(col.name, cols[i]['name']) + ctype = cols[i]['type'].__class__ + ctype_def = col.type + if isinstance(ctype_def, sa.types.TypeEngine): + ctype_def = ctype_def.__class__ + + # Oracle returns Date for DateTime. + + if testing.against('oracle') and ctype_def \ + in (sql_types.Date, sql_types.DateTime): + ctype_def = sql_types.Date + + # assert that the desired type and return type share + # a base within one of the generic types. + + self.assert_(len(set(ctype.__mro__). + intersection(ctype_def.__mro__). + intersection([ + sql_types.Integer, + sql_types.Numeric, + sql_types.DateTime, + sql_types.Date, + sql_types.Time, + sql_types.String, + sql_types._Binary, + ])) > 0, '%s(%s), %s(%s)' % + (col.name, col.type, cols[i]['name'], ctype)) + + if not col.primary_key: + assert cols[i]['default'] is None + + @testing.requires.table_reflection + def test_get_columns(self): + self._test_get_columns() + + @testing.provide_metadata + def _type_round_trip(self, *types): + t = Table('t', self.metadata, + *[ + Column('t%d' % i, type_) + for i, type_ in enumerate(types) + ] + ) + t.create() + + return [ + c['type'] for c in + inspect(self.metadata.bind).get_columns('t') + ] + + @testing.requires.table_reflection + def test_numeric_reflection(self): + for typ in self._type_round_trip( + sql_types.Numeric(18, 5), + ): + assert isinstance(typ, sql_types.Numeric) + eq_(typ.precision, 18) + eq_(typ.scale, 5) + + @testing.requires.table_reflection + def test_varchar_reflection(self): + typ = self._type_round_trip(sql_types.String(52))[0] + assert isinstance(typ, sql_types.String) + eq_(typ.length, 52) + + @testing.requires.table_reflection + @testing.provide_metadata + def test_nullable_reflection(self): + t = Table('t', self.metadata, + Column('a', Integer, nullable=True), + Column('b', Integer, nullable=False)) + t.create() + eq_( + dict( + (col['name'], col['nullable']) + for col in inspect(self.metadata.bind).get_columns('t') + ), + {"a": True, "b": False} + ) + + @testing.requires.table_reflection + @testing.requires.schemas + def test_get_columns_with_schema(self): + self._test_get_columns(schema=testing.config.test_schema) + + @testing.requires.temp_table_reflection + def test_get_temp_table_columns(self): + meta = MetaData(self.bind) + user_tmp = self.tables.user_tmp + insp = inspect(meta.bind) + cols = insp.get_columns('user_tmp') + self.assert_(len(cols) > 0, len(cols)) + + for i, col in enumerate(user_tmp.columns): + eq_(col.name, cols[i]['name']) + + @testing.requires.temp_table_reflection + @testing.requires.view_column_reflection + @testing.requires.temporary_views + def test_get_temp_view_columns(self): + insp = inspect(self.bind) + cols = insp.get_columns('user_tmp_v') + eq_( + [col['name'] for col in cols], + ['id', 'name', 'foo'] + ) 
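+ # A minimal standalone sketch of the same reflection API, assuming an
+ # in-memory SQLite engine (names and values illustrative only):
+ #
+ # from sqlalchemy import create_engine, inspect
+ # insp = inspect(create_engine("sqlite://"))
+ # insp.get_table_names() # -> [] for an empty database
+ # insp.get_view_names() # -> []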
+ + @testing.requires.view_column_reflection + def test_get_view_columns(self): + self._test_get_columns(table_type='view') + + @testing.requires.view_column_reflection + @testing.requires.schemas + def test_get_view_columns_with_schema(self): + self._test_get_columns( + schema=testing.config.test_schema, table_type='view') + + @testing.provide_metadata + def _test_get_pk_constraint(self, schema=None): + meta = self.metadata + users, addresses = self.tables.users, self.tables.email_addresses + insp = inspect(meta.bind) + + users_cons = insp.get_pk_constraint(users.name, schema=schema) + users_pkeys = users_cons['constrained_columns'] + eq_(users_pkeys, ['user_id']) + + addr_cons = insp.get_pk_constraint(addresses.name, schema=schema) + addr_pkeys = addr_cons['constrained_columns'] + eq_(addr_pkeys, ['address_id']) + + with testing.requires.reflects_pk_names.fail_if(): + eq_(addr_cons['name'], 'email_ad_pk') + + @testing.requires.primary_key_constraint_reflection + def test_get_pk_constraint(self): + self._test_get_pk_constraint() + + @testing.requires.table_reflection + @testing.requires.primary_key_constraint_reflection + @testing.requires.schemas + def test_get_pk_constraint_with_schema(self): + self._test_get_pk_constraint(schema=testing.config.test_schema) + + @testing.requires.table_reflection + @testing.provide_metadata + def test_deprecated_get_primary_keys(self): + meta = self.metadata + users = self.tables.users + insp = Inspector(meta.bind) + assert_raises_message( + sa_exc.SADeprecationWarning, + "Call to deprecated method get_primary_keys." + " Use get_pk_constraint instead.", + insp.get_primary_keys, users.name + ) + + @testing.provide_metadata + def _test_get_foreign_keys(self, schema=None): + meta = self.metadata + users, addresses, dingalings = self.tables.users, \ + self.tables.email_addresses, self.tables.dingalings + insp = inspect(meta.bind) + expected_schema = schema + # users + + if testing.requires.self_referential_foreign_keys.enabled: + users_fkeys = insp.get_foreign_keys(users.name, + schema=schema) + fkey1 = users_fkeys[0] + + with testing.requires.named_constraints.fail_if(): + self.assert_(fkey1['name'] is not None) + + eq_(fkey1['referred_schema'], expected_schema) + eq_(fkey1['referred_table'], users.name) + eq_(fkey1['referred_columns'], ['user_id', ]) + if testing.requires.self_referential_foreign_keys.enabled: + eq_(fkey1['constrained_columns'], ['parent_user_id']) + + # addresses + addr_fkeys = insp.get_foreign_keys(addresses.name, + schema=schema) + fkey1 = addr_fkeys[0] + + with testing.requires.named_constraints.fail_if(): + self.assert_(fkey1['name'] is not None) + + eq_(fkey1['referred_schema'], expected_schema) + eq_(fkey1['referred_table'], users.name) + eq_(fkey1['referred_columns'], ['user_id', ]) + eq_(fkey1['constrained_columns'], ['remote_user_id']) + + @testing.requires.foreign_key_constraint_reflection + def test_get_foreign_keys(self): + self._test_get_foreign_keys() + + @testing.requires.foreign_key_constraint_reflection + @testing.requires.schemas + def test_get_foreign_keys_with_schema(self): + self._test_get_foreign_keys(schema=testing.config.test_schema) + + @testing.provide_metadata + def _test_get_indexes(self, schema=None): + meta = self.metadata + users, addresses, dingalings = self.tables.users, \ + self.tables.email_addresses, self.tables.dingalings + # The database may decide to create indexes for foreign keys, etc. + # so there may be more indexes than expected. 
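+ # Accordingly, the check below asserts that each expected index is
+ # present by name and compares only the keys it knows about, rather
+ # than requiring an exact match of the full index list.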
+ insp = inspect(meta.bind) + indexes = insp.get_indexes('users', schema=schema) + expected_indexes = [ + {'unique': False, + 'column_names': ['test1', 'test2'], + 'name': 'users_t_idx'}, + {'unique': False, + 'column_names': ['user_id', 'test2', 'test1'], + 'name': 'users_all_idx'} + ] + index_names = [d['name'] for d in indexes] + for e_index in expected_indexes: + assert e_index['name'] in index_names + index = indexes[index_names.index(e_index['name'])] + for key in e_index: + eq_(e_index[key], index[key]) + + @testing.requires.index_reflection + def test_get_indexes(self): + self._test_get_indexes() + + @testing.requires.index_reflection + @testing.requires.schemas + def test_get_indexes_with_schema(self): + self._test_get_indexes(schema=testing.config.test_schema) + + @testing.requires.unique_constraint_reflection + def test_get_unique_constraints(self): + self._test_get_unique_constraints() + + @testing.requires.temp_table_reflection + @testing.requires.unique_constraint_reflection + def test_get_temp_table_unique_constraints(self): + insp = inspect(self.bind) + reflected = insp.get_unique_constraints('user_tmp') + for refl in reflected: + # Different dialects handle duplicate index and constraints + # differently, so ignore this flag + refl.pop('duplicates_index', None) + eq_(reflected, [{'column_names': ['name'], 'name': 'user_tmp_uq'}]) + + @testing.requires.temp_table_reflection + def test_get_temp_table_indexes(self): + insp = inspect(self.bind) + indexes = insp.get_indexes('user_tmp') + for ind in indexes: + ind.pop('dialect_options', None) + eq_( + # TODO: we need to add better filtering for indexes/uq constraints + # that are doubled up + [idx for idx in indexes if idx['name'] == 'user_tmp_ix'], + [{'unique': False, 'column_names': ['foo'], 'name': 'user_tmp_ix'}] + ) + + @testing.requires.unique_constraint_reflection + @testing.requires.schemas + def test_get_unique_constraints_with_schema(self): + self._test_get_unique_constraints(schema=testing.config.test_schema) + + @testing.provide_metadata + def _test_get_unique_constraints(self, schema=None): + # SQLite dialect needs to parse the names of the constraints + # separately from what it gets from PRAGMA index_list(), and + # then matches them up. so same set of column_names in two + # constraints will confuse it. Perhaps we should no longer + # bother with index_list() here since we have the whole + # CREATE TABLE? 
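+ # The fixture below deliberately uses constraint names containing dots
+ # and embedded spaces, plus columns named with reserved words ('asc',
+ # 'key'), so identifier quoting is exercised during both CREATE TABLE
+ # and reflection.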
+ uniques = sorted( + [ + {'name': 'unique_a', 'column_names': ['a']}, + {'name': 'unique_a_b_c', 'column_names': ['a', 'b', 'c']}, + {'name': 'unique_c_a_b', 'column_names': ['c', 'a', 'b']}, + {'name': 'unique_asc_key', 'column_names': ['asc', 'key']}, + {'name': 'i.have.dots', 'column_names': ['b']}, + {'name': 'i have spaces', 'column_names': ['c']}, + ], + key=operator.itemgetter('name') + ) + orig_meta = self.metadata + table = Table( + 'testtbl', orig_meta, + Column('a', sa.String(20)), + Column('b', sa.String(30)), + Column('c', sa.Integer), + # reserved identifiers + Column('asc', sa.String(30)), + Column('key', sa.String(30)), + schema=schema + ) + for uc in uniques: + table.append_constraint( + sa.UniqueConstraint(*uc['column_names'], name=uc['name']) + ) + orig_meta.create_all() + + inspector = inspect(orig_meta.bind) + reflected = sorted( + inspector.get_unique_constraints('testtbl', schema=schema), + key=operator.itemgetter('name') + ) + + for orig, refl in zip(uniques, reflected): + # Different dialects handle duplicate index and constraints + # differently, so ignore this flag + refl.pop('duplicates_index', None) + eq_(orig, refl) + + @testing.provide_metadata + def _test_get_view_definition(self, schema=None): + meta = self.metadata + users, addresses, dingalings = self.tables.users, \ + self.tables.email_addresses, self.tables.dingalings + view_name1 = 'users_v' + view_name2 = 'email_addresses_v' + insp = inspect(meta.bind) + v1 = insp.get_view_definition(view_name1, schema=schema) + self.assert_(v1) + v2 = insp.get_view_definition(view_name2, schema=schema) + self.assert_(v2) + + @testing.requires.view_reflection + def test_get_view_definition(self): + self._test_get_view_definition() + + @testing.requires.view_reflection + @testing.requires.schemas + def test_get_view_definition_with_schema(self): + self._test_get_view_definition(schema=testing.config.test_schema) + + @testing.only_on("postgresql", "PG specific feature") + @testing.provide_metadata + def _test_get_table_oid(self, table_name, schema=None): + meta = self.metadata + users, addresses, dingalings = self.tables.users, \ + self.tables.email_addresses, self.tables.dingalings + insp = inspect(meta.bind) + oid = insp.get_table_oid(table_name, schema) + self.assert_(isinstance(oid, int)) + + def test_get_table_oid(self): + self._test_get_table_oid('users') + + @testing.requires.schemas + def test_get_table_oid_with_schema(self): + self._test_get_table_oid('users', schema=testing.config.test_schema) + + @testing.requires.table_reflection + @testing.provide_metadata + def test_autoincrement_col(self): + """test that 'autoincrement' is reflected according to sqla's policy. + + Don't mark this test as unsupported for any backend ! + + (technically it fails with MySQL InnoDB since "id" comes before "id2") + + A backend is better off not returning "autoincrement" at all, + instead of potentially returning "False" for an auto-incrementing + primary key column. 
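+ For reference, a reflected primary-key column as asserted below comes
+ back as a dictionary roughly of this shape (values illustrative)::
+
+ {'name': 'user_id', 'type': INTEGER(),
+ 'nullable': False, 'autoincrement': True}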
+ + """ + + meta = self.metadata + insp = inspect(meta.bind) + + for tname, cname in [ + ('users', 'user_id'), + ('email_addresses', 'address_id'), + ('dingalings', 'dingaling_id'), + ]: + cols = insp.get_columns(tname) + id_ = dict((c['name'], c) for c in cols)[cname] + assert id_.get('autoincrement', True) + + +__all__ = ('ComponentReflectionTest', 'HasTableTest') diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_results.py b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_results.py new file mode 100644 index 0000000..9ffaa6e --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_results.py @@ -0,0 +1,220 @@ +from .. import fixtures, config +from ..config import requirements +from .. import exclusions +from ..assertions import eq_ +from .. import engines + +from sqlalchemy import Integer, String, select, util, sql, DateTime +import datetime +from ..schema import Table, Column + + +class RowFetchTest(fixtures.TablesTest): + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table('plain_pk', metadata, + Column('id', Integer, primary_key=True), + Column('data', String(50)) + ) + Table('has_dates', metadata, + Column('id', Integer, primary_key=True), + Column('today', DateTime) + ) + + @classmethod + def insert_data(cls): + config.db.execute( + cls.tables.plain_pk.insert(), + [ + {"id": 1, "data": "d1"}, + {"id": 2, "data": "d2"}, + {"id": 3, "data": "d3"}, + ] + ) + + config.db.execute( + cls.tables.has_dates.insert(), + [ + {"id": 1, "today": datetime.datetime(2006, 5, 12, 12, 0, 0)} + ] + ) + + def test_via_string(self): + row = config.db.execute( + self.tables.plain_pk.select(). + order_by(self.tables.plain_pk.c.id) + ).first() + + eq_( + row['id'], 1 + ) + eq_( + row['data'], "d1" + ) + + def test_via_int(self): + row = config.db.execute( + self.tables.plain_pk.select(). + order_by(self.tables.plain_pk.c.id) + ).first() + + eq_( + row[0], 1 + ) + eq_( + row[1], "d1" + ) + + def test_via_col_object(self): + row = config.db.execute( + self.tables.plain_pk.select(). + order_by(self.tables.plain_pk.c.id) + ).first() + + eq_( + row[self.tables.plain_pk.c.id], 1 + ) + eq_( + row[self.tables.plain_pk.c.data], "d1" + ) + + @requirements.duplicate_names_in_cursor_description + def test_row_with_dupe_names(self): + result = config.db.execute( + select([self.tables.plain_pk.c.data, + self.tables.plain_pk.c.data.label('data')]). + order_by(self.tables.plain_pk.c.id) + ) + row = result.first() + eq_(result.keys(), ['data', 'data']) + eq_(row, ('d1', 'd1')) + + def test_row_w_scalar_select(self): + """test that a scalar select as a column is returned as such + and that type conversion works OK. + + (this is half a SQLAlchemy Core test and half to catch database + backends that may have unusual behavior with scalar selects.) + + """ + datetable = self.tables.has_dates + s = select([datetable.alias('x').c.today]).as_scalar() + s2 = select([datetable.c.id, s.label('somelabel')]) + row = config.db.execute(s2).first() + + eq_(row['somelabel'], datetime.datetime(2006, 5, 12, 12, 0, 0)) + + +class PercentSchemaNamesTest(fixtures.TablesTest): + """tests using percent signs, spaces in table and column names. + + This is a very fringe use case, doesn't work for MySQL + or Postgresql. the requirement, "percent_schema_names", + is marked "skip" by default. 
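+ Because ``%`` and spaces cannot appear in Python identifiers, the
+ tests below always address these columns via item access, e.g.
+ ``table.c['percent%']`` rather than attribute access.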
+ + """ + + __requires__ = ('percent_schema_names', ) + + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + cls.tables.percent_table = Table('percent%table', metadata, + Column("percent%", Integer), + Column( + "spaces % more spaces", Integer), + ) + cls.tables.lightweight_percent_table = sql.table( + 'percent%table', sql.column("percent%"), + sql.column("spaces % more spaces") + ) + + def test_single_roundtrip(self): + percent_table = self.tables.percent_table + for params in [ + {'percent%': 5, 'spaces % more spaces': 12}, + {'percent%': 7, 'spaces % more spaces': 11}, + {'percent%': 9, 'spaces % more spaces': 10}, + {'percent%': 11, 'spaces % more spaces': 9} + ]: + config.db.execute(percent_table.insert(), params) + self._assert_table() + + def test_executemany_roundtrip(self): + percent_table = self.tables.percent_table + config.db.execute( + percent_table.insert(), + {'percent%': 5, 'spaces % more spaces': 12} + ) + config.db.execute( + percent_table.insert(), + [{'percent%': 7, 'spaces % more spaces': 11}, + {'percent%': 9, 'spaces % more spaces': 10}, + {'percent%': 11, 'spaces % more spaces': 9}] + ) + self._assert_table() + + def _assert_table(self): + percent_table = self.tables.percent_table + lightweight_percent_table = self.tables.lightweight_percent_table + + for table in ( + percent_table, + percent_table.alias(), + lightweight_percent_table, + lightweight_percent_table.alias()): + eq_( + list( + config.db.execute( + table.select().order_by(table.c['percent%']) + ) + ), + [ + (5, 12), + (7, 11), + (9, 10), + (11, 9) + ] + ) + + eq_( + list( + config.db.execute( + table.select(). + where(table.c['spaces % more spaces'].in_([9, 10])). + order_by(table.c['percent%']), + ) + ), + [ + (9, 10), + (11, 9) + ] + ) + + row = config.db.execute(table.select(). + order_by(table.c['percent%'])).first() + eq_(row['percent%'], 5) + eq_(row['spaces % more spaces'], 12) + + eq_(row[table.c['percent%']], 5) + eq_(row[table.c['spaces % more spaces']], 12) + + config.db.execute( + percent_table.update().values( + {percent_table.c['spaces % more spaces']: 15} + ) + ) + + eq_( + list( + config.db.execute( + percent_table. + select(). + order_by(percent_table.c['percent%']) + ) + ), + [(5, 15), (7, 15), (9, 15), (11, 15)] + ) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_select.py b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_select.py new file mode 100644 index 0000000..d4bf63b --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_select.py @@ -0,0 +1,192 @@ +from .. import fixtures, config +from ..assertions import eq_ + +from sqlalchemy import util +from sqlalchemy import Integer, String, select, func, bindparam +from sqlalchemy import testing + +from ..schema import Table, Column + + +class OrderByLabelTest(fixtures.TablesTest): + """Test the dialect sends appropriate ORDER BY expressions when + labels are used. + + This essentially exercises the "supports_simple_order_by_label" + setting. 
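+ E.g., with ``lx = (t.c.x + t.c.y).label('lx')``, a statement such as::
+
+ select([lx]).order_by(lx)
+
+ can render ``ORDER BY lx`` on such a dialect instead of repeating the
+ labeled expression (illustrative; the exact SQL is dialect-specific).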
+ + """ + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table("some_table", metadata, + Column('id', Integer, primary_key=True), + Column('x', Integer), + Column('y', Integer), + Column('q', String(50)), + Column('p', String(50)) + ) + + @classmethod + def insert_data(cls): + config.db.execute( + cls.tables.some_table.insert(), + [ + {"id": 1, "x": 1, "y": 2, "q": "q1", "p": "p3"}, + {"id": 2, "x": 2, "y": 3, "q": "q2", "p": "p2"}, + {"id": 3, "x": 3, "y": 4, "q": "q3", "p": "p1"}, + ] + ) + + def _assert_result(self, select, result): + eq_( + config.db.execute(select).fetchall(), + result + ) + + def test_plain(self): + table = self.tables.some_table + lx = table.c.x.label('lx') + self._assert_result( + select([lx]).order_by(lx), + [(1, ), (2, ), (3, )] + ) + + def test_composed_int(self): + table = self.tables.some_table + lx = (table.c.x + table.c.y).label('lx') + self._assert_result( + select([lx]).order_by(lx), + [(3, ), (5, ), (7, )] + ) + + def test_composed_multiple(self): + table = self.tables.some_table + lx = (table.c.x + table.c.y).label('lx') + ly = (func.lower(table.c.q) + table.c.p).label('ly') + self._assert_result( + select([lx, ly]).order_by(lx, ly.desc()), + [(3, util.u('q1p3')), (5, util.u('q2p2')), (7, util.u('q3p1'))] + ) + + def test_plain_desc(self): + table = self.tables.some_table + lx = table.c.x.label('lx') + self._assert_result( + select([lx]).order_by(lx.desc()), + [(3, ), (2, ), (1, )] + ) + + def test_composed_int_desc(self): + table = self.tables.some_table + lx = (table.c.x + table.c.y).label('lx') + self._assert_result( + select([lx]).order_by(lx.desc()), + [(7, ), (5, ), (3, )] + ) + + def test_group_by_composed(self): + table = self.tables.some_table + expr = (table.c.x + table.c.y).label('lx') + stmt = select([func.count(table.c.id), expr]).group_by(expr).order_by(expr) + self._assert_result( + stmt, + [(1, 3), (1, 5), (1, 7)] + ) + + +class LimitOffsetTest(fixtures.TablesTest): + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table("some_table", metadata, + Column('id', Integer, primary_key=True), + Column('x', Integer), + Column('y', Integer)) + + @classmethod + def insert_data(cls): + config.db.execute( + cls.tables.some_table.insert(), + [ + {"id": 1, "x": 1, "y": 2}, + {"id": 2, "x": 2, "y": 3}, + {"id": 3, "x": 3, "y": 4}, + {"id": 4, "x": 4, "y": 5}, + ] + ) + + def _assert_result(self, select, result, params=()): + eq_( + config.db.execute(select, params).fetchall(), + result + ) + + def test_simple_limit(self): + table = self.tables.some_table + self._assert_result( + select([table]).order_by(table.c.id).limit(2), + [(1, 1, 2), (2, 2, 3)] + ) + + @testing.requires.offset + def test_simple_offset(self): + table = self.tables.some_table + self._assert_result( + select([table]).order_by(table.c.id).offset(2), + [(3, 3, 4), (4, 4, 5)] + ) + + @testing.requires.offset + def test_simple_limit_offset(self): + table = self.tables.some_table + self._assert_result( + select([table]).order_by(table.c.id).limit(2).offset(1), + [(2, 2, 3), (3, 3, 4)] + ) + + @testing.requires.offset + def test_limit_offset_nobinds(self): + """test that 'literal binds' mode works - no bound params.""" + + table = self.tables.some_table + stmt = select([table]).order_by(table.c.id).limit(2).offset(1) + sql = stmt.compile( + dialect=config.db.dialect, + compile_kwargs={"literal_binds": True}) + sql = str(sql) + + self._assert_result( + sql, + [(2, 2, 3), (3, 3, 4)] + ) + + @testing.requires.bound_limit_offset + def 
test_bound_limit(self): + table = self.tables.some_table + self._assert_result( + select([table]).order_by(table.c.id).limit(bindparam('l')), + [(1, 1, 2), (2, 2, 3)], + params={"l": 2} + ) + + @testing.requires.bound_limit_offset + def test_bound_offset(self): + table = self.tables.some_table + self._assert_result( + select([table]).order_by(table.c.id).offset(bindparam('o')), + [(3, 3, 4), (4, 4, 5)], + params={"o": 2} + ) + + @testing.requires.bound_limit_offset + def test_bound_limit_offset(self): + table = self.tables.some_table + self._assert_result( + select([table]).order_by(table.c.id). + limit(bindparam("l")).offset(bindparam("o")), + [(2, 2, 3), (3, 3, 4)], + params={"l": 2, "o": 1} + ) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_sequence.py b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_sequence.py new file mode 100644 index 0000000..b2d52f2 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_sequence.py @@ -0,0 +1,126 @@ +from .. import fixtures, config +from ..config import requirements +from ..assertions import eq_ +from ... import testing + +from ... import Integer, String, Sequence, schema + +from ..schema import Table, Column + + +class SequenceTest(fixtures.TablesTest): + __requires__ = ('sequences',) + __backend__ = True + + run_create_tables = 'each' + + @classmethod + def define_tables(cls, metadata): + Table('seq_pk', metadata, + Column('id', Integer, Sequence('tab_id_seq'), primary_key=True), + Column('data', String(50)) + ) + + Table('seq_opt_pk', metadata, + Column('id', Integer, Sequence('tab_id_seq', optional=True), + primary_key=True), + Column('data', String(50)) + ) + + def test_insert_roundtrip(self): + config.db.execute( + self.tables.seq_pk.insert(), + data="some data" + ) + self._assert_round_trip(self.tables.seq_pk, config.db) + + def test_insert_lastrowid(self): + r = config.db.execute( + self.tables.seq_pk.insert(), + data="some data" + ) + eq_( + r.inserted_primary_key, + [1] + ) + + def test_nextval_direct(self): + r = config.db.execute( + self.tables.seq_pk.c.id.default + ) + eq_( + r, 1 + ) + + @requirements.sequences_optional + def test_optional_seq(self): + r = config.db.execute( + self.tables.seq_opt_pk.insert(), + data="some data" + ) + eq_( + r.inserted_primary_key, + [1] + ) + + def _assert_round_trip(self, table, conn): + row = conn.execute(table.select()).first() + eq_( + row, + (1, "some data") + ) + + +class HasSequenceTest(fixtures.TestBase): + __requires__ = 'sequences', + __backend__ = True + + def test_has_sequence(self): + s1 = Sequence('user_id_seq') + testing.db.execute(schema.CreateSequence(s1)) + try: + eq_(testing.db.dialect.has_sequence(testing.db, + 'user_id_seq'), True) + finally: + testing.db.execute(schema.DropSequence(s1)) + + @testing.requires.schemas + def test_has_sequence_schema(self): + s1 = Sequence('user_id_seq', schema=config.test_schema) + testing.db.execute(schema.CreateSequence(s1)) + try: + eq_(testing.db.dialect.has_sequence( + testing.db, 'user_id_seq', schema=config.test_schema), True) + finally: + testing.db.execute(schema.DropSequence(s1)) + + def test_has_sequence_neg(self): + eq_(testing.db.dialect.has_sequence(testing.db, 'user_id_seq'), + False) + + @testing.requires.schemas + def test_has_sequence_schemas_neg(self): + eq_(testing.db.dialect.has_sequence(testing.db, 'user_id_seq', + schema=config.test_schema), + False) + + @testing.requires.schemas + def test_has_sequence_default_not_in_remote(self): + s1 = Sequence('user_id_seq') + 
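+ # the sequence is created without an explicit schema, i.e. in the
+ # default schema, so the schema-qualified lookup below is expected
+ # to return False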
testing.db.execute(schema.CreateSequence(s1)) + try: + eq_(testing.db.dialect.has_sequence(testing.db, 'user_id_seq', + schema=config.test_schema), + False) + finally: + testing.db.execute(schema.DropSequence(s1)) + + @testing.requires.schemas + def test_has_sequence_remote_not_in_default(self): + s1 = Sequence('user_id_seq', schema=config.test_schema) + testing.db.execute(schema.CreateSequence(s1)) + try: + eq_(testing.db.dialect.has_sequence(testing.db, 'user_id_seq'), + False) + finally: + testing.db.execute(schema.DropSequence(s1)) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_types.py b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_types.py new file mode 100644 index 0000000..230aeb1 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_types.py @@ -0,0 +1,594 @@ +# coding: utf-8 + +from .. import fixtures, config +from ..assertions import eq_ +from ..config import requirements +from sqlalchemy import Integer, Unicode, UnicodeText, select +from sqlalchemy import Date, DateTime, Time, MetaData, String, \ + Text, Numeric, Float, literal, Boolean +from ..schema import Table, Column +from ... import testing +import decimal +import datetime +from ...util import u +from ... import util + + +class _LiteralRoundTripFixture(object): + @testing.provide_metadata + def _literal_round_trip(self, type_, input_, output, filter_=None): + """test literal rendering """ + + # for literal, we test the literal render in an INSERT + # into a typed column. we can then SELECT it back as its + # official type; ideally we'd be able to use CAST here + # but MySQL in particular can't CAST fully + t = Table('t', self.metadata, Column('x', type_)) + t.create() + + for value in input_: + ins = t.insert().values(x=literal(value)).compile( + dialect=testing.db.dialect, + compile_kwargs=dict(literal_binds=True) + ) + testing.db.execute(ins) + + for row in t.select().execute(): + value = row[0] + if filter_ is not None: + value = filter_(value) + assert value in output + + +class _UnicodeFixture(_LiteralRoundTripFixture): + __requires__ = 'unicode_data', + + data = u("Alors vous imaginez ma surprise, au lever du jour, " + "quand une drôle de petite voix m’a réveillé. Elle " + "disait: « S’il vous plaît… dessine-moi un mouton! 
»") + + @classmethod + def define_tables(cls, metadata): + Table('unicode_table', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('unicode_data', cls.datatype), + ) + + def test_round_trip(self): + unicode_table = self.tables.unicode_table + + config.db.execute( + unicode_table.insert(), + { + 'unicode_data': self.data, + } + ) + + row = config.db.execute( + select([ + unicode_table.c.unicode_data, + ]) + ).first() + + eq_( + row, + (self.data, ) + ) + assert isinstance(row[0], util.text_type) + + def test_round_trip_executemany(self): + unicode_table = self.tables.unicode_table + + config.db.execute( + unicode_table.insert(), + [ + { + 'unicode_data': self.data, + } + for i in range(3) + ] + ) + + rows = config.db.execute( + select([ + unicode_table.c.unicode_data, + ]) + ).fetchall() + eq_( + rows, + [(self.data, ) for i in range(3)] + ) + for row in rows: + assert isinstance(row[0], util.text_type) + + def _test_empty_strings(self): + unicode_table = self.tables.unicode_table + + config.db.execute( + unicode_table.insert(), + {"unicode_data": u('')} + ) + row = config.db.execute( + select([unicode_table.c.unicode_data]) + ).first() + eq_(row, (u(''),)) + + def test_literal(self): + self._literal_round_trip(self.datatype, [self.data], [self.data]) + + +class UnicodeVarcharTest(_UnicodeFixture, fixtures.TablesTest): + __requires__ = 'unicode_data', + __backend__ = True + + datatype = Unicode(255) + + @requirements.empty_strings_varchar + def test_empty_strings_varchar(self): + self._test_empty_strings() + + +class UnicodeTextTest(_UnicodeFixture, fixtures.TablesTest): + __requires__ = 'unicode_data', 'text_type' + __backend__ = True + + datatype = UnicodeText() + + @requirements.empty_strings_text + def test_empty_strings_text(self): + self._test_empty_strings() + + +class TextTest(_LiteralRoundTripFixture, fixtures.TablesTest): + __requires__ = 'text_type', + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table('text_table', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('text_data', Text), + ) + + def test_text_roundtrip(self): + text_table = self.tables.text_table + + config.db.execute( + text_table.insert(), + {"text_data": 'some text'} + ) + row = config.db.execute( + select([text_table.c.text_data]) + ).first() + eq_(row, ('some text',)) + + def test_text_empty_strings(self): + text_table = self.tables.text_table + + config.db.execute( + text_table.insert(), + {"text_data": ''} + ) + row = config.db.execute( + select([text_table.c.text_data]) + ).first() + eq_(row, ('',)) + + def test_literal(self): + self._literal_round_trip(Text, ["some text"], ["some text"]) + + def test_literal_quoting(self): + data = '''some 'text' hey "hi there" that's text''' + self._literal_round_trip(Text, [data], [data]) + + def test_literal_backslashes(self): + data = r'backslash one \ backslash two \\ end' + self._literal_round_trip(Text, [data], [data]) + + +class StringTest(_LiteralRoundTripFixture, fixtures.TestBase): + __backend__ = True + + @requirements.unbounded_varchar + def test_nolength_string(self): + metadata = MetaData() + foo = Table('foo', metadata, + Column('one', String) + ) + + foo.create(config.db) + foo.drop(config.db) + + def test_literal(self): + self._literal_round_trip(String(40), ["some text"], ["some text"]) + + def test_literal_quoting(self): + data = '''some 'text' hey "hi there" that's text''' + self._literal_round_trip(String(40), [data], [data]) + + def 
test_literal_backslashes(self): + data = r'backslash one \ backslash two \\ end' + self._literal_round_trip(String(40), [data], [data]) + + +class _DateFixture(_LiteralRoundTripFixture): + compare = None + + @classmethod + def define_tables(cls, metadata): + Table('date_table', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('date_data', cls.datatype), + ) + + def test_round_trip(self): + date_table = self.tables.date_table + + config.db.execute( + date_table.insert(), + {'date_data': self.data} + ) + + row = config.db.execute( + select([ + date_table.c.date_data, + ]) + ).first() + + compare = self.compare or self.data + eq_(row, + (compare, )) + assert isinstance(row[0], type(compare)) + + def test_null(self): + date_table = self.tables.date_table + + config.db.execute( + date_table.insert(), + {'date_data': None} + ) + + row = config.db.execute( + select([ + date_table.c.date_data, + ]) + ).first() + eq_(row, (None,)) + + @testing.requires.datetime_literals + def test_literal(self): + compare = self.compare or self.data + self._literal_round_trip(self.datatype, [self.data], [compare]) + + +class DateTimeTest(_DateFixture, fixtures.TablesTest): + __requires__ = 'datetime', + __backend__ = True + datatype = DateTime + data = datetime.datetime(2012, 10, 15, 12, 57, 18) + + +class DateTimeMicrosecondsTest(_DateFixture, fixtures.TablesTest): + __requires__ = 'datetime_microseconds', + __backend__ = True + datatype = DateTime + data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396) + + +class TimeTest(_DateFixture, fixtures.TablesTest): + __requires__ = 'time', + __backend__ = True + datatype = Time + data = datetime.time(12, 57, 18) + + +class TimeMicrosecondsTest(_DateFixture, fixtures.TablesTest): + __requires__ = 'time_microseconds', + __backend__ = True + datatype = Time + data = datetime.time(12, 57, 18, 396) + + +class DateTest(_DateFixture, fixtures.TablesTest): + __requires__ = 'date', + __backend__ = True + datatype = Date + data = datetime.date(2012, 10, 15) + + +class DateTimeCoercedToDateTimeTest(_DateFixture, fixtures.TablesTest): + __requires__ = 'date', 'date_coerces_from_datetime' + __backend__ = True + datatype = Date + data = datetime.datetime(2012, 10, 15, 12, 57, 18) + compare = datetime.date(2012, 10, 15) + + +class DateTimeHistoricTest(_DateFixture, fixtures.TablesTest): + __requires__ = 'datetime_historic', + __backend__ = True + datatype = DateTime + data = datetime.datetime(1850, 11, 10, 11, 52, 35) + + +class DateHistoricTest(_DateFixture, fixtures.TablesTest): + __requires__ = 'date_historic', + __backend__ = True + datatype = Date + data = datetime.date(1727, 4, 1) + + +class IntegerTest(_LiteralRoundTripFixture, fixtures.TestBase): + __backend__ = True + + def test_literal(self): + self._literal_round_trip(Integer, [5], [5]) + + +class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase): + __backend__ = True + + @testing.emits_warning(r".*does \*not\* support Decimal objects natively") + @testing.provide_metadata + def _do_test(self, type_, input_, output, + filter_=None, check_scale=False): + metadata = self.metadata + t = Table('t', metadata, Column('x', type_)) + t.create() + t.insert().execute([{'x': x} for x in input_]) + + result = set([row[0] for row in t.select().execute()]) + output = set(output) + if filter_: + result = set(filter_(x) for x in result) + output = set(filter_(x) for x in output) + eq_(result, output) + if check_scale: + eq_( + [str(x) for x in result], + [str(x) for x in output], + ) + 
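+ # _do_test() round-trips each input value through a real table and
+ # compares the SELECTed values as a set; a hedged usage sketch, not
+ # itself part of the suite:
+ #
+ # self._do_test(Numeric(precision=10, scale=2),
+ # [decimal.Decimal("1.25")],
+ # [decimal.Decimal("1.25")])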
+ @testing.emits_warning(r".*does \*not\* support Decimal objects natively") + def test_render_literal_numeric(self): + self._literal_round_trip( + Numeric(precision=8, scale=4), + [15.7563, decimal.Decimal("15.7563")], + [decimal.Decimal("15.7563")], + ) + + @testing.emits_warning(r".*does \*not\* support Decimal objects natively") + def test_render_literal_numeric_asfloat(self): + self._literal_round_trip( + Numeric(precision=8, scale=4, asdecimal=False), + [15.7563, decimal.Decimal("15.7563")], + [15.7563], + ) + + def test_render_literal_float(self): + self._literal_round_trip( + Float(4), + [15.7563, decimal.Decimal("15.7563")], + [15.7563, ], + filter_=lambda n: n is not None and round(n, 5) or None + ) + + @testing.requires.precision_generic_float_type + def test_float_custom_scale(self): + self._do_test( + Float(None, decimal_return_scale=7, asdecimal=True), + [15.7563827, decimal.Decimal("15.7563827")], + [decimal.Decimal("15.7563827"), ], + check_scale=True + ) + + def test_numeric_as_decimal(self): + self._do_test( + Numeric(precision=8, scale=4), + [15.7563, decimal.Decimal("15.7563")], + [decimal.Decimal("15.7563")], + ) + + def test_numeric_as_float(self): + self._do_test( + Numeric(precision=8, scale=4, asdecimal=False), + [15.7563, decimal.Decimal("15.7563")], + [15.7563], + ) + + @testing.requires.fetch_null_from_numeric + def test_numeric_null_as_decimal(self): + self._do_test( + Numeric(precision=8, scale=4), + [None], + [None], + ) + + @testing.requires.fetch_null_from_numeric + def test_numeric_null_as_float(self): + self._do_test( + Numeric(precision=8, scale=4, asdecimal=False), + [None], + [None], + ) + + @testing.requires.floats_to_four_decimals + def test_float_as_decimal(self): + self._do_test( + Float(precision=8, asdecimal=True), + [15.7563, decimal.Decimal("15.7563"), None], + [decimal.Decimal("15.7563"), None], + ) + + def test_float_as_float(self): + self._do_test( + Float(precision=8), + [15.7563, decimal.Decimal("15.7563")], + [15.7563], + filter_=lambda n: n is not None and round(n, 5) or None + ) + + @testing.requires.precision_numerics_general + def test_precision_decimal(self): + numbers = set([ + decimal.Decimal("54.234246451650"), + decimal.Decimal("0.004354"), + decimal.Decimal("900.0"), + ]) + + self._do_test( + Numeric(precision=18, scale=12), + numbers, + numbers, + ) + + @testing.requires.precision_numerics_enotation_large + def test_enotation_decimal(self): + """test exceedingly small decimals. + + Decimal reports values with E notation when the exponent + is greater than 6. + + """ + + numbers = set([ + decimal.Decimal('1E-2'), + decimal.Decimal('1E-3'), + decimal.Decimal('1E-4'), + decimal.Decimal('1E-5'), + decimal.Decimal('1E-6'), + decimal.Decimal('1E-7'), + decimal.Decimal('1E-8'), + decimal.Decimal("0.01000005940696"), + decimal.Decimal("0.00000005940696"), + decimal.Decimal("0.00000000000696"), + decimal.Decimal("0.70000000000696"), + decimal.Decimal("696E-12"), + ]) + self._do_test( + Numeric(precision=18, scale=14), + numbers, + numbers + ) + + @testing.requires.precision_numerics_enotation_large + def test_enotation_decimal_large(self): + """test exceedingly large decimals. 
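+ Python's ``decimal`` module reports such values in E notation, e.g.
+ ``str(decimal.Decimal('4E+8'))`` is ``'4E+8'``; the backend must
+ round-trip them without losing precision.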
+ + """ + + numbers = set([ + decimal.Decimal('4E+8'), + decimal.Decimal("5748E+15"), + decimal.Decimal('1.521E+15'), + decimal.Decimal('00000000000000.1E+12'), + ]) + self._do_test( + Numeric(precision=25, scale=2), + numbers, + numbers + ) + + @testing.requires.precision_numerics_many_significant_digits + def test_many_significant_digits(self): + numbers = set([ + decimal.Decimal("31943874831932418390.01"), + decimal.Decimal("319438950232418390.273596"), + decimal.Decimal("87673.594069654243"), + ]) + self._do_test( + Numeric(precision=38, scale=12), + numbers, + numbers + ) + + @testing.requires.precision_numerics_retains_significant_digits + def test_numeric_no_decimal(self): + numbers = set([ + decimal.Decimal("1.000") + ]) + self._do_test( + Numeric(precision=5, scale=3), + numbers, + numbers, + check_scale=True + ) + + +class BooleanTest(_LiteralRoundTripFixture, fixtures.TablesTest): + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table('boolean_table', metadata, + Column('id', Integer, primary_key=True, autoincrement=False), + Column('value', Boolean), + Column('unconstrained_value', Boolean(create_constraint=False)), + ) + + def test_render_literal_bool(self): + self._literal_round_trip( + Boolean(), + [True, False], + [True, False] + ) + + def test_round_trip(self): + boolean_table = self.tables.boolean_table + + config.db.execute( + boolean_table.insert(), + { + 'id': 1, + 'value': True, + 'unconstrained_value': False + } + ) + + row = config.db.execute( + select([ + boolean_table.c.value, + boolean_table.c.unconstrained_value + ]) + ).first() + + eq_( + row, + (True, False) + ) + assert isinstance(row[0], bool) + + def test_null(self): + boolean_table = self.tables.boolean_table + + config.db.execute( + boolean_table.insert(), + { + 'id': 1, + 'value': None, + 'unconstrained_value': None + } + ) + + row = config.db.execute( + select([ + boolean_table.c.value, + boolean_table.c.unconstrained_value + ]) + ).first() + + eq_( + row, + (None, None) + ) + + +__all__ = ('UnicodeVarcharTest', 'UnicodeTextTest', + 'DateTest', 'DateTimeTest', 'TextTest', + 'NumericTest', 'IntegerTest', + 'DateTimeHistoricTest', 'DateTimeCoercedToDateTimeTest', + 'TimeMicrosecondsTest', 'TimeTest', 'DateTimeMicrosecondsTest', + 'DateHistoricTest', 'StringTest', 'BooleanTest') diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_update_delete.py b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_update_delete.py new file mode 100644 index 0000000..e4c61e7 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/suite/test_update_delete.py @@ -0,0 +1,63 @@ +from .. 
import fixtures, config +from ..assertions import eq_ + +from sqlalchemy import Integer, String +from ..schema import Table, Column + + +class SimpleUpdateDeleteTest(fixtures.TablesTest): + run_deletes = 'each' + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table('plain_pk', metadata, + Column('id', Integer, primary_key=True), + Column('data', String(50)) + ) + + @classmethod + def insert_data(cls): + config.db.execute( + cls.tables.plain_pk.insert(), + [ + {"id": 1, "data": "d1"}, + {"id": 2, "data": "d2"}, + {"id": 3, "data": "d3"}, + ] + ) + + def test_update(self): + t = self.tables.plain_pk + r = config.db.execute( + t.update().where(t.c.id == 2), + data="d2_new" + ) + assert not r.is_insert + assert not r.returns_rows + + eq_( + config.db.execute(t.select().order_by(t.c.id)).fetchall(), + [ + (1, "d1"), + (2, "d2_new"), + (3, "d3") + ] + ) + + def test_delete(self): + t = self.tables.plain_pk + r = config.db.execute( + t.delete().where(t.c.id == 2) + ) + assert not r.is_insert + assert not r.returns_rows + eq_( + config.db.execute(t.select().order_by(t.c.id)).fetchall(), + [ + (1, "d1"), + (3, "d3") + ] + ) + +__all__ = ('SimpleUpdateDeleteTest', ) diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/util.py b/lib/python3.4/site-packages/sqlalchemy/testing/util.py new file mode 100644 index 0000000..754e2ad --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/util.py @@ -0,0 +1,280 @@ +# testing/util.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from ..util import jython, pypy, defaultdict, decorator, py2k +import decimal +import gc +import time +import random +import sys +import types + +if jython: + def jython_gc_collect(*args): + """aggressive gc.collect for tests.""" + gc.collect() + time.sleep(0.1) + gc.collect() + gc.collect() + return 0 + + # "lazy" gc, for VM's that don't GC on refcount == 0 + gc_collect = lazy_gc = jython_gc_collect +elif pypy: + def pypy_gc_collect(*args): + gc.collect() + gc.collect() + gc_collect = lazy_gc = pypy_gc_collect +else: + # assume CPython - straight gc.collect, lazy_gc() is a pass + gc_collect = gc.collect + + def lazy_gc(): + pass + + +def picklers(): + picklers = set() + if py2k: + try: + import cPickle + picklers.add(cPickle) + except ImportError: + pass + + import pickle + picklers.add(pickle) + + # yes, this thing needs this much testing + for pickle_ in picklers: + for protocol in -1, 0, 1, 2: + yield pickle_.loads, lambda d: pickle_.dumps(d, protocol) + + +def round_decimal(value, prec): + if isinstance(value, float): + return round(value, prec) + + # can also use shift() here but that is 2.6 only + return (value * decimal.Decimal("1" + "0" * prec) + ).to_integral(decimal.ROUND_FLOOR) / \ + pow(10, prec) + + +class RandomSet(set): + def __iter__(self): + l = list(set.__iter__(self)) + random.shuffle(l) + return iter(l) + + def pop(self): + index = random.randint(0, len(self) - 1) + item = list(set.__iter__(self))[index] + self.remove(item) + return item + + def union(self, other): + return RandomSet(set.union(self, other)) + + def difference(self, other): + return RandomSet(set.difference(self, other)) + + def intersection(self, other): + return RandomSet(set.intersection(self, other)) + + def copy(self): + return RandomSet(self) + + +def conforms_partial_ordering(tuples, sorted_elements): + """True if the given sorting conforms 
to the given partial ordering.""" + + deps = defaultdict(set) + for parent, child in tuples: + deps[parent].add(child) + for i, node in enumerate(sorted_elements): + for n in sorted_elements[i:]: + if node in deps[n]: + return False + else: + return True + + +def all_partial_orderings(tuples, elements): + edges = defaultdict(set) + for parent, child in tuples: + edges[child].add(parent) + + def _all_orderings(elements): + + if len(elements) == 1: + yield list(elements) + else: + for elem in elements: + subset = set(elements).difference([elem]) + if not subset.intersection(edges[elem]): + for sub_ordering in _all_orderings(subset): + yield [elem] + sub_ordering + + return iter(_all_orderings(elements)) + + +def function_named(fn, name): + """Return a function with a given __name__. + + Will assign to __name__ and return the original function if possible on + the Python implementation, otherwise a new function will be constructed. + + This function should be phased out as much as possible + in favor of @decorator. Tests that "generate" many named tests + should be modernized. + + """ + try: + fn.__name__ = name + except TypeError: + fn = types.FunctionType(fn.__code__, fn.__globals__, name, + fn.__defaults__, fn.__closure__) + return fn + + +def run_as_contextmanager(ctx, fn, *arg, **kw): + """Run the given function under the given contextmanager, + simulating the behavior of 'with' to support older + Python versions. + + This is not necessary anymore as we have placed 2.6 + as minimum Python version, however some tests are still using + this structure. + + """ + + obj = ctx.__enter__() + try: + result = fn(obj, *arg, **kw) + ctx.__exit__(None, None, None) + return result + except: + exc_info = sys.exc_info() + raise_ = ctx.__exit__(*exc_info) + if raise_ is None: + raise + else: + return raise_ + + +def rowset(results): + """Converts the results of sql execution into a plain set of column tuples. + + Useful for asserting the results of an unordered query. + """ + + return set([tuple(row) for row in results]) + + +def fail(msg): + assert False, msg + + +@decorator +def provide_metadata(fn, *args, **kw): + """Provide bound MetaData for a single test, dropping afterwards.""" + + from . import config + from . import engines + from sqlalchemy import schema + + metadata = schema.MetaData(config.db) + self = args[0] + prev_meta = getattr(self, 'metadata', None) + self.metadata = metadata + try: + return fn(*args, **kw) + finally: + engines.drop_all_tables(metadata, config.db) + self.metadata = prev_meta + + +def force_drop_names(*names): + """Force the given table names to be dropped after test complete, + isolating for foreign key cycles + + """ + from . import config + from sqlalchemy import inspect + + @decorator + def go(fn, *args, **kw): + + try: + return fn(*args, **kw) + finally: + drop_all_tables( + config.db, inspect(config.db), include_names=names) + return go + + +class adict(dict): + """Dict keys available as attributes. 
Shadows.""" + + def __getattribute__(self, key): + try: + return self[key] + except KeyError: + return dict.__getattribute__(self, key) + + def __call__(self, *keys): + return tuple([self[key] for key in keys]) + + get_all = __call__ + + +def drop_all_tables(engine, inspector, schema=None, include_names=None): + from sqlalchemy import Column, Table, Integer, MetaData, \ + ForeignKeyConstraint + from sqlalchemy.schema import DropTable, DropConstraint + + if include_names is not None: + include_names = set(include_names) + + with engine.connect() as conn: + for tname, fkcs in reversed( + inspector.get_sorted_table_and_fkc_names(schema=schema)): + if tname: + if include_names is not None and tname not in include_names: + continue + conn.execute(DropTable( + Table(tname, MetaData(), schema=schema) + )) + elif fkcs: + if not engine.dialect.supports_alter: + continue + for tname, fkc in fkcs: + if include_names is not None and \ + tname not in include_names: + continue + tb = Table( + tname, MetaData(), + Column('x', Integer), + Column('y', Integer), + schema=schema + ) + conn.execute(DropConstraint( + ForeignKeyConstraint( + [tb.c.x], [tb.c.y], name=fkc) + )) + + +def teardown_events(event_cls): + @decorator + def decorate(fn, *arg, **kw): + try: + return fn(*arg, **kw) + finally: + event_cls._clear() + return decorate + diff --git a/lib/python3.4/site-packages/sqlalchemy/testing/warnings.py b/lib/python3.4/site-packages/sqlalchemy/testing/warnings.py new file mode 100644 index 0000000..de372dc --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/testing/warnings.py @@ -0,0 +1,34 @@ +# testing/warnings.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from __future__ import absolute_import + +import warnings +from .. import exc as sa_exc +from . import assertions + + +def setup_filters(): + """Set global warning behavior for the test suite.""" + + warnings.filterwarnings('ignore', + category=sa_exc.SAPendingDeprecationWarning) + warnings.filterwarnings('error', category=sa_exc.SADeprecationWarning) + warnings.filterwarnings('error', category=sa_exc.SAWarning) + + +def assert_warnings(fn, warning_msgs, regex=False): + """Assert that each of the given warnings are emitted by fn. + + Deprecated. Please use assertions.expect_warnings(). + + """ + + with assertions._expect_warnings( + sa_exc.SAWarning, warning_msgs, regex=regex): + return fn() + diff --git a/lib/python3.4/site-packages/sqlalchemy/types.py b/lib/python3.4/site-packages/sqlalchemy/types.py new file mode 100644 index 0000000..44ed696 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/types.py @@ -0,0 +1,78 @@ +# types.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Compatibility namespace for sqlalchemy.sql.types. 
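+Importing from here keeps long-standing imports working, e.g.
+``from sqlalchemy.types import Integer, String`` still resolves even
+though the implementations live in ``sqlalchemy.sql.sqltypes``.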
+ +""" + +__all__ = ['TypeEngine', 'TypeDecorator', 'UserDefinedType', + 'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR', 'TEXT', 'Text', + 'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME', + 'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT', + 'SMALLINT', 'INTEGER', 'DATE', 'TIME', 'String', 'Integer', + 'SmallInteger', 'BigInteger', 'Numeric', 'Float', 'DateTime', + 'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean', 'Unicode', + 'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum'] + +from .sql.type_api import ( + adapt_type, + TypeEngine, + TypeDecorator, + Variant, + to_instance, + UserDefinedType +) +from .sql.sqltypes import ( + BIGINT, + BINARY, + BLOB, + BOOLEAN, + BigInteger, + Binary, + _Binary, + Boolean, + CHAR, + CLOB, + Concatenable, + DATE, + DATETIME, + DECIMAL, + Date, + DateTime, + Enum, + FLOAT, + Float, + INT, + INTEGER, + Integer, + Interval, + LargeBinary, + MatchType, + NCHAR, + NVARCHAR, + NullType, + NULLTYPE, + NUMERIC, + Numeric, + PickleType, + REAL, + SchemaType, + SMALLINT, + SmallInteger, + String, + STRINGTYPE, + TEXT, + TIME, + TIMESTAMP, + Text, + Time, + Unicode, + UnicodeText, + VARBINARY, + VARCHAR, + _type_map + ) diff --git a/lib/python3.4/site-packages/sqlalchemy/util/__init__.py b/lib/python3.4/site-packages/sqlalchemy/util/__init__.py new file mode 100644 index 0000000..8dcec48 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/util/__init__.py @@ -0,0 +1,49 @@ +# util/__init__.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from .compat import callable, cmp, reduce, \ + threading, py3k, py33, py36, py2k, jython, pypy, cpython, win32, \ + pickle, dottedgetter, parse_qsl, namedtuple, next, reraise, \ + raise_from_cause, text_type, safe_kwarg, string_types, int_types, \ + binary_type, nested, \ + quote_plus, with_metaclass, print_, itertools_filterfalse, u, ue, b,\ + unquote_plus, unquote, b64decode, b64encode, byte_buffer, itertools_filter,\ + iterbytes, StringIO, inspect_getargspec, zip_longest + +from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \ + Properties, OrderedProperties, ImmutableProperties, OrderedDict, \ + OrderedSet, IdentitySet, OrderedIdentitySet, column_set, \ + column_dict, ordered_column_set, populate_column_dict, unique_list, \ + UniqueAppender, PopulateDict, EMPTY_SET, to_list, to_set, \ + to_column_set, update_copy, flatten_iterator, has_intersection, \ + LRUCache, ScopedRegistry, ThreadLocalRegistry, WeakSequence, \ + coerce_generator_arg, lightweight_named_tuple + +from .langhelpers import iterate_attributes, class_hierarchy, \ + portable_instancemethod, unbound_method_to_callable, \ + getargspec_init, format_argspec_init, format_argspec_plus, \ + get_func_kwargs, get_cls_kwargs, decorator, as_interface, \ + memoized_property, memoized_instancemethod, md5_hex, \ + group_expirable_memoized_property, dependencies, decode_slice, \ + monkeypatch_proxied_specials, asbool, bool_or_str, coerce_kw_type,\ + duck_type_collection, assert_arg_type, symbol, dictlike_iteritems,\ + classproperty, set_creation_order, warn_exception, warn, NoneType,\ + constructor_copy, methods_equivalent, chop_traceback, asint,\ + generic_repr, counter, PluginLoader, hybridproperty, hybridmethod, \ + safe_reraise,\ + get_callable_argspec, only_once, attrsetter, ellipses_string, \ + warn_limited, map_bits, MemoizedSlots, EnsureKWArgType + 
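+# the deprecation helpers below are re-exported for the same reason:
+# ``sqlalchemy.util`` presents one flat namespace over compat,
+# _collections, langhelpers and deprecations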
+from .deprecations import warn_deprecated, warn_pending_deprecation, \ + deprecated, pending_deprecation, inject_docstring_text + +# things that used to be not always available, +# but are now as of currently supported Python versions +from collections import defaultdict +from functools import partial +from functools import update_wrapper +from contextlib import contextmanager diff --git a/lib/python3.4/site-packages/sqlalchemy/util/_collections.py b/lib/python3.4/site-packages/sqlalchemy/util/_collections.py new file mode 100644 index 0000000..ed1a3e4 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/util/_collections.py @@ -0,0 +1,1043 @@ +# util/_collections.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Collection classes and helpers.""" + +from __future__ import absolute_import +import weakref +import operator +from .compat import threading, itertools_filterfalse, string_types +from . import py2k +import types +import collections + +EMPTY_SET = frozenset() + + +class AbstractKeyedTuple(tuple): + __slots__ = () + + def keys(self): + """Return a list of string key names for this :class:`.KeyedTuple`. + + .. seealso:: + + :attr:`.KeyedTuple._fields` + + """ + + return list(self._fields) + + +class KeyedTuple(AbstractKeyedTuple): + """``tuple`` subclass that adds labeled names. + + E.g.:: + + >>> k = KeyedTuple([1, 2, 3], labels=["one", "two", "three"]) + >>> k.one + 1 + >>> k.two + 2 + + Result rows returned by :class:`.Query` that contain multiple + ORM entities and/or column expressions make use of this + class to return rows. + + The :class:`.KeyedTuple` exhibits similar behavior to the + ``collections.namedtuple()`` construct provided in the Python + standard library, but is architected very differently. + Unlike ``collections.namedtuple()``, :class:`.KeyedTuple` + does not rely on creation of custom subtypes in order to represent + a new series of keys; instead, each :class:`.KeyedTuple` instance + receives its list of keys in place. The subtype approach + of ``collections.namedtuple()`` introduces significant complexity + and performance overhead, which is not necessary for the + :class:`.Query` object's use case. + + .. versionchanged:: 0.8 + Compatibility methods with ``collections.namedtuple()`` have been + added including :attr:`.KeyedTuple._fields` and + :meth:`.KeyedTuple._asdict`. + + .. seealso:: + + :ref:`ormtutorial_querying` + + """ + + def __new__(cls, vals, labels=None): + t = tuple.__new__(cls, vals) + if labels: + t.__dict__.update(zip(labels, vals)) + else: + labels = [] + t.__dict__['_labels'] = labels + return t + + @property + def _fields(self): + """Return a tuple of string key names for this :class:`.KeyedTuple`. + + This method provides compatibility with ``collections.namedtuple()``. + + .. versionadded:: 0.8 + + .. seealso:: + + :meth:`.KeyedTuple.keys` + + """ + return tuple([l for l in self._labels if l is not None]) + + def __setattr__(self, key, value): + raise AttributeError("Can't set attribute: %s" % key) + + def _asdict(self): + """Return the contents of this :class:`.KeyedTuple` as a dictionary. + + This method provides compatibility with ``collections.namedtuple()``, + with the exception that the dictionary returned is **not** ordered. + + ..
versionadded:: 0.8 + + """ + return dict((key, self.__dict__[key]) for key in self.keys()) + + +class _LW(AbstractKeyedTuple): + __slots__ = () + + def __new__(cls, vals): + return tuple.__new__(cls, vals) + + def __reduce__(self): + # for pickling, degrade down to the regular + # KeyedTuple, thus avoiding anonymous class pickling + # difficulties + return KeyedTuple, (list(self), self._real_fields) + + def _asdict(self): + """Return the contents of this :class:`.KeyedTuple` as a dictionary.""" + + d = dict(zip(self._real_fields, self)) + d.pop(None, None) + return d + + +class ImmutableContainer(object): + def _immutable(self, *arg, **kw): + raise TypeError("%s object is immutable" % self.__class__.__name__) + + __delitem__ = __setitem__ = __setattr__ = _immutable + + +class immutabledict(ImmutableContainer, dict): + + clear = pop = popitem = setdefault = \ + update = ImmutableContainer._immutable + + def __new__(cls, *args): + new = dict.__new__(cls) + dict.__init__(new, *args) + return new + + def __init__(self, *args): + pass + + def __reduce__(self): + return immutabledict, (dict(self), ) + + def union(self, d): + if not d: + return self + elif not self: + if isinstance(d, immutabledict): + return d + else: + return immutabledict(d) + else: + d2 = immutabledict(self) + dict.update(d2, d) + return d2 + + def __repr__(self): + return "immutabledict(%s)" % dict.__repr__(self) + + +class Properties(object): + """Provide a __getattr__/__setattr__ interface over a dict.""" + + __slots__ = '_data', + + def __init__(self, data): + object.__setattr__(self, '_data', data) + + def __len__(self): + return len(self._data) + + def __iter__(self): + return iter(list(self._data.values())) + + def __add__(self, other): + return list(self) + list(other) + + def __setitem__(self, key, object): + self._data[key] = object + + def __getitem__(self, key): + return self._data[key] + + def __delitem__(self, key): + del self._data[key] + + def __setattr__(self, key, obj): + self._data[key] = obj + + def __getstate__(self): + return {'_data': self.__dict__['_data']} + + def __setstate__(self, state): + self.__dict__['_data'] = state['_data'] + + def __getattr__(self, key): + try: + return self._data[key] + except KeyError: + raise AttributeError(key) + + def __contains__(self, key): + return key in self._data + + def as_immutable(self): + """Return an immutable proxy for this :class:`.Properties`.""" + + return ImmutableProperties(self._data) + + def update(self, value): + self._data.update(value) + + def get(self, key, default=None): + if key in self: + return self[key] + else: + return default + + def keys(self): + return list(self._data) + + def values(self): + return list(self._data.values()) + + def items(self): + return list(self._data.items()) + + def has_key(self, key): + return key in self._data + + def clear(self): + self._data.clear() + + +class OrderedProperties(Properties): + """Provide a __getattr__/__setattr__ interface with an OrderedDict + as backing store.""" + + __slots__ = () + + def __init__(self): + Properties.__init__(self, OrderedDict()) + + +class ImmutableProperties(ImmutableContainer, Properties): + """Provide immutable dict/object attribute to an underlying dictionary.""" + + __slots__ = () + + +class OrderedDict(dict): + """A dict that returns keys/values/items in the order they were added.""" + + __slots__ = '_list', + + def __reduce__(self): + return OrderedDict, (self.items(),) + + def __init__(self, ____sequence=None, **kwargs): + self._list = [] + if ____sequence is None: + if 
kwargs: + self.update(**kwargs) + else: + self.update(____sequence, **kwargs) + + def clear(self): + self._list = [] + dict.clear(self) + + def copy(self): + return self.__copy__() + + def __copy__(self): + return OrderedDict(self) + + def sort(self, *arg, **kw): + self._list.sort(*arg, **kw) + + def update(self, ____sequence=None, **kwargs): + if ____sequence is not None: + if hasattr(____sequence, 'keys'): + for key in ____sequence.keys(): + self.__setitem__(key, ____sequence[key]) + else: + for key, value in ____sequence: + self[key] = value + if kwargs: + self.update(kwargs) + + def setdefault(self, key, value): + if key not in self: + self.__setitem__(key, value) + return value + else: + return self.__getitem__(key) + + def __iter__(self): + return iter(self._list) + + def keys(self): + return list(self) + + def values(self): + return [self[key] for key in self._list] + + def items(self): + return [(key, self[key]) for key in self._list] + + if py2k: + def itervalues(self): + return iter(self.values()) + + def iterkeys(self): + return iter(self) + + def iteritems(self): + return iter(self.items()) + + def __setitem__(self, key, object): + if key not in self: + try: + self._list.append(key) + except AttributeError: + # work around Python pickle loads() with + # dict subclass (seems to ignore __setstate__?) + self._list = [key] + dict.__setitem__(self, key, object) + + def __delitem__(self, key): + dict.__delitem__(self, key) + self._list.remove(key) + + def pop(self, key, *default): + present = key in self + value = dict.pop(self, key, *default) + if present: + self._list.remove(key) + return value + + def popitem(self): + item = dict.popitem(self) + self._list.remove(item[0]) + return item + + +class OrderedSet(set): + def __init__(self, d=None): + set.__init__(self) + self._list = [] + if d is not None: + self._list = unique_list(d) + set.update(self, self._list) + else: + self._list = [] + + def add(self, element): + if element not in self: + self._list.append(element) + set.add(self, element) + + def remove(self, element): + set.remove(self, element) + self._list.remove(element) + + def insert(self, pos, element): + if element not in self: + self._list.insert(pos, element) + set.add(self, element) + + def discard(self, element): + if element in self: + self._list.remove(element) + set.remove(self, element) + + def clear(self): + set.clear(self) + self._list = [] + + def __getitem__(self, key): + return self._list[key] + + def __iter__(self): + return iter(self._list) + + def __add__(self, other): + return self.union(other) + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, self._list) + + __str__ = __repr__ + + def update(self, iterable): + for e in iterable: + if e not in self: + self._list.append(e) + set.add(self, e) + return self + + __ior__ = update + + def union(self, other): + result = self.__class__(self) + result.update(other) + return result + + __or__ = union + + def intersection(self, other): + other = set(other) + return self.__class__(a for a in self if a in other) + + __and__ = intersection + + def symmetric_difference(self, other): + other = set(other) + result = self.__class__(a for a in self if a not in other) + result.update(a for a in other if a not in self) + return result + + __xor__ = symmetric_difference + + def difference(self, other): + other = set(other) + return self.__class__(a for a in self if a not in other) + + __sub__ = difference + + def intersection_update(self, other): + other = set(other) + set.intersection_update(self, other) 
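+ # the backing set is mutated first; _list is then re-filtered against + # the result so that insertion order survives the unordered set-level + # operation (the same two-phase pattern repeats in the methods below)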
+ self._list = [a for a in self._list if a in other] + return self + + __iand__ = intersection_update + + def symmetric_difference_update(self, other): + set.symmetric_difference_update(self, other) + self._list = [a for a in self._list if a in self] + self._list += [a for a in other._list if a in self] + return self + + __ixor__ = symmetric_difference_update + + def difference_update(self, other): + set.difference_update(self, other) + self._list = [a for a in self._list if a in self] + return self + + __isub__ = difference_update + + +class IdentitySet(object): + """A set that considers only object id() for uniqueness. + + This strategy has edge cases for builtin types- it's possible to have + two 'foo' strings in one of these sets, for example. Use sparingly. + + """ + + _working_set = set + + def __init__(self, iterable=None): + self._members = dict() + if iterable: + for o in iterable: + self.add(o) + + def add(self, value): + self._members[id(value)] = value + + def __contains__(self, value): + return id(value) in self._members + + def remove(self, value): + del self._members[id(value)] + + def discard(self, value): + try: + self.remove(value) + except KeyError: + pass + + def pop(self): + try: + pair = self._members.popitem() + return pair[1] + except KeyError: + raise KeyError('pop from an empty set') + + def clear(self): + self._members.clear() + + def __cmp__(self, other): + raise TypeError('cannot compare sets using cmp()') + + def __eq__(self, other): + if isinstance(other, IdentitySet): + return self._members == other._members + else: + return False + + def __ne__(self, other): + if isinstance(other, IdentitySet): + return self._members != other._members + else: + return True + + def issubset(self, iterable): + other = type(self)(iterable) + + if len(self) > len(other): + return False + for m in itertools_filterfalse(other._members.__contains__, + iter(self._members.keys())): + return False + return True + + def __le__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + return self.issubset(other) + + def __lt__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + return len(self) < len(other) and self.issubset(other) + + def issuperset(self, iterable): + other = type(self)(iterable) + + if len(self) < len(other): + return False + + for m in itertools_filterfalse(self._members.__contains__, + iter(other._members.keys())): + return False + return True + + def __ge__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + return self.issuperset(other) + + def __gt__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + return len(self) > len(other) and self.issuperset(other) + + def union(self, iterable): + result = type(self)() + # testlib.pragma exempt:__hash__ + members = self._member_id_tuples() + other = _iter_id(iterable) + result._members.update(self._working_set(members).union(other)) + return result + + def __or__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + return self.union(other) + + def update(self, iterable): + self._members = self.union(iterable)._members + + def __ior__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + self.update(other) + return self + + def difference(self, iterable): + result = type(self)() + # testlib.pragma exempt:__hash__ + members = self._member_id_tuples() + other = _iter_id(iterable) + result._members.update(self._working_set(members).difference(other)) + return 
result + + def __sub__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + return self.difference(other) + + def difference_update(self, iterable): + self._members = self.difference(iterable)._members + + def __isub__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + self.difference_update(other) + return self + + def intersection(self, iterable): + result = type(self)() + # testlib.pragma exempt:__hash__ + members = self._member_id_tuples() + other = _iter_id(iterable) + result._members.update(self._working_set(members).intersection(other)) + return result + + def __and__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + return self.intersection(other) + + def intersection_update(self, iterable): + self._members = self.intersection(iterable)._members + + def __iand__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + self.intersection_update(other) + return self + + def symmetric_difference(self, iterable): + result = type(self)() + # testlib.pragma exempt:__hash__ + members = self._member_id_tuples() + other = _iter_id(iterable) + result._members.update( + self._working_set(members).symmetric_difference(other)) + return result + + def _member_id_tuples(self): + return ((id(v), v) for v in self._members.values()) + + def __xor__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + return self.symmetric_difference(other) + + def symmetric_difference_update(self, iterable): + self._members = self.symmetric_difference(iterable)._members + + def __ixor__(self, other): + if not isinstance(other, IdentitySet): + return NotImplemented + # in-place operator must mutate self, not discard the result + self.symmetric_difference_update(other) + return self + + def copy(self): + return type(self)(iter(self._members.values())) + + __copy__ = copy + + def __len__(self): + return len(self._members) + + def __iter__(self): + return iter(self._members.values()) + + def __hash__(self): + raise TypeError('set objects are unhashable') + + def __repr__(self): + return '%s(%r)' % (type(self).__name__, list(self._members.values())) + + +class WeakSequence(object): + def __init__(self, __elements=()): + self._storage = [ + weakref.ref(element, self._remove) for element in __elements + ] + + def append(self, item): + self._storage.append(weakref.ref(item, self._remove)) + + def _remove(self, ref): + self._storage.remove(ref) + + def __len__(self): + return len(self._storage) + + def __iter__(self): + return (obj for obj in + (ref() for ref in self._storage) if obj is not None) + + def __getitem__(self, index): + try: + obj = self._storage[index] + except IndexError: + # list indexing raises IndexError, not KeyError + raise IndexError("Index %s out of range" % index) + else: + return obj() + + +class OrderedIdentitySet(IdentitySet): + class _working_set(OrderedSet): + # a testing pragma: exempt the OIDS working set from the test suite's + # "never call the user's __hash__" assertions. this is a big hammer, + # but it's safe here: IDS operates on (id, instance) tuples in the + # working set. + __sa_hash_exempt__ = True + + def __init__(self, iterable=None): + IdentitySet.__init__(self) + self._members = OrderedDict() + if iterable: + for o in iterable: + self.add(o) + + +class PopulateDict(dict): + """A dict which populates missing values via a creation function. + + Note the creation function takes a key, unlike + collections.defaultdict.
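+ + E.g., a minimal sketch:: + + >>> d = PopulateDict(str.upper) + >>> d["key"] + 'KEY' + >>> "key" in d + True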
+ + """ + + def __init__(self, creator): + self.creator = creator + + def __missing__(self, key): + self[key] = val = self.creator(key) + return val + +# Define collections that are capable of storing +# ColumnElement objects as hashable keys/elements. +# At this point, these are mostly historical, things +# used to be more complicated. +column_set = set +column_dict = dict +ordered_column_set = OrderedSet +populate_column_dict = PopulateDict + + +_getters = PopulateDict(operator.itemgetter) + +_property_getters = PopulateDict( + lambda idx: property(operator.itemgetter(idx))) + + +def unique_list(seq, hashfunc=None): + seen = set() + seen_add = seen.add + if not hashfunc: + return [x for x in seq + if x not in seen + and not seen_add(x)] + else: + return [x for x in seq + if hashfunc(x) not in seen + and not seen_add(hashfunc(x))] + + +class UniqueAppender(object): + """Appends items to a collection ensuring uniqueness. + + Additional appends() of the same object are ignored. Membership is + determined by identity (``is``) not equality (``==``). + """ + + def __init__(self, data, via=None): + self.data = data + self._unique = {} + if via: + self._data_appender = getattr(data, via) + elif hasattr(data, 'append'): + self._data_appender = data.append + elif hasattr(data, 'add'): + self._data_appender = data.add + + def append(self, item): + id_ = id(item) + if id_ not in self._unique: + self._data_appender(item) + self._unique[id_] = True + + def __iter__(self): + return iter(self.data) + + +def coerce_generator_arg(arg): + if len(arg) == 1 and isinstance(arg[0], types.GeneratorType): + return list(arg[0]) + else: + return arg + + +def to_list(x, default=None): + if x is None: + return default + if not isinstance(x, collections.Iterable) or isinstance(x, string_types): + return [x] + elif isinstance(x, list): + return x + else: + return list(x) + + +def has_intersection(set_, iterable): + """return True if any items of set_ are present in iterable. + + Goes through special effort to ensure __hash__ is not called + on items in iterable that don't support it. + + """ + # TODO: optimize, write in C, etc. + return bool( + set_.intersection([i for i in iterable if i.__hash__]) + ) + + +def to_set(x): + if x is None: + return set() + if not isinstance(x, set): + return set(to_list(x)) + else: + return x + + +def to_column_set(x): + if x is None: + return column_set() + if not isinstance(x, column_set): + return column_set(to_list(x)) + else: + return x + + +def update_copy(d, _new=None, **kw): + """Copy the given dict and update with the given values.""" + + d = d.copy() + if _new: + d.update(_new) + d.update(**kw) + return d + + +def flatten_iterator(x): + """Given an iterator of which further sub-elements may also be + iterators, flatten the sub-elements into a single iterator. + + """ + for elem in x: + if not isinstance(elem, str) and hasattr(elem, '__iter__'): + for y in flatten_iterator(elem): + yield y + else: + yield elem + + +class LRUCache(dict): + """Dictionary with 'squishy' removal of least + recently used items. + + Note that either get() or [] should be used here, but + generally it's not safe to do an "in" check first as the dictionary + can change subsequent to that call.
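+ + A minimal sketch of typical use:: + + >>> cache = LRUCache(capacity=2) + >>> cache["a"] = 1 + >>> cache["a"] + 1 + >>> cache.get("b", "default") + 'default'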
+ + """ + + def __init__(self, capacity=100, threshold=.5): + self.capacity = capacity + self.threshold = threshold + self._counter = 0 + self._mutex = threading.Lock() + + def _inc_counter(self): + self._counter += 1 + return self._counter + + def get(self, key, default=None): + item = dict.get(self, key, default) + if item is not default: + item[2] = self._inc_counter() + return item[1] + else: + return default + + def __getitem__(self, key): + item = dict.__getitem__(self, key) + item[2] = self._inc_counter() + return item[1] + + def values(self): + return [i[1] for i in dict.values(self)] + + def setdefault(self, key, value): + if key in self: + return self[key] + else: + self[key] = value + return value + + def __setitem__(self, key, value): + item = dict.get(self, key) + if item is None: + item = [key, value, self._inc_counter()] + dict.__setitem__(self, key, item) + else: + item[1] = value + self._manage_size() + + def _manage_size(self): + if not self._mutex.acquire(False): + return + try: + while len(self) > self.capacity + self.capacity * self.threshold: + by_counter = sorted(dict.values(self), + key=operator.itemgetter(2), + reverse=True) + for item in by_counter[self.capacity:]: + try: + del self[item[0]] + except KeyError: + # deleted elsewhere; skip + continue + finally: + self._mutex.release() + + +_lw_tuples = LRUCache(100) + + +def lightweight_named_tuple(name, fields): + hash_ = (name, ) + tuple(fields) + tp_cls = _lw_tuples.get(hash_) + if tp_cls: + return tp_cls + + tp_cls = type( + name, (_LW,), + dict([ + (field, _property_getters[idx]) + for idx, field in enumerate(fields) if field is not None + ] + [('__slots__', ())]) + ) + + tp_cls._real_fields = fields + tp_cls._fields = tuple([f for f in fields if f is not None]) + + _lw_tuples[hash_] = tp_cls + return tp_cls + + +class ScopedRegistry(object): + """A Registry that can store one or multiple instances of a single + class on the basis of a "scope" function. + + The object implements ``__call__`` as the "getter", so by + calling ``myregistry()`` the contained object is returned + for the current scope. + + :param createfunc: + a callable that returns a new object to be placed in the registry + + :param scopefunc: + a callable that will return a key to store/retrieve an object. + """ + + def __init__(self, createfunc, scopefunc): + """Construct a new :class:`.ScopedRegistry`. + + :param createfunc: A creation function that will generate + a new value for the current scope, if none is present. + + :param scopefunc: A function that returns a hashable + token representing the current scope (such as, current + thread identifier). + + """ + self.createfunc = createfunc + self.scopefunc = scopefunc + self.registry = {} + + def __call__(self): + key = self.scopefunc() + try: + return self.registry[key] + except KeyError: + return self.registry.setdefault(key, self.createfunc()) + + def has(self): + """Return True if an object is present in the current scope.""" + + return self.scopefunc() in self.registry + + def set(self, obj): + """Set the value for the current scope.""" + + self.registry[self.scopefunc()] = obj + + def clear(self): + """Clear the current scope, if any.""" + + try: + del self.registry[self.scopefunc()] + except KeyError: + pass + + +class ThreadLocalRegistry(ScopedRegistry): + """A :class:`.ScopedRegistry` that uses a ``threading.local()`` + variable for storage. 
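+ + A minimal sketch; within a single thread, repeated calls return the + same object:: + + >>> reg = ThreadLocalRegistry(dict) + >>> reg() is reg() + True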
+ + """ + + def __init__(self, createfunc): + self.createfunc = createfunc + self.registry = threading.local() + + def __call__(self): + try: + return self.registry.value + except AttributeError: + val = self.registry.value = self.createfunc() + return val + + def has(self): + return hasattr(self.registry, "value") + + def set(self, obj): + self.registry.value = obj + + def clear(self): + try: + del self.registry.value + except AttributeError: + pass + + +def _iter_id(iterable): + """Generator: ((id(o), o) for o in iterable).""" + + for item in iterable: + yield id(item), item diff --git a/lib/python3.4/site-packages/sqlalchemy/util/compat.py b/lib/python3.4/site-packages/sqlalchemy/util/compat.py new file mode 100644 index 0000000..99b5177 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/util/compat.py @@ -0,0 +1,263 @@ +# util/compat.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Handle Python version/platform incompatibilities.""" + +import sys + +try: + import threading +except ImportError: + import dummy_threading as threading + +py36 = sys.version_info >= (3, 6) +py33 = sys.version_info >= (3, 3) +py32 = sys.version_info >= (3, 2) +py3k = sys.version_info >= (3, 0) +py2k = sys.version_info < (3, 0) +py265 = sys.version_info >= (2, 6, 5) +jython = sys.platform.startswith('java') +pypy = hasattr(sys, 'pypy_version_info') +win32 = sys.platform.startswith('win') +cpython = not pypy and not jython # TODO: something better for this ? + +import collections +next = next + +if py3k: + import pickle +else: + try: + import cPickle as pickle + except ImportError: + import pickle + +# work around http://bugs.python.org/issue2646 +if py265: + safe_kwarg = lambda arg: arg +else: + safe_kwarg = str + +ArgSpec = collections.namedtuple("ArgSpec", + ["args", "varargs", "keywords", "defaults"]) + +if py3k: + import builtins + + from inspect import getfullargspec as inspect_getfullargspec + from urllib.parse import (quote_plus, unquote_plus, + parse_qsl, quote, unquote) + import configparser + from io import StringIO + + from io import BytesIO as byte_buffer + + def inspect_getargspec(func): + return ArgSpec( + *inspect_getfullargspec(func)[0:4] + ) + + string_types = str, + binary_type = bytes + text_type = str + int_types = int, + iterbytes = iter + + def u(s): + return s + + def ue(s): + return s + + def b(s): + return s.encode("latin-1") + + if py32: + callable = callable + else: + def callable(fn): + return hasattr(fn, '__call__') + + def cmp(a, b): + return (a > b) - (a < b) + + from functools import reduce + + print_ = getattr(builtins, "print") + + import_ = getattr(builtins, '__import__') + + import itertools + itertools_filterfalse = itertools.filterfalse + itertools_filter = filter + itertools_imap = map + from itertools import zip_longest + + import base64 + + def b64encode(x): + return base64.b64encode(x).decode('ascii') + + def b64decode(x): + return base64.b64decode(x.encode('ascii')) + +else: + from inspect import getargspec as inspect_getfullargspec + inspect_getargspec = inspect_getfullargspec + from urllib import quote_plus, unquote_plus, quote, unquote + from urlparse import parse_qsl + import ConfigParser as configparser + from StringIO import StringIO + from cStringIO import StringIO as byte_buffer + + string_types = basestring, + binary_type = str + text_type = unicode + int_types = int, long + + def 
iterbytes(buf): + return (ord(byte) for byte in buf) + + def u(s): + # this differs from what six does, which doesn't support non-ASCII + # strings - we only use u() with + # literal source strings, and all our source files with non-ascii + # in them (all are tests) are utf-8 encoded. + return unicode(s, "utf-8") + + def ue(s): + return unicode(s, "unicode_escape") + + def b(s): + return s + + def import_(*args): + if len(args) == 4: + args = args[0:3] + ([str(arg) for arg in args[3]],) + return __import__(*args) + + callable = callable + cmp = cmp + reduce = reduce + + import base64 + b64encode = base64.b64encode + b64decode = base64.b64decode + + def print_(*args, **kwargs): + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + # iterate the values themselves; enumerate() here would emit + # (index, value) tuples and write their str() instead + for arg in args: + if not isinstance(arg, basestring): + arg = str(arg) + fp.write(arg) + + import itertools + itertools_filterfalse = itertools.ifilterfalse + itertools_filter = itertools.ifilter + itertools_imap = itertools.imap + from itertools import izip_longest as zip_longest + + +import time +if win32 or jython: + time_func = time.clock +else: + time_func = time.time + +from collections import namedtuple +from operator import attrgetter as dottedgetter + + +if py3k: + def reraise(tp, value, tb=None, cause=None): + if cause is not None: + assert cause is not value, "Same cause emitted" + value.__cause__ = cause + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + # not as nice as that of Py3K, but at least preserves + # the code line where the issue occurred + exec("def reraise(tp, value, tb=None, cause=None):\n" + " if cause is not None:\n" + " assert cause is not value, 'Same cause emitted'\n" + " raise tp, value, tb\n") + + +def raise_from_cause(exception, exc_info=None): + if exc_info is None: + exc_info = sys.exc_info() + exc_type, exc_value, exc_tb = exc_info + cause = exc_value if exc_value is not exception else None + reraise(type(exception), exception, tb=exc_tb, cause=cause) + +if py3k: + exec_ = getattr(builtins, 'exec') +else: + def exec_(func_text, globals_, lcl=None): + if lcl is None: + exec('exec func_text in globals_') + else: + exec('exec func_text in globals_, lcl') + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass. + + Drops the middle class upon creation.
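+ + E.g., a minimal sketch:: + + class MyMeta(type): + pass + + class MyClass(with_metaclass(MyMeta, object)): + pass + + # type(MyClass) is MyMeta; the temporary class is gone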
+ + Source: http://lucumr.pocoo.org/2013/5/21/porting-to-python-3-redux/ + + """ + + class metaclass(meta): + __call__ = type.__call__ + __init__ = type.__init__ + + def __new__(cls, name, this_bases, d): + if this_bases is None: + return type.__new__(cls, name, (), d) + return meta(name, bases, d) + return metaclass('temporary_class', None, {}) + + +from contextlib import contextmanager + +try: + from contextlib import nested +except ImportError: + # removed in py3k, credit to mitsuhiko for + # workaround + + @contextmanager + def nested(*managers): + exits = [] + vars = [] + exc = (None, None, None) + try: + for mgr in managers: + exit = mgr.__exit__ + enter = mgr.__enter__ + vars.append(enter()) + exits.append(exit) + yield vars + except: + exc = sys.exc_info() + finally: + while exits: + exit = exits.pop() + try: + if exit(*exc): + exc = (None, None, None) + except: + exc = sys.exc_info() + if exc != (None, None, None): + reraise(exc[0], exc[1], exc[2]) diff --git a/lib/python3.4/site-packages/sqlalchemy/util/deprecations.py b/lib/python3.4/site-packages/sqlalchemy/util/deprecations.py new file mode 100644 index 0000000..12fa636 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/util/deprecations.py @@ -0,0 +1,146 @@ +# util/deprecations.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Helpers related to deprecation of functions, methods, classes, other +functionality.""" + +from .. import exc +import warnings +import re +from .langhelpers import decorator + + +def warn_deprecated(msg, stacklevel=3): + warnings.warn(msg, exc.SADeprecationWarning, stacklevel=stacklevel) + + +def warn_pending_deprecation(msg, stacklevel=3): + warnings.warn(msg, exc.SAPendingDeprecationWarning, stacklevel=stacklevel) + + +def deprecated(version, message=None, add_deprecation_to_docstring=True): + """Decorates a function and issues a deprecation warning on use. + + :param message: + If provided, issue message in the warning. A sensible default + is used if not provided. + + :param add_deprecation_to_docstring: + Default True. If False, the wrapped function's __doc__ is left + as-is. If True, the 'message' is prepended to the docs if + provided, or sensible default if message is omitted. + + """ + + if add_deprecation_to_docstring: + header = ".. deprecated:: %s %s" % \ + (version, (message or '')) + else: + header = None + + if message is None: + message = "Call to deprecated function %(func)s" + + def decorate(fn): + return _decorate_with_warning( + fn, exc.SADeprecationWarning, + message % dict(func=fn.__name__), header) + return decorate + + +def pending_deprecation(version, message=None, + add_deprecation_to_docstring=True): + """Decorates a function and issues a pending deprecation warning on use. + + :param version: + An approximate future version at which point the pending deprecation + will become deprecated. Not used in messaging. + + :param message: + If provided, issue message in the warning. A sensible default + is used if not provided. + + :param add_deprecation_to_docstring: + Default True. If False, the wrapped function's __doc__ is left + as-is. If True, the 'message' is prepended to the docs if + provided, or sensible default if message is omitted. + """ + + if add_deprecation_to_docstring: + header = ".. 
deprecated:: %s (pending) %s" % \ + (version, (message or '')) + else: + header = None + + if message is None: + message = "Call to deprecated function %(func)s" + + def decorate(fn): + return _decorate_with_warning( + fn, exc.SAPendingDeprecationWarning, + message % dict(func=fn.__name__), header) + return decorate + + +def _sanitize_restructured_text(text): + def repl(m): + type_, name = m.group(1, 2) + if type_ in ("func", "meth"): + name += "()" + return name + return re.sub(r'\:(\w+)\:`~?\.?(.+?)`', repl, text) + + +def _decorate_with_warning(func, wtype, message, docstring_header=None): + """Wrap a function with a warnings.warn and augmented docstring.""" + + message = _sanitize_restructured_text(message) + + @decorator + def warned(fn, *args, **kwargs): + warnings.warn(message, wtype, stacklevel=3) + return fn(*args, **kwargs) + + doc = func.__doc__ is not None and func.__doc__ or '' + if docstring_header is not None: + docstring_header %= dict(func=func.__name__) + + doc = inject_docstring_text(doc, docstring_header, 1) + + decorated = warned(func) + decorated.__doc__ = doc + return decorated + +import textwrap + + +def _dedent_docstring(text): + split_text = text.split("\n", 1) + if len(split_text) == 1: + return text + else: + firstline, remaining = split_text + if not firstline.startswith(" "): + return firstline + "\n" + textwrap.dedent(remaining) + else: + return textwrap.dedent(text) + + +def inject_docstring_text(doctext, injecttext, pos): + doctext = _dedent_docstring(doctext or "") + lines = doctext.split('\n') + injectlines = textwrap.dedent(injecttext).split("\n") + if injectlines[0]: + injectlines.insert(0, "") + + blanks = [num for num, line in enumerate(lines) if not line.strip()] + blanks.insert(0, 0) + + inject_pos = blanks[min(pos, len(blanks) - 1)] + + lines = lines[0:inject_pos] + injectlines + lines[inject_pos:] + return "\n".join(lines) diff --git a/lib/python3.4/site-packages/sqlalchemy/util/langhelpers.py b/lib/python3.4/site-packages/sqlalchemy/util/langhelpers.py new file mode 100644 index 0000000..0318d1e --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/util/langhelpers.py @@ -0,0 +1,1384 @@ +# util/langhelpers.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Routines to help with the creation, loading and introspection of +modules, classes, hierarchies, attributes, functions, and methods. + +""" +import itertools +import inspect +import operator +import re +import sys +import types +import warnings +from functools import update_wrapper +from .. import exc +import hashlib +from . import compat +from . import _collections + + +def md5_hex(x): + if compat.py3k: + x = x.encode('utf-8') + m = hashlib.md5() + m.update(x) + return m.hexdigest() + + +class safe_reraise(object): + """Reraise an exception after invoking some + handler code. + + Stores the existing exception info before + invoking so that it is maintained across a potential + coroutine context switch. 
+ + e.g.:: + + try: + sess.commit() + except: + with safe_reraise(): + sess.rollback() + + """ + + def __enter__(self): + self._exc_info = sys.exc_info() + + def __exit__(self, type_, value, traceback): + # see #2703 for notes + if type_ is None: + exc_type, exc_value, exc_tb = self._exc_info + self._exc_info = None # remove potential circular references + compat.reraise(exc_type, exc_value, exc_tb) + else: + if not compat.py3k and self._exc_info and self._exc_info[1]: + # emulate Py3K's behavior of telling us when an exception + # occurs in an exception handler. + warn( + "An exception has occurred during handling of a " + "previous exception. The previous exception " + "is:\n %s %s\n" % (self._exc_info[0], self._exc_info[1])) + self._exc_info = None # remove potential circular references + compat.reraise(type_, value, traceback) + + +def decode_slice(slc): + """decode a slice object as sent to __getitem__. + + takes into account the 2.5 __index__() method, basically. + + """ + ret = [] + for x in slc.start, slc.stop, slc.step: + if hasattr(x, '__index__'): + x = x.__index__() + ret.append(x) + return tuple(ret) + + +def _unique_symbols(used, *bases): + used = set(used) + for base in bases: + pool = itertools.chain((base,), + compat.itertools_imap(lambda i: base + str(i), + range(1000))) + for sym in pool: + if sym not in used: + used.add(sym) + yield sym + break + else: + raise NameError("exhausted namespace for symbol base %s" % base) + + +def map_bits(fn, n): + """Call the given function given each nonzero bit from n.""" + + while n: + b = n & (~n + 1) + yield fn(b) + n ^= b + + +def decorator(target): + """A signature-matching decorator factory.""" + + def decorate(fn): + if not inspect.isfunction(fn): + raise Exception("not a decoratable function") + spec = compat.inspect_getfullargspec(fn) + names = tuple(spec[0]) + spec[1:3] + (fn.__name__,) + targ_name, fn_name = _unique_symbols(names, 'target', 'fn') + + metadata = dict(target=targ_name, fn=fn_name) + metadata.update(format_argspec_plus(spec, grouped=False)) + metadata['name'] = fn.__name__ + code = """\ +def %(name)s(%(args)s): + return %(target)s(%(fn)s, %(apply_kw)s) +""" % metadata + decorated = _exec_code_in_env(code, + {targ_name: target, fn_name: fn}, + fn.__name__) + decorated.__defaults__ = getattr(fn, 'im_func', fn).__defaults__ + decorated.__wrapped__ = fn + return update_wrapper(decorated, fn) + return update_wrapper(decorate, target) + + +def _exec_code_in_env(code, env, fn_name): + exec(code, env) + return env[fn_name] + + +def public_factory(target, location): + """Produce a wrapping function for the given cls or classmethod. + + Rationale here is so that the __init__ method of the + class can serve as documentation for the function. + + """ + if isinstance(target, type): + fn = target.__init__ + callable_ = target + doc = "Construct a new :class:`.%s` object. \n\n"\ + "This constructor is mirrored as a public API function; "\ + "see :func:`~%s` "\ + "for a full usage and argument description." % ( + target.__name__, location, ) + else: + fn = callable_ = target + doc = "This function is mirrored; see :func:`~%s` "\ + "for a description of arguments." 
% location + + location_name = location.split(".")[-1] + spec = compat.inspect_getfullargspec(fn) + del spec[0][0] + metadata = format_argspec_plus(spec, grouped=False) + metadata['name'] = location_name + code = """\ +def %(name)s(%(args)s): + return cls(%(apply_kw)s) +""" % metadata + env = {'cls': callable_, 'symbol': symbol} + exec(code, env) + decorated = env[location_name] + decorated.__doc__ = fn.__doc__ + decorated.__module__ = "sqlalchemy" + location.rsplit(".", 1)[0] + if compat.py2k or hasattr(fn, '__func__'): + fn.__func__.__doc__ = doc + else: + fn.__doc__ = doc + return decorated + + +class PluginLoader(object): + + def __init__(self, group, auto_fn=None): + self.group = group + self.impls = {} + self.auto_fn = auto_fn + + def load(self, name): + if name in self.impls: + return self.impls[name]() + + if self.auto_fn: + loader = self.auto_fn(name) + if loader: + self.impls[name] = loader + return loader() + + try: + import pkg_resources + except ImportError: + pass + else: + for impl in pkg_resources.iter_entry_points( + self.group, name): + self.impls[name] = impl.load + return impl.load() + + raise exc.NoSuchModuleError( + "Can't load plugin: %s:%s" % + (self.group, name)) + + def register(self, name, modulepath, objname): + def load(): + mod = compat.import_(modulepath) + for token in modulepath.split(".")[1:]: + mod = getattr(mod, token) + return getattr(mod, objname) + self.impls[name] = load + + +def get_cls_kwargs(cls, _set=None): + """Return the full set of inherited kwargs for the given `cls`. + + Probes a class's __init__ method, collecting all named arguments. If the + __init__ defines a \**kwargs catch-all, then the constructor is presumed + to pass along unrecognized keywords to its base classes, and the + collection process is repeated recursively on each of the bases. + + Uses a subset of inspect.getargspec() to cut down on method overhead. + No anonymous tuple arguments please ! + + """ + toplevel = _set is None + if toplevel: + _set = set() + + ctr = cls.__dict__.get('__init__', False) + + has_init = ctr and isinstance(ctr, types.FunctionType) and \ + isinstance(ctr.__code__, types.CodeType) + + if has_init: + names, has_kw = inspect_func_args(ctr) + _set.update(names) + + if not has_kw and not toplevel: + return None + + if not has_init or has_kw: + for c in cls.__bases__: + if get_cls_kwargs(c, _set) is None: + break + + _set.discard('self') + return _set + + +try: + # TODO: who doesn't have this constant? + from inspect import CO_VARKEYWORDS + + def inspect_func_args(fn): + co = fn.__code__ + nargs = co.co_argcount + names = co.co_varnames + args = list(names[:nargs]) + has_kw = bool(co.co_flags & CO_VARKEYWORDS) + return args, has_kw + +except ImportError: + def inspect_func_args(fn): + names, _, has_kw, _ = inspect.getargspec(fn) + return names, bool(has_kw) + + +def get_func_kwargs(func): + """Return the set of legal kwargs for the given `func`. + + Uses getargspec so is safe to call for methods, functions, + etc. + + """ + + return compat.inspect_getargspec(func)[0] + + +def get_callable_argspec(fn, no_self=False, _is_init=False): + """Return the argument signature for any callable. + + All pure-Python callables are accepted, including + functions, methods, classes, objects with __call__; + builtins and other edge cases like functools.partial() objects + raise a TypeError. 
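+ + E.g., a minimal sketch:: + + >>> def fn(a, b=2): + ... pass + >>> get_callable_argspec(fn).args + ['a', 'b']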
+ + """ + if inspect.isbuiltin(fn): + raise TypeError("Can't inspect builtin: %s" % fn) + elif inspect.isfunction(fn): + if _is_init and no_self: + spec = compat.inspect_getargspec(fn) + return compat.ArgSpec(spec.args[1:], spec.varargs, + spec.keywords, spec.defaults) + else: + return compat.inspect_getargspec(fn) + elif inspect.ismethod(fn): + if no_self and (_is_init or fn.__self__): + spec = compat.inspect_getargspec(fn.__func__) + return compat.ArgSpec(spec.args[1:], spec.varargs, + spec.keywords, spec.defaults) + else: + return compat.inspect_getargspec(fn.__func__) + elif inspect.isclass(fn): + return get_callable_argspec( + fn.__init__, no_self=no_self, _is_init=True) + elif hasattr(fn, '__func__'): + return compat.inspect_getargspec(fn.__func__) + elif hasattr(fn, '__call__'): + if inspect.ismethod(fn.__call__): + return get_callable_argspec(fn.__call__, no_self=no_self) + else: + raise TypeError("Can't inspect callable: %s" % fn) + else: + raise TypeError("Can't inspect callable: %s" % fn) + + +def format_argspec_plus(fn, grouped=True): + """Returns a dictionary of formatted, introspected function arguments. + + An enhanced variant of inspect.formatargspec to support code generation. + + fn + An inspectable callable or tuple of inspect getargspec() results. + grouped + Defaults to True; include (parens, around, argument) lists + + Returns: + + args + Full inspect.formatargspec for fn + self_arg + The name of the first positional argument, varargs[0], or None + if the function defines no positional arguments. + apply_pos + args, re-written in calling rather than receiving syntax. Arguments are + passed positionally. + apply_kw + Like apply_pos, except keyword-ish args are passed as keywords. + + Example:: + + >>> format_argspec_plus(lambda self, a, b, c=3, **d: 123) + {'args': '(self, a, b, c=3, **d)', + 'self_arg': 'self', + 'apply_kw': '(self, a, b, c=c, **d)', + 'apply_pos': '(self, a, b, c, **d)'} + + """ + if compat.callable(fn): + spec = compat.inspect_getfullargspec(fn) + else: + # we accept an existing argspec...
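+ # (i.e. the tuple form produced by inspect, so callers that have + # already introspected once can pass the result straight through)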
+ spec = fn + args = inspect.formatargspec(*spec) + if spec[0]: + self_arg = spec[0][0] + elif spec[1]: + self_arg = '%s[0]' % spec[1] + else: + self_arg = None + + if compat.py3k: + apply_pos = inspect.formatargspec(spec[0], spec[1], + spec[2], None, spec[4]) + num_defaults = 0 + if spec[3]: + num_defaults += len(spec[3]) + if spec[4]: + num_defaults += len(spec[4]) + name_args = spec[0] + spec[4] + else: + apply_pos = inspect.formatargspec(spec[0], spec[1], spec[2]) + num_defaults = 0 + if spec[3]: + num_defaults += len(spec[3]) + name_args = spec[0] + + if num_defaults: + defaulted_vals = name_args[0 - num_defaults:] + else: + defaulted_vals = () + + apply_kw = inspect.formatargspec(name_args, spec[1], spec[2], + defaulted_vals, + formatvalue=lambda x: '=' + x) + if grouped: + return dict(args=args, self_arg=self_arg, + apply_pos=apply_pos, apply_kw=apply_kw) + else: + return dict(args=args[1:-1], self_arg=self_arg, + apply_pos=apply_pos[1:-1], apply_kw=apply_kw[1:-1]) + + +def format_argspec_init(method, grouped=True): + """format_argspec_plus with considerations for typical __init__ methods + + Wraps format_argspec_plus with error handling strategies for typical + __init__ cases:: + + object.__init__ -> (self) + other unreflectable (usually C) -> (self, *args, **kwargs) + + """ + if method is object.__init__: + args = grouped and '(self)' or 'self' + else: + try: + return format_argspec_plus(method, grouped=grouped) + except TypeError: + args = (grouped and '(self, *args, **kwargs)' + or 'self, *args, **kwargs') + return dict(self_arg='self', args=args, apply_pos=args, apply_kw=args) + + +def getargspec_init(method): + """inspect.getargspec with considerations for typical __init__ methods + + Wraps inspect.getargspec with error handling for typical __init__ cases:: + + object.__init__ -> (self) + other unreflectable (usually C) -> (self, *args, **kwargs) + + """ + try: + return compat.inspect_getargspec(method) + except TypeError: + if method is object.__init__: + return (['self'], None, None, None) + else: + return (['self'], 'args', 'kwargs', None) + + +def unbound_method_to_callable(func_or_cls): + """Adjust the incoming callable such that a 'self' argument is not + required. + + """ + + if isinstance(func_or_cls, types.MethodType) and not func_or_cls.__self__: + return func_or_cls.__func__ + else: + return func_or_cls + + +def generic_repr(obj, additional_kw=(), to_inspect=None, omit_kwarg=()): + """Produce a __repr__() based on direct association of the __init__() + specification vs. same-named attributes present. 
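+ + E.g., a minimal sketch:: + + class Point(object): + def __init__(self, x, y=0): + self.x = x + self.y = y + + # generic_repr(Point(1, 2)) -> "Point(1, y=2)"; keyword arguments + # are emitted only when they differ from their defaults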
+ + """ + if to_inspect is None: + to_inspect = [obj] + else: + to_inspect = _collections.to_list(to_inspect) + + missing = object() + + pos_args = [] + kw_args = _collections.OrderedDict() + vargs = None + for i, insp in enumerate(to_inspect): + try: + (_args, _vargs, vkw, defaults) = \ + compat.inspect_getargspec(insp.__init__) + except TypeError: + continue + else: + default_len = defaults and len(defaults) or 0 + if i == 0: + if _vargs: + vargs = _vargs + if default_len: + pos_args.extend(_args[1:-default_len]) + else: + pos_args.extend(_args[1:]) + else: + kw_args.update([ + (arg, missing) for arg in _args[1:-default_len] + ]) + + if default_len: + kw_args.update([ + (arg, default) + for arg, default + in zip(_args[-default_len:], defaults) + ]) + output = [] + + output.extend(repr(getattr(obj, arg, None)) for arg in pos_args) + + if vargs is not None and hasattr(obj, vargs): + output.extend([repr(val) for val in getattr(obj, vargs)]) + + for arg, defval in kw_args.items(): + if arg in omit_kwarg: + continue + try: + val = getattr(obj, arg, missing) + if val is not missing and val != defval: + output.append('%s=%r' % (arg, val)) + except Exception: + pass + + if additional_kw: + for arg, defval in additional_kw: + try: + val = getattr(obj, arg, missing) + if val is not missing and val != defval: + output.append('%s=%r' % (arg, val)) + except Exception: + pass + + return "%s(%s)" % (obj.__class__.__name__, ", ".join(output)) + + +class portable_instancemethod(object): + """Turn an instancemethod into a (parent, name) pair + to produce a serializable callable. + + """ + + __slots__ = 'target', 'name', '__weakref__' + + def __getstate__(self): + return {'target': self.target, 'name': self.name} + + def __setstate__(self, state): + self.target = state['target'] + self.name = state['name'] + + def __init__(self, meth): + self.target = meth.__self__ + self.name = meth.__name__ + + def __call__(self, *arg, **kw): + return getattr(self.target, self.name)(*arg, **kw) + + +def class_hierarchy(cls): + """Return an unordered sequence of all classes related to cls. + + Traverses diamond hierarchies. + + Fibs slightly: subclasses of builtin types are not returned. Thus + class_hierarchy(class A(object)) returns (A, object), not A plus every + class systemwide that derives from object. + + Old-style classes are discarded and hierarchies rooted on them + will not be descended. + + """ + if compat.py2k: + if isinstance(cls, types.ClassType): + return list() + + hier = set([cls]) + process = list(cls.__mro__) + while process: + c = process.pop() + if compat.py2k: + if isinstance(c, types.ClassType): + continue + bases = (_ for _ in c.__bases__ + if _ not in hier and not isinstance(_, types.ClassType)) + else: + bases = (_ for _ in c.__bases__ if _ not in hier) + + for b in bases: + process.append(b) + hier.add(b) + + if compat.py3k: + if c.__module__ == 'builtins' or not hasattr(c, '__subclasses__'): + continue + else: + if c.__module__ == '__builtin__' or not hasattr( + c, '__subclasses__'): + continue + + for s in [_ for _ in c.__subclasses__() if _ not in hier]: + process.append(s) + hier.add(s) + return list(hier) + + +def iterate_attributes(cls): + """iterate all the keys and attributes associated + with a class, without using getattr(). + + Does not use getattr() so that class-sensitive + descriptors (i.e. property.__get__()) are not called. 
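+ + E.g., a minimal sketch:: + + class A(object): + x = 1 + + class B(A): + pass + + # dict(iterate_attributes(B))["x"] == 1, located on A via __mro__ + # without invoking any descriptor's __get__()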
+ + """ + keys = dir(cls) + for key in keys: + for c in cls.__mro__: + if key in c.__dict__: + yield (key, c.__dict__[key]) + break + + +def monkeypatch_proxied_specials(into_cls, from_cls, skip=None, only=None, + name='self.proxy', from_instance=None): + """Automates delegation of __specials__ for a proxying type.""" + + if only: + dunders = only + else: + if skip is None: + skip = ('__slots__', '__del__', '__getattribute__', + '__metaclass__', '__getstate__', '__setstate__') + dunders = [m for m in dir(from_cls) + if (m.startswith('__') and m.endswith('__') and + not hasattr(into_cls, m) and m not in skip)] + + for method in dunders: + try: + fn = getattr(from_cls, method) + if not hasattr(fn, '__call__'): + continue + fn = getattr(fn, 'im_func', fn) + except AttributeError: + continue + try: + spec = compat.inspect_getargspec(fn) + fn_args = inspect.formatargspec(spec[0]) + d_args = inspect.formatargspec(spec[0][1:]) + except TypeError: + fn_args = '(self, *args, **kw)' + d_args = '(*args, **kw)' + + py = ("def %(method)s%(fn_args)s: " + "return %(name)s.%(method)s%(d_args)s" % locals()) + + env = from_instance is not None and {name: from_instance} or {} + compat.exec_(py, env) + try: + env[method].__defaults__ = fn.__defaults__ + except AttributeError: + pass + setattr(into_cls, method, env[method]) + + +def methods_equivalent(meth1, meth2): + """Return True if the two methods are the same implementation.""" + + return getattr(meth1, '__func__', meth1) is getattr( + meth2, '__func__', meth2) + + +def as_interface(obj, cls=None, methods=None, required=None): + """Ensure basic interface compliance for an instance or dict of callables. + + Checks that ``obj`` implements public methods of ``cls`` or has members + listed in ``methods``. If ``required`` is not supplied, implementing at + least one interface method is sufficient. Methods present on ``obj`` that + are not in the interface are ignored. + + If ``obj`` is a dict and ``dict`` does not meet the interface + requirements, the keys of the dictionary are inspected. Keys present in + ``obj`` that are not in the interface will raise TypeErrors. + + Raises TypeError if ``obj`` does not meet the interface criteria. + + In all passing cases, an object with callable members is returned. In the + simple case, ``obj`` is returned as-is; if dict processing kicks in then + an anonymous class is returned. + + obj + A type, instance, or dictionary of callables. + cls + Optional, a type. All public methods of cls are considered the + interface. An ``obj`` instance of cls will always pass, ignoring + ``required``.. + methods + Optional, a sequence of method names to consider as the interface. + required + Optional, a sequence of mandatory implementations. If omitted, an + ``obj`` that provides at least one interface method is considered + sufficient. As a convenience, required may be a type, in which case + all public methods of the type are required. + + """ + if not cls and not methods: + raise TypeError('a class or collection of method names are required') + + if isinstance(cls, type) and isinstance(obj, cls): + return obj + + interface = set(methods or [m for m in dir(cls) if not m.startswith('_')]) + implemented = set(dir(obj)) + + complies = operator.ge + if isinstance(required, type): + required = interface + elif not required: + required = set() + complies = operator.gt + else: + required = set(required) + + if complies(implemented.intersection(interface), required): + return obj + + # No dict duck typing here. 
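+ # past this point obj itself has failed the interface check; only a + # dictionary of callables can still satisfy it, by promotion to an + # anonymous class below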
+ if not isinstance(obj, dict): + qualifier = complies is operator.gt and 'any of' or 'all of' + raise TypeError("%r does not implement %s: %s" % ( + obj, qualifier, ', '.join(interface))) + + class AnonymousInterface(object): + """A callable-holding shell.""" + + if cls: + AnonymousInterface.__name__ = 'Anonymous' + cls.__name__ + found = set() + + for method, impl in dictlike_iteritems(obj): + if method not in interface: + raise TypeError("%r: unknown in this interface" % method) + if not compat.callable(impl): + raise TypeError("%r=%r is not callable" % (method, impl)) + setattr(AnonymousInterface, method, staticmethod(impl)) + found.add(method) + + if complies(found, required): + return AnonymousInterface + + raise TypeError("dictionary does not contain required keys %s" % + ', '.join(required - found)) + + +class memoized_property(object): + """A read-only @property that is only evaluated once.""" + + def __init__(self, fget, doc=None): + self.fget = fget + self.__doc__ = doc or fget.__doc__ + self.__name__ = fget.__name__ + + def __get__(self, obj, cls): + if obj is None: + return self + obj.__dict__[self.__name__] = result = self.fget(obj) + return result + + def _reset(self, obj): + memoized_property.reset(obj, self.__name__) + + @classmethod + def reset(cls, obj, name): + obj.__dict__.pop(name, None) + + +def memoized_instancemethod(fn): + """Decorate a method to memoize its return value. + + Best applied to no-arg methods: memoization is not sensitive to + argument values, and will always return the same value even when + called with different arguments. + + """ + + def oneshot(self, *args, **kw): + result = fn(self, *args, **kw) + memo = lambda *a, **kw: result + memo.__name__ = fn.__name__ + memo.__doc__ = fn.__doc__ + self.__dict__[fn.__name__] = memo + return result + return update_wrapper(oneshot, fn) + + +class group_expirable_memoized_property(object): + """A family of @memoized_properties that can be expired in tandem.""" + + def __init__(self, attributes=()): + self.attributes = [] + if attributes: + self.attributes.extend(attributes) + + def expire_instance(self, instance): + """Expire all memoized properties for *instance*.""" + stash = instance.__dict__ + for attribute in self.attributes: + stash.pop(attribute, None) + + def __call__(self, fn): + self.attributes.append(fn.__name__) + return memoized_property(fn) + + def method(self, fn): + self.attributes.append(fn.__name__) + return memoized_instancemethod(fn) + + +class MemoizedSlots(object): + """Apply memoized items to an object using a __getattr__ scheme. + + This allows the functionality of memoized_property and + memoized_instancemethod to be available to a class using __slots__.
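+ + E.g., a minimal sketch (``compute_expensive`` is hypothetical):: + + class Widget(MemoizedSlots): + __slots__ = ('expensive',) + + def _memoized_attr_expensive(self): + return compute_expensive()  # runs once per instance + + # the first access of w.expensive computes and stores the value in + # the slot; later reads bypass __getattr__ entirely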
+ + """ + + __slots__ = () + + def _fallback_getattr(self, key): + raise AttributeError(key) + + def __getattr__(self, key): + if key.startswith('_memoized'): + raise AttributeError(key) + elif hasattr(self, '_memoized_attr_%s' % key): + value = getattr(self, '_memoized_attr_%s' % key)() + setattr(self, key, value) + return value + elif hasattr(self, '_memoized_method_%s' % key): + fn = getattr(self, '_memoized_method_%s' % key) + + def oneshot(*args, **kw): + result = fn(*args, **kw) + memo = lambda *a, **kw: result + memo.__name__ = fn.__name__ + memo.__doc__ = fn.__doc__ + setattr(self, key, memo) + return result + oneshot.__doc__ = fn.__doc__ + return oneshot + else: + return self._fallback_getattr(key) + + +def dependency_for(modulename): + def decorate(obj): + # TODO: would be nice to improve on this import silliness, + # unfortunately importlib doesn't work that great either + tokens = modulename.split(".") + mod = compat.import_( + ".".join(tokens[0:-1]), globals(), locals(), tokens[-1]) + mod = getattr(mod, tokens[-1]) + setattr(mod, obj.__name__, obj) + return obj + return decorate + + +class dependencies(object): + """Apply imported dependencies as arguments to a function. + + E.g.:: + + @util.dependencies( + "sqlalchemy.sql.widget", + "sqlalchemy.engine.default" + ); + def some_func(self, widget, default, arg1, arg2, **kw): + # ... + + Rationale is so that the impact of a dependency cycle can be + associated directly with the few functions that cause the cycle, + and not pollute the module-level namespace. + + """ + + def __init__(self, *deps): + self.import_deps = [] + for dep in deps: + tokens = dep.split(".") + self.import_deps.append( + dependencies._importlater( + ".".join(tokens[0:-1]), + tokens[-1] + ) + ) + + def __call__(self, fn): + import_deps = self.import_deps + spec = compat.inspect_getfullargspec(fn) + + spec_zero = list(spec[0]) + hasself = spec_zero[0] in ('self', 'cls') + + for i in range(len(import_deps)): + spec[0][i + (1 if hasself else 0)] = "import_deps[%r]" % i + + inner_spec = format_argspec_plus(spec, grouped=False) + + for impname in import_deps: + del spec_zero[1 if hasself else 0] + spec[0][:] = spec_zero + + outer_spec = format_argspec_plus(spec, grouped=False) + + code = 'lambda %(args)s: fn(%(apply_kw)s)' % { + "args": outer_spec['args'], + "apply_kw": inner_spec['apply_kw'] + } + + decorated = eval(code, locals()) + decorated.__defaults__ = getattr(fn, 'im_func', fn).__defaults__ + return update_wrapper(decorated, fn) + + @classmethod + def resolve_all(cls, path): + for m in list(dependencies._unresolved): + if m._full_path.startswith(path): + m._resolve() + + _unresolved = set() + _by_key = {} + + class _importlater(object): + _unresolved = set() + + _by_key = {} + + def __new__(cls, path, addtl): + key = path + "." + addtl + if key in dependencies._by_key: + return dependencies._by_key[key] + else: + dependencies._by_key[key] = imp = object.__new__(cls) + return imp + + def __init__(self, path, addtl): + self._il_path = path + self._il_addtl = addtl + dependencies._unresolved.add(self) + + @property + def _full_path(self): + return self._il_path + "." 
+ self._il_addtl + + @memoized_property + def module(self): + if self in dependencies._unresolved: + raise ImportError( + "importlater.resolve_all() hasn't " + "been called (this is %s %s)" + % (self._il_path, self._il_addtl)) + + return getattr(self._initial_import, self._il_addtl) + + def _resolve(self): + dependencies._unresolved.discard(self) + self._initial_import = compat.import_( + self._il_path, globals(), locals(), + [self._il_addtl]) + + def __getattr__(self, key): + if key == 'module': + raise ImportError("Could not resolve module %s" + % self._full_path) + try: + attr = getattr(self.module, key) + except AttributeError: + raise AttributeError( + "Module %s has no attribute '%s'" % + (self._full_path, key) + ) + self.__dict__[key] = attr + return attr + + +# from paste.deploy.converters +def asbool(obj): + if isinstance(obj, compat.string_types): + obj = obj.strip().lower() + if obj in ['true', 'yes', 'on', 'y', 't', '1']: + return True + elif obj in ['false', 'no', 'off', 'n', 'f', '0']: + return False + else: + raise ValueError("String is not true/false: %r" % obj) + return bool(obj) + + +def bool_or_str(*text): + """Return a callable that will evaluate a string as + boolean, or one of a set of "alternate" string values. + + """ + def bool_or_value(obj): + if obj in text: + return obj + else: + return asbool(obj) + return bool_or_value + + +def asint(value): + """Coerce to integer.""" + + if value is None: + return value + return int(value) + + +def coerce_kw_type(kw, key, type_, flexi_bool=True): + """If 'key' is present in dict 'kw', coerce its value to type 'type\_' if + necessary. If 'flexi_bool' is True, the string '0' is considered false + when coercing to boolean. + """ + + if key in kw and not isinstance(kw[key], type_) and kw[key] is not None: + if type_ is bool and flexi_bool: + kw[key] = asbool(kw[key]) + else: + kw[key] = type_(kw[key]) + + +def constructor_copy(obj, cls, *args, **kw): + """Instantiate cls using the __dict__ of obj as constructor arguments. + + Uses inspect to match the named arguments of ``cls``. + + """ + + names = get_cls_kwargs(cls) + kw.update((k, obj.__dict__[k]) for k in names if k in obj.__dict__) + return cls(*args, **kw) + + +def counter(): + """Return a threadsafe counter function.""" + + lock = compat.threading.Lock() + counter = itertools.count(1) + + # avoid the 2to3 "next" transformation... + def _next(): + lock.acquire() + try: + return next(counter) + finally: + lock.release() + + return _next + + +def duck_type_collection(specimen, default=None): + """Given an instance or class, guess if it is or is acting as one of + the basic collection types: list, set and dict. If the __emulates__ + property is present, return that preferentially. 
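+
+    For example (a hedged sketch; ``Bag`` is a hypothetical class)::
+
+        duck_type_collection([])        # -> list
+        duck_type_collection(set)       # -> set; classes work as well
+
+        class Bag(object):
+            __emulates__ = set
+
+        duck_type_collection(Bag())     # -> set, via __emulates__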
+ """ + + if hasattr(specimen, '__emulates__'): + # canonicalize set vs sets.Set to a standard: the builtin set + if (specimen.__emulates__ is not None and + issubclass(specimen.__emulates__, set)): + return set + else: + return specimen.__emulates__ + + isa = isinstance(specimen, type) and issubclass or isinstance + if isa(specimen, list): + return list + elif isa(specimen, set): + return set + elif isa(specimen, dict): + return dict + + if hasattr(specimen, 'append'): + return list + elif hasattr(specimen, 'add'): + return set + elif hasattr(specimen, 'set'): + return dict + else: + return default + + +def assert_arg_type(arg, argtype, name): + if isinstance(arg, argtype): + return arg + else: + if isinstance(argtype, tuple): + raise exc.ArgumentError( + "Argument '%s' is expected to be one of type %s, got '%s'" % + (name, ' or '.join("'%s'" % a for a in argtype), type(arg))) + else: + raise exc.ArgumentError( + "Argument '%s' is expected to be of type '%s', got '%s'" % + (name, argtype, type(arg))) + + +def dictlike_iteritems(dictlike): + """Return a (key, value) iterator for almost any dict-like object.""" + + if compat.py3k: + if hasattr(dictlike, 'items'): + return list(dictlike.items()) + else: + if hasattr(dictlike, 'iteritems'): + return dictlike.iteritems() + elif hasattr(dictlike, 'items'): + return iter(dictlike.items()) + + getter = getattr(dictlike, '__getitem__', getattr(dictlike, 'get', None)) + if getter is None: + raise TypeError( + "Object '%r' is not dict-like" % dictlike) + + if hasattr(dictlike, 'iterkeys'): + def iterator(): + for key in dictlike.iterkeys(): + yield key, getter(key) + return iterator() + elif hasattr(dictlike, 'keys'): + return iter((key, getter(key)) for key in dictlike.keys()) + else: + raise TypeError( + "Object '%r' is not dict-like" % dictlike) + + +class classproperty(property): + """A decorator that behaves like @property except that operates + on classes rather than instances. + + The decorator is currently special when using the declarative + module, but note that the + :class:`~.sqlalchemy.ext.declarative.declared_attr` + decorator should be used for this purpose with declarative. + + """ + + def __init__(self, fget, *arg, **kw): + super(classproperty, self).__init__(fget, *arg, **kw) + self.__doc__ = fget.__doc__ + + def __get__(desc, self, cls): + return desc.fget(cls) + + +class hybridproperty(object): + def __init__(self, func): + self.func = func + + def __get__(self, instance, owner): + if instance is None: + clsval = self.func(owner) + clsval.__doc__ = self.func.__doc__ + return clsval + else: + return self.func(instance) + + +class hybridmethod(object): + """Decorate a function as cls- or instance- level.""" + + def __init__(self, func): + self.func = func + + def __get__(self, instance, owner): + if instance is None: + return self.func.__get__(owner, owner.__class__) + else: + return self.func.__get__(instance, owner) + + +class _symbol(int): + def __new__(self, name, doc=None, canonical=None): + """Construct a new named symbol.""" + assert isinstance(name, compat.string_types) + if canonical is None: + canonical = hash(name) + v = int.__new__(_symbol, canonical) + v.name = name + if doc: + v.__doc__ = doc + return v + + def __reduce__(self): + return symbol, (self.name, "x", int(self)) + + def __str__(self): + return repr(self) + + def __repr__(self): + return "symbol(%r)" % self.name + +_symbol.__name__ = 'symbol' + + +class symbol(object): + """A constant symbol. 
+
+    >>> symbol('foo') is symbol('foo')
+    True
+    >>> symbol('foo')
+    symbol('foo')
+
+    A slight refinement of the MAGICCOOKIE=object() pattern.  The primary
+    advantage of symbol() is its repr().  They are also singletons.
+
+    Repeated calls of symbol('name') will all return the same instance.
+
+    The optional ``doc`` argument assigns to ``__doc__``.  This
+    is strictly so that Sphinx autoattr picks up the docstring we want
+    (it doesn't appear to pick up the in-module docstring if the datamember
+    is in a different module - autoattribute also blows up completely).
+    If Sphinx fixes/improves this then we would no longer need
+    ``doc`` here.
+
+    """
+    symbols = {}
+    _lock = compat.threading.Lock()
+
+    def __new__(cls, name, doc=None, canonical=None):
+        cls._lock.acquire()
+        try:
+            sym = cls.symbols.get(name)
+            if sym is None:
+                cls.symbols[name] = sym = _symbol(name, doc, canonical)
+            return sym
+        finally:
+            symbol._lock.release()
+
+
+_creation_order = 1
+
+
+def set_creation_order(instance):
+    """Assign a '_creation_order' sequence to the given instance.
+
+    This allows multiple instances to be sorted in order of creation
+    (typically within a single thread; the counter is not particularly
+    threadsafe).
+
+    """
+    global _creation_order
+    instance._creation_order = _creation_order
+    _creation_order += 1
+
+
+def warn_exception(func, *args, **kwargs):
+    """Execute the given function, catching all exceptions and converting
+    them to a warning.
+
+    """
+    try:
+        return func(*args, **kwargs)
+    except Exception:
+        warn("%s('%s') ignored" % sys.exc_info()[0:2])
+
+
+def ellipses_string(value, len_=25):
+    try:
+        if len(value) > len_:
+            return "%s..." % value[0:len_]
+        else:
+            return value
+    except TypeError:
+        return value
+
+
+class _hash_limit_string(compat.text_type):
+    """A string subclass that can only be hashed on a maximum amount
+    of unique values.
+
+    This is used for warnings so that we can send out parameterized warnings
+    without the __warningregistry__ of the module, or the non-overridable
+    "once" registry within warnings.py, overloading memory.
+
+    """
+    def __new__(cls, value, num, args):
+        interpolated = (value % args) + \
+            (" (this warning may be suppressed after %d occurrences)" % num)
+        self = super(_hash_limit_string, cls).__new__(cls, interpolated)
+        self._hash = hash("%s_%d" % (value, hash(interpolated) % num))
+        return self
+
+    def __hash__(self):
+        return self._hash
+
+    def __eq__(self, other):
+        return hash(self) == hash(other)
+
+
+def warn(msg):
+    """Issue a warning.
+
+    If msg is a string, :class:`.exc.SAWarning` is used as
+    the category.
+
+    """
+    warnings.warn(msg, exc.SAWarning, stacklevel=2)
+
+
+def warn_limited(msg, args):
+    """Issue a warning with a parameterized string, limiting the number
+    of registrations.
+
+    """
+    if args:
+        msg = _hash_limit_string(msg, 10, args)
+    warnings.warn(msg, exc.SAWarning, stacklevel=2)
+
+
+def only_once(fn):
+    """Decorate the given function to be a no-op after it is called exactly
+    once."""
+
+    once = [fn]
+
+    def go(*arg, **kw):
+        if once:
+            once_fn = once.pop()
+            return once_fn(*arg, **kw)
+
+    return go
+
+
+_SQLA_RE = re.compile(r'sqlalchemy/([a-z_]+/){0,2}[a-z_]+\.py')
+_UNITTEST_RE = re.compile(r'unit(?:2|test2?/)')
+
+
+def chop_traceback(tb, exclude_prefix=_UNITTEST_RE, exclude_suffix=_SQLA_RE):
+    """Chop extraneous lines off beginning and end of a traceback.
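+
+    A minimal usage sketch (assumes the stdlib ``traceback`` module has
+    been imported by the caller)::
+
+        full = traceback.format_stack()
+        interesting = chop_traceback(full)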
+ + :param tb: + a list of traceback lines as returned by ``traceback.format_stack()`` + + :param exclude_prefix: + a regular expression object matching lines to skip at beginning of + ``tb`` + + :param exclude_suffix: + a regular expression object matching lines to skip at end of ``tb`` + """ + start = 0 + end = len(tb) - 1 + while start <= end and exclude_prefix.search(tb[start]): + start += 1 + while start <= end and exclude_suffix.search(tb[end]): + end -= 1 + return tb[start:end + 1] + +NoneType = type(None) + + +def attrsetter(attrname): + code = \ + "def set(obj, value):"\ + " obj.%s = value" % attrname + env = locals().copy() + exec(code, env) + return env['set'] + + +class EnsureKWArgType(type): + """Apply translation of functions to accept **kw arguments if they + don't already. + + """ + def __init__(cls, clsname, bases, clsdict): + fn_reg = cls.ensure_kwarg + if fn_reg: + for key in clsdict: + m = re.match(fn_reg, key) + if m: + fn = clsdict[key] + spec = compat.inspect_getargspec(fn) + if not spec.keywords: + clsdict[key] = wrapped = cls._wrap_w_kw(fn) + setattr(cls, key, wrapped) + super(EnsureKWArgType, cls).__init__(clsname, bases, clsdict) + + def _wrap_w_kw(self, fn): + + def wrap(*arg, **kw): + return fn(*arg) + return update_wrapper(wrap, fn) + diff --git a/lib/python3.4/site-packages/sqlalchemy/util/queue.py b/lib/python3.4/site-packages/sqlalchemy/util/queue.py new file mode 100644 index 0000000..2213471 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/util/queue.py @@ -0,0 +1,199 @@ +# util/queue.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""An adaptation of Py2.3/2.4's Queue module which supports reentrant +behavior, using RLock instead of Lock for its mutex object. The +Queue object is used exclusively by the sqlalchemy.pool.QueuePool +class. + +This is to support the connection pool's usage of weakref callbacks to return +connections to the underlying Queue, which can in extremely +rare cases be invoked within the ``get()`` method of the Queue itself, +producing a ``put()`` inside the ``get()`` and therefore a reentrant +condition. + +""" + +from collections import deque +from time import time as _time +from .compat import threading + + +__all__ = ['Empty', 'Full', 'Queue'] + + +class Empty(Exception): + "Exception raised by Queue.get(block=0)/get_nowait()." + + pass + + +class Full(Exception): + "Exception raised by Queue.put(block=0)/put_nowait()." + + pass + + +class Queue: + def __init__(self, maxsize=0): + """Initialize a queue object with a given maximum size. + + If `maxsize` is <= 0, the queue size is infinite. + """ + + self._init(maxsize) + # mutex must be held whenever the queue is mutating. All methods + # that acquire mutex must release it before returning. mutex + # is shared between the two conditions, so acquiring and + # releasing the conditions also acquires and releases mutex. + self.mutex = threading.RLock() + # Notify not_empty whenever an item is added to the queue; a + # thread waiting to get is notified then. + self.not_empty = threading.Condition(self.mutex) + # Notify not_full whenever an item is removed from the queue; + # a thread waiting to put is notified then. 
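+        # Note: because ``mutex`` is an RLock, a weakref callback that
+        # fires inside get() may safely re-enter put() on the same thread
+        # (the reentrant case described in the module docstring); a plain
+        # Lock would deadlock there.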
+ self.not_full = threading.Condition(self.mutex) + + def qsize(self): + """Return the approximate size of the queue (not reliable!).""" + + self.mutex.acquire() + n = self._qsize() + self.mutex.release() + return n + + def empty(self): + """Return True if the queue is empty, False otherwise (not + reliable!).""" + + self.mutex.acquire() + n = self._empty() + self.mutex.release() + return n + + def full(self): + """Return True if the queue is full, False otherwise (not + reliable!).""" + + self.mutex.acquire() + n = self._full() + self.mutex.release() + return n + + def put(self, item, block=True, timeout=None): + """Put an item into the queue. + + If optional args `block` is True and `timeout` is None (the + default), block if necessary until a free slot is + available. If `timeout` is a positive number, it blocks at + most `timeout` seconds and raises the ``Full`` exception if no + free slot was available within that time. Otherwise (`block` + is false), put an item on the queue if a free slot is + immediately available, else raise the ``Full`` exception + (`timeout` is ignored in that case). + """ + + self.not_full.acquire() + try: + if not block: + if self._full(): + raise Full + elif timeout is None: + while self._full(): + self.not_full.wait() + else: + if timeout < 0: + raise ValueError("'timeout' must be a positive number") + endtime = _time() + timeout + while self._full(): + remaining = endtime - _time() + if remaining <= 0.0: + raise Full + self.not_full.wait(remaining) + self._put(item) + self.not_empty.notify() + finally: + self.not_full.release() + + def put_nowait(self, item): + """Put an item into the queue without blocking. + + Only enqueue the item if a free slot is immediately available. + Otherwise raise the ``Full`` exception. + """ + return self.put(item, False) + + def get(self, block=True, timeout=None): + """Remove and return an item from the queue. + + If optional args `block` is True and `timeout` is None (the + default), block if necessary until an item is available. If + `timeout` is a positive number, it blocks at most `timeout` + seconds and raises the ``Empty`` exception if no item was + available within that time. Otherwise (`block` is false), + return an item if one is immediately available, else raise the + ``Empty`` exception (`timeout` is ignored in that case). + """ + self.not_empty.acquire() + try: + if not block: + if self._empty(): + raise Empty + elif timeout is None: + while self._empty(): + self.not_empty.wait() + else: + if timeout < 0: + raise ValueError("'timeout' must be a positive number") + endtime = _time() + timeout + while self._empty(): + remaining = endtime - _time() + if remaining <= 0.0: + raise Empty + self.not_empty.wait(remaining) + item = self._get() + self.not_full.notify() + return item + finally: + self.not_empty.release() + + def get_nowait(self): + """Remove and return an item from the queue without blocking. + + Only get an item if one is immediately available. Otherwise + raise the ``Empty`` exception. + """ + + return self.get(False) + + # Override these methods to implement other queue organizations + # (e.g. stack or priority queue). 
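+    # For illustration, a LIFO (stack) organization could be sketched as:
+    #
+    #     class LifoQueue(Queue):
+    #         def _get(self):
+    #             # pop from the same end that _put appends to
+    #             return self.queue.pop()
+    #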
+ # These will only be called with appropriate locks held + + # Initialize the queue representation + def _init(self, maxsize): + self.maxsize = maxsize + self.queue = deque() + + def _qsize(self): + return len(self.queue) + + # Check whether the queue is empty + def _empty(self): + return not self.queue + + # Check whether the queue is full + def _full(self): + return self.maxsize > 0 and len(self.queue) == self.maxsize + + # Put a new item in the queue + def _put(self, item): + self.queue.append(item) + + # Get an item from the queue + def _get(self): + return self.queue.popleft() diff --git a/lib/python3.4/site-packages/sqlalchemy/util/topological.py b/lib/python3.4/site-packages/sqlalchemy/util/topological.py new file mode 100644 index 0000000..5c5c54c --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/util/topological.py @@ -0,0 +1,100 @@ +# util/topological.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Topological sorting algorithms.""" + +from ..exc import CircularDependencyError +from .. import util + +__all__ = ['sort', 'sort_as_subsets', 'find_cycles'] + + +def sort_as_subsets(tuples, allitems, deterministic_order=False): + + edges = util.defaultdict(set) + for parent, child in tuples: + edges[child].add(parent) + + Set = util.OrderedSet if deterministic_order else set + + todo = Set(allitems) + + while todo: + output = Set() + for node in todo: + if todo.isdisjoint(edges[node]): + output.add(node) + + if not output: + raise CircularDependencyError( + "Circular dependency detected.", + find_cycles(tuples, allitems), + _gen_edges(edges) + ) + + todo.difference_update(output) + yield output + + +def sort(tuples, allitems, deterministic_order=False): + """sort the given list of items by dependency. + + 'tuples' is a list of tuples representing a partial ordering. + 'deterministic_order' keeps items within a dependency tier in list order. + """ + + for set_ in sort_as_subsets(tuples, allitems, deterministic_order): + for s in set_: + yield s + + +def find_cycles(tuples, allitems): + # adapted from: + # http://neopythonic.blogspot.com/2009/01/detecting-cycles-in-directed-graph.html + + edges = util.defaultdict(set) + for parent, child in tuples: + edges[parent].add(child) + nodes_to_test = set(edges) + + output = set() + + # we'd like to find all nodes that are + # involved in cycles, so we do the full + # pass through the whole thing for each + # node in the original list. + + # we can go just through parent edge nodes. + # if a node is only a child and never a parent, + # by definition it can't be part of a cycle. same + # if it's not in the edges at all. 
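+    # each pass below is an iterative depth-first walk: ``stack`` holds
+    # the path from the starting node, and when a successor is already
+    # present in ``stack``, the slice of the path from that successor
+    # onward is a cycle and is added to ``output``.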
+ for node in nodes_to_test: + stack = [node] + todo = nodes_to_test.difference(stack) + while stack: + top = stack[-1] + for node in edges[top]: + if node in stack: + cyc = stack[stack.index(node):] + todo.difference_update(cyc) + output.update(cyc) + + if node in todo: + stack.append(node) + todo.remove(node) + break + else: + node = stack.pop() + return output + + +def _gen_edges(edges): + return set([ + (right, left) + for left in edges + for right in edges[left] + ]) diff --git a/update.sh b/update.sh new file mode 100755 index 0000000..ca35a22 --- /dev/null +++ b/update.sh @@ -0,0 +1,25 @@ +#!/bin/bash +set -e +cd `dirname $0` +export PIP_DOWNLOAD_CACHE=`pwd`/../pip_cache +cat ../openmedialibrary/requirements.txt \ + | grep -v lxml \ + | grep -v pyCrypto \ + | grep -v pillow \ + | grep -v simplejson \ + > requirements.txt +echo setuptools >> requirements.txt +echo six >> requirements.txt +mkdir old +test -e lib && mv lib old/ +test -e bin && mv bin old/ + +rm -rf p34 + +virtualenv -p /usr/bin/python3.4 p34 +p34/bin/pip3 install -r requirements.txt + +mkdir -p lib/python3.4 +mv p34/lib/python3.4/site-packages lib/python3.4/site-packages + +rm -r p34 old requirements.txt